diff --git "a/exp/log/log-train-2023-03-27-14-47-20-0" "b/exp/log/log-train-2023-03-27-14-47-20-0" new file mode 100644--- /dev/null +++ "b/exp/log/log-train-2023-03-27-14-47-20-0" @@ -0,0 +1,12311 @@ +2023-03-27 14:47:20,899 INFO [train.py:962] (0/4) Training started +2023-03-27 14:47:20,902 INFO [train.py:972] (0/4) Device: cuda:0 +2023-03-27 14:47:20,908 INFO [train.py:981] (0/4) {'frame_shift_ms': 10.0, 'allowed_excess_duration_ratio': 0.1, 'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.23.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': '9426c9f730820d291f5dcb06be337662595fa7b4', 'k2-git-date': 'Sun Feb 5 17:35:01 2023', 'lhotse-version': '1.13.0.dev+git.4cbd1bde.clean', 'torch-version': '1.10.0+cu102', 'torch-cuda-available': True, 'torch-cuda-version': '10.2', 'python-version': '3.8', 'icefall-git-branch': 'bbpe', 'icefall-git-sha1': 'e03c10a-dirty', 'icefall-git-date': 'Mon Mar 27 00:05:03 2023', 'icefall-path': '/ceph-kw/kangwei/code/icefall_bbpe', 'k2-path': '/ceph-hw/kangwei/code/k2_release/k2/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-hw/kangwei/dev_tools/anaconda3/envs/rnnt2/lib/python3.8/site-packages/lhotse-1.13.0.dev0+git.4cbd1bde.clean-py3.8.egg/lhotse/__init__.py', 'hostname': 'de-74279-k2-train-9-0208143539-7dcb6bfd79-b6fdq', 'IP address': '10.177.13.150'}, 'world_size': 4, 'master_port': 12535, 'tensorboard': True, 'num_epochs': 50, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('pruned_transducer_stateless7_bbpe/exp'), 'bbpe_model': 'data/lang_bbpe_500/bbpe.model', 'base_lr': 0.05, 'lr_batches': 5000, 'lr_epochs': 3.5, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 2000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,4,3,2,4', 'feedforward_dims': '1024,1024,2048,2048,1024', 'nhead': '8,8,8,8,8', 'encoder_dims': '384,384,384,384,384', 'attention_dims': '192,192,192,192,192', 'encoder_unmasked_dims': '256,256,256,256,256', 'zipformer_downsampling_factors': '1,2,4,8,2', 'cnn_module_kernels': '31,31,31,31,31', 'decoder_dim': 512, 'joiner_dim': 512, 'manifest_dir': PosixPath('data/fbank'), 'max_duration': 800, 'bucketing_sampler': True, 'num_buckets': 300, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': True, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2023-03-27 14:47:20,908 INFO [train.py:983] (0/4) About to create model +2023-03-27 14:47:21,815 INFO [zipformer.py:178] (0/4) At encoder stack 4, which has downsampling_factor=2, we will combine the outputs of layers 1 and 3, with downsampling_factors=2 and 8. +2023-03-27 14:47:21,839 INFO [train.py:987] (0/4) Number of model parameters: 70369391 +2023-03-27 14:47:28,391 INFO [train.py:1002] (0/4) Using DDP +2023-03-27 14:47:28,704 INFO [asr_datamodule.py:407] (0/4) About to get train cuts +2023-03-27 14:47:28,707 INFO [train.py:1083] (0/4) Filtering short and long utterances. +2023-03-27 14:47:28,707 INFO [train.py:1086] (0/4) Tokenizing and encoding texts in train cuts. 
+2023-03-27 14:47:28,707 INFO [asr_datamodule.py:224] (0/4) About to get Musan cuts +2023-03-27 14:47:31,955 INFO [asr_datamodule.py:229] (0/4) Enable MUSAN +2023-03-27 14:47:31,955 INFO [asr_datamodule.py:252] (0/4) Enable SpecAugment +2023-03-27 14:47:31,955 INFO [asr_datamodule.py:253] (0/4) Time warp factor: 80 +2023-03-27 14:47:31,955 INFO [asr_datamodule.py:263] (0/4) Num frame mask: 10 +2023-03-27 14:47:31,955 INFO [asr_datamodule.py:276] (0/4) About to create train dataset +2023-03-27 14:47:31,956 INFO [asr_datamodule.py:303] (0/4) Using DynamicBucketingSampler. +2023-03-27 14:47:42,676 INFO [asr_datamodule.py:320] (0/4) About to create train dataloader +2023-03-27 14:47:42,677 INFO [asr_datamodule.py:414] (0/4) About to get dev cuts +2023-03-27 14:47:42,679 INFO [train.py:1102] (0/4) Tokenizing and encoding texts in valid cuts. +2023-03-27 14:47:42,679 INFO [asr_datamodule.py:351] (0/4) About to create dev dataset +2023-03-27 14:47:43,506 INFO [asr_datamodule.py:370] (0/4) About to create dev dataloader +2023-03-27 14:48:25,411 INFO [train.py:892] (0/4) Epoch 1, batch 0, loss[loss=7.457, simple_loss=6.752, pruned_loss=7.038, over 19683.00 frames. ], tot_loss[loss=7.457, simple_loss=6.752, pruned_loss=7.038, over 19683.00 frames. ], batch size: 75, lr: 2.50e-02, grad_scale: 2.0 +2023-03-27 14:48:25,412 INFO [train.py:917] (0/4) Computing validation loss +2023-03-27 14:48:53,173 INFO [train.py:926] (0/4) Epoch 1, validation: loss=6.85, simple_loss=6.179, pruned_loss=6.691, over 2883724.00 frames. +2023-03-27 14:48:53,174 INFO [train.py:927] (0/4) Maximum memory allocated so far is 14795MB +2023-03-27 14:49:01,278 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5.0, num_to_drop=2, layers_to_drop={1, 3} +2023-03-27 14:49:05,191 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=3.68 vs. limit=2.0 +2023-03-27 14:49:31,452 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 14:50:04,609 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=10.98 vs. limit=2.0 +2023-03-27 14:50:15,192 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=202.40 vs. limit=5.0 +2023-03-27 14:50:17,292 INFO [train.py:892] (0/4) Epoch 1, batch 50, loss[loss=1.187, simple_loss=1.04, pruned_loss=1.302, over 19746.00 frames. ], tot_loss[loss=2.235, simple_loss=2.024, pruned_loss=2.026, over 890658.36 frames. ], batch size: 97, lr: 2.75e-02, grad_scale: 0.5 +2023-03-27 14:50:26,550 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0517, 5.0413, 5.0525, 5.0443, 5.0543, 5.0475, 4.3349, 5.0401], + device='cuda:0'), covar=tensor([0.0012, 0.0011, 0.0018, 0.0018, 0.0013, 0.0026, 0.0023, 0.0018], + device='cuda:0'), in_proj_covar=tensor([0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009], + device='cuda:0'), out_proj_covar=tensor([9.0030e-06, 9.1429e-06, 8.9563e-06, 9.2147e-06, 8.9738e-06, 9.0880e-06, + 9.1009e-06, 9.1455e-06], device='cuda:0') +2023-03-27 14:50:51,249 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=85.63 vs. limit=5.0 +2023-03-27 14:51:13,724 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 14:51:21,723 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=8.84 vs. 
limit=2.0 +2023-03-27 14:51:39,745 INFO [train.py:892] (0/4) Epoch 1, batch 100, loss[loss=0.8906, simple_loss=0.7597, pruned_loss=1.032, over 19746.00 frames. ], tot_loss[loss=1.568, simple_loss=1.392, pruned_loss=1.577, over 1565816.43 frames. ], batch size: 139, lr: 3.00e-02, grad_scale: 1.0 +2023-03-27 14:51:42,869 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.423e+01 2.042e+02 3.545e+02 1.360e+03 1.838e+04, threshold=7.089e+02, percent-clipped=0.0 +2023-03-27 14:52:20,712 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=4.84 vs. limit=2.0 +2023-03-27 14:52:28,035 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=6.07 vs. limit=2.0 +2023-03-27 14:52:37,081 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=3.09 vs. limit=2.0 +2023-03-27 14:52:40,358 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=29.98 vs. limit=5.0 +2023-03-27 14:52:51,601 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=144.0, num_to_drop=2, layers_to_drop={2, 3} +2023-03-27 14:53:04,038 INFO [train.py:892] (0/4) Epoch 1, batch 150, loss[loss=0.8625, simple_loss=0.7333, pruned_loss=0.9344, over 19888.00 frames. ], tot_loss[loss=1.276, simple_loss=1.12, pruned_loss=1.322, over 2093328.16 frames. ], batch size: 77, lr: 3.25e-02, grad_scale: 1.0 +2023-03-27 14:53:30,861 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.5980, 4.7061, 5.4801, 5.8685, 5.7995, 5.8839, 5.9919, 1.7099], + device='cuda:0'), covar=tensor([0.0054, 0.0093, 0.0082, 0.0080, 0.0053, 0.0088, 0.0058, 0.0309], + device='cuda:0'), in_proj_covar=tensor([0.0009, 0.0009, 0.0010, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009], + device='cuda:0'), out_proj_covar=tensor([8.8604e-06, 9.0396e-06, 8.9186e-06, 9.1321e-06, 8.8662e-06, 8.8654e-06, + 8.9414e-06, 9.1245e-06], device='cuda:0') +2023-03-27 14:54:23,800 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-27 14:54:26,335 INFO [train.py:892] (0/4) Epoch 1, batch 200, loss[loss=0.7877, simple_loss=0.6753, pruned_loss=0.7682, over 19795.00 frames. ], tot_loss[loss=1.122, simple_loss=0.9772, pruned_loss=1.161, over 2505250.23 frames. ], batch size: 241, lr: 3.50e-02, grad_scale: 1.0 +2023-03-27 14:54:29,324 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 8.392e+01 1.785e+02 2.449e+02 3.359e+02 7.299e+02, threshold=4.898e+02, percent-clipped=1.0 +2023-03-27 14:55:46,828 INFO [train.py:892] (0/4) Epoch 1, batch 250, loss[loss=0.8245, simple_loss=0.7023, pruned_loss=0.7808, over 19763.00 frames. ], tot_loss[loss=1.024, simple_loss=0.8863, pruned_loss=1.051, over 2825641.43 frames. ], batch size: 233, lr: 3.75e-02, grad_scale: 1.0 +2023-03-27 14:57:03,486 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=296.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 14:57:10,683 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=300.0, num_to_drop=2, layers_to_drop={0, 3} +2023-03-27 14:57:11,393 INFO [train.py:892] (0/4) Epoch 1, batch 300, loss[loss=0.8095, simple_loss=0.6748, pruned_loss=0.7878, over 19947.00 frames. ], tot_loss[loss=0.958, simple_loss=0.8238, pruned_loss=0.9677, over 3074603.29 frames. 
], batch size: 46, lr: 4.00e-02, grad_scale: 1.0 +2023-03-27 14:57:14,287 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 9.056e+01 1.487e+02 2.142e+02 2.914e+02 5.641e+02, threshold=4.285e+02, percent-clipped=2.0 +2023-03-27 14:57:31,878 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=5.61 vs. limit=5.0 +2023-03-27 14:57:43,631 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2556, 4.2574, 4.2558, 4.2563, 4.2558, 4.2535, 4.2571, 4.2564], + device='cuda:0'), covar=tensor([0.0042, 0.0044, 0.0035, 0.0046, 0.0044, 0.0045, 0.0040, 0.0037], + device='cuda:0'), in_proj_covar=tensor([0.0010, 0.0010, 0.0010, 0.0010, 0.0010, 0.0010, 0.0009, 0.0010], + device='cuda:0'), out_proj_covar=tensor([9.5202e-06, 9.6644e-06, 9.6025e-06, 9.6041e-06, 9.8829e-06, 9.5602e-06, + 9.5405e-06, 9.8495e-06], device='cuda:0') +2023-03-27 14:57:55,917 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=5.51 vs. limit=5.0 +2023-03-27 14:58:08,620 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=6.12 vs. limit=5.0 +2023-03-27 14:58:31,273 INFO [train.py:892] (0/4) Epoch 1, batch 350, loss[loss=0.7404, simple_loss=0.6209, pruned_loss=0.6743, over 19796.00 frames. ], tot_loss[loss=0.9143, simple_loss=0.7814, pruned_loss=0.9076, over 3268574.70 frames. ], batch size: 174, lr: 4.25e-02, grad_scale: 1.0 +2023-03-27 14:58:40,894 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=357.0, num_to_drop=2, layers_to_drop={2, 3} +2023-03-27 14:59:30,237 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=387.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 14:59:51,339 INFO [train.py:892] (0/4) Epoch 1, batch 400, loss[loss=1.062, simple_loss=0.8961, pruned_loss=0.9118, over 18751.00 frames. ], tot_loss[loss=0.8833, simple_loss=0.7505, pruned_loss=0.8601, over 3418905.24 frames. ], batch size: 564, lr: 4.50e-02, grad_scale: 2.0 +2023-03-27 14:59:54,325 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.540e+02 2.069e+02 2.975e+02 6.292e+02, threshold=4.137e+02, percent-clipped=2.0 +2023-03-27 15:00:51,862 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=439.0, num_to_drop=2, layers_to_drop={2, 3} +2023-03-27 15:01:07,618 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=448.0, num_to_drop=2, layers_to_drop={0, 3} +2023-03-27 15:01:12,540 INFO [train.py:892] (0/4) Epoch 1, batch 450, loss[loss=0.7446, simple_loss=0.6195, pruned_loss=0.6433, over 19476.00 frames. ], tot_loss[loss=0.8582, simple_loss=0.726, pruned_loss=0.8165, over 3536385.99 frames. ], batch size: 43, lr: 4.75e-02, grad_scale: 2.0 +2023-03-27 15:02:30,371 INFO [train.py:892] (0/4) Epoch 1, batch 500, loss[loss=0.7452, simple_loss=0.6275, pruned_loss=0.6031, over 19839.00 frames. ], tot_loss[loss=0.8364, simple_loss=0.7059, pruned_loss=0.7742, over 3627719.42 frames. ], batch size: 60, lr: 4.99e-02, grad_scale: 2.0 +2023-03-27 15:02:32,872 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=7.35 vs. limit=5.0 +2023-03-27 15:02:33,256 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.044e+02 2.386e+02 3.363e+02 4.760e+02 1.006e+03, threshold=6.727e+02, percent-clipped=34.0 +2023-03-27 15:02:39,995 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=7.00 vs. 
limit=5.0 +2023-03-27 15:03:19,094 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5501, 3.7590, 3.7325, 3.8911, 3.9147, 4.2672, 3.9565, 3.6693], + device='cuda:0'), covar=tensor([0.1897, 0.1484, 0.1067, 0.0926, 0.0815, 0.0757, 0.0909, 0.0700], + device='cuda:0'), in_proj_covar=tensor([0.0014, 0.0014, 0.0015, 0.0015, 0.0013, 0.0014, 0.0013, 0.0014], + device='cuda:0'), out_proj_covar=tensor([1.3310e-05, 1.2731e-05, 1.2825e-05, 1.2783e-05, 1.2064e-05, 1.2458e-05, + 1.2591e-05, 1.2086e-05], device='cuda:0') +2023-03-27 15:03:21,933 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8233, 3.0526, 3.2053, 3.2116, 3.4013, 3.5353, 3.4328, 3.1467], + device='cuda:0'), covar=tensor([0.1919, 0.2141, 0.1175, 0.1284, 0.0998, 0.1038, 0.1078, 0.0849], + device='cuda:0'), in_proj_covar=tensor([0.0014, 0.0015, 0.0015, 0.0015, 0.0013, 0.0014, 0.0013, 0.0014], + device='cuda:0'), out_proj_covar=tensor([1.3322e-05, 1.2778e-05, 1.2797e-05, 1.2769e-05, 1.2083e-05, 1.2460e-05, + 1.2621e-05, 1.2060e-05], device='cuda:0') +2023-03-27 15:03:35,063 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=2.19 vs. limit=2.0 +2023-03-27 15:03:50,987 INFO [train.py:892] (0/4) Epoch 1, batch 550, loss[loss=0.8349, simple_loss=0.697, pruned_loss=0.6722, over 19706.00 frames. ], tot_loss[loss=0.8111, simple_loss=0.6842, pruned_loss=0.7287, over 3700595.95 frames. ], batch size: 315, lr: 4.98e-02, grad_scale: 2.0 +2023-03-27 15:04:10,892 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=562.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 15:04:20,984 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=568.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 15:04:46,733 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=2.00 vs. limit=2.0 +2023-03-27 15:04:55,576 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=590.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 15:05:13,760 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=600.0, num_to_drop=2, layers_to_drop={0, 2} +2023-03-27 15:05:14,483 INFO [train.py:892] (0/4) Epoch 1, batch 600, loss[loss=0.6748, simple_loss=0.5727, pruned_loss=0.5079, over 19867.00 frames. ], tot_loss[loss=0.7854, simple_loss=0.6635, pruned_loss=0.6831, over 3756417.62 frames. ], batch size: 89, lr: 4.98e-02, grad_scale: 2.0 +2023-03-27 15:05:17,528 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.786e+02 4.093e+02 5.434e+02 6.548e+02 1.823e+03, threshold=1.087e+03, percent-clipped=20.0 +2023-03-27 15:05:49,207 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=623.0, num_to_drop=2, layers_to_drop={0, 2} +2023-03-27 15:05:59,406 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=629.0, num_to_drop=2, layers_to_drop={0, 3} +2023-03-27 15:05:59,613 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.12 vs. limit=2.0 +2023-03-27 15:06:31,652 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=648.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 15:06:35,600 INFO [train.py:892] (0/4) Epoch 1, batch 650, loss[loss=0.6274, simple_loss=0.5404, pruned_loss=0.4453, over 19821.00 frames. ], tot_loss[loss=0.7611, simple_loss=0.6442, pruned_loss=0.6408, over 3800636.85 frames. 
], batch size: 121, lr: 4.98e-02, grad_scale: 2.0 +2023-03-27 15:06:36,316 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=651.0, num_to_drop=2, layers_to_drop={2, 3} +2023-03-27 15:06:37,785 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=652.0, num_to_drop=2, layers_to_drop={2, 3} +2023-03-27 15:07:28,299 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.24 vs. limit=2.0 +2023-03-27 15:07:56,921 INFO [train.py:892] (0/4) Epoch 1, batch 700, loss[loss=0.6689, simple_loss=0.5642, pruned_loss=0.49, over 19816.00 frames. ], tot_loss[loss=0.7372, simple_loss=0.6255, pruned_loss=0.6012, over 3835764.99 frames. ], batch size: 103, lr: 4.98e-02, grad_scale: 2.0 +2023-03-27 15:07:59,945 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.633e+02 4.666e+02 6.058e+02 9.217e+02 2.342e+03, threshold=1.212e+03, percent-clipped=17.0 +2023-03-27 15:08:59,023 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=739.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-27 15:09:06,935 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=743.0, num_to_drop=2, layers_to_drop={0, 3} +2023-03-27 15:09:20,061 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=2.02 vs. limit=2.0 +2023-03-27 15:09:20,450 INFO [train.py:892] (0/4) Epoch 1, batch 750, loss[loss=0.5741, simple_loss=0.4974, pruned_loss=0.3884, over 19795.00 frames. ], tot_loss[loss=0.7195, simple_loss=0.6118, pruned_loss=0.5692, over 3860580.81 frames. ], batch size: 105, lr: 4.97e-02, grad_scale: 2.0 +2023-03-27 15:10:00,854 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.9570, 1.8787, 2.5435, 2.0729, 2.1660, 2.6622, 2.5605, 1.6371], + device='cuda:0'), covar=tensor([0.5528, 0.6445, 0.4060, 0.4858, 0.4871, 0.3812, 0.4568, 0.7042], + device='cuda:0'), in_proj_covar=tensor([0.0055, 0.0056, 0.0057, 0.0052, 0.0056, 0.0053, 0.0057, 0.0060], + device='cuda:0'), out_proj_covar=tensor([4.9621e-05, 5.0150e-05, 5.0540e-05, 4.7949e-05, 5.2045e-05, 4.8696e-05, + 5.2512e-05, 5.5171e-05], device='cuda:0') +2023-03-27 15:10:17,284 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=787.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 15:10:39,417 INFO [train.py:892] (0/4) Epoch 1, batch 800, loss[loss=0.661, simple_loss=0.5696, pruned_loss=0.4463, over 19794.00 frames. ], tot_loss[loss=0.7028, simple_loss=0.5995, pruned_loss=0.5396, over 3879606.17 frames. 
], batch size: 48, lr: 4.97e-02, grad_scale: 4.0 +2023-03-27 15:10:42,238 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.256e+02 4.881e+02 6.070e+02 8.660e+02 1.858e+03, threshold=1.214e+03, percent-clipped=11.0 +2023-03-27 15:10:47,669 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6891, 4.9963, 5.0183, 4.6707, 5.0226, 4.7627, 4.6836, 4.5739], + device='cuda:0'), covar=tensor([0.0918, 0.0656, 0.0745, 0.0814, 0.0648, 0.0863, 0.1201, 0.1002], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0025, 0.0026, 0.0027, 0.0025, 0.0027, 0.0027, 0.0027], + device='cuda:0'), out_proj_covar=tensor([2.8098e-05, 2.6653e-05, 2.5898e-05, 2.6505e-05, 2.5060e-05, 2.5878e-05, + 2.7633e-05, 2.5322e-05], device='cuda:0') +2023-03-27 15:11:53,934 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=847.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 15:12:00,806 INFO [train.py:892] (0/4) Epoch 1, batch 850, loss[loss=0.6781, simple_loss=0.5798, pruned_loss=0.4589, over 19771.00 frames. ], tot_loss[loss=0.6853, simple_loss=0.5861, pruned_loss=0.5118, over 3895789.56 frames. ], batch size: 273, lr: 4.96e-02, grad_scale: 4.0 +2023-03-27 15:14:32,560 INFO [train.py:892] (0/4) Epoch 1, batch 900, loss[loss=0.6248, simple_loss=0.5419, pruned_loss=0.4055, over 19836.00 frames. ], tot_loss[loss=0.6705, simple_loss=0.5744, pruned_loss=0.4887, over 3908182.89 frames. ], batch size: 145, lr: 4.96e-02, grad_scale: 4.0 +2023-03-27 15:14:38,994 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.144e+02 5.792e+02 7.433e+02 9.367e+02 4.103e+03, threshold=1.487e+03, percent-clipped=16.0 +2023-03-27 15:14:51,760 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9535, 2.9282, 2.3048, 2.8925, 3.4509, 2.7898, 2.6738, 2.8327], + device='cuda:0'), covar=tensor([0.1207, 0.1485, 0.2506, 0.1174, 0.0777, 0.1978, 0.1486, 0.0980], + device='cuda:0'), in_proj_covar=tensor([0.0023, 0.0021, 0.0023, 0.0020, 0.0019, 0.0023, 0.0023, 0.0021], + device='cuda:0'), out_proj_covar=tensor([2.0216e-05, 1.6819e-05, 2.1002e-05, 1.6431e-05, 1.6054e-05, 2.1138e-05, + 2.1445e-05, 1.7143e-05], device='cuda:0') +2023-03-27 15:14:55,593 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=908.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-27 15:15:25,485 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=918.0, num_to_drop=2, layers_to_drop={2, 3} +2023-03-27 15:15:44,750 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=924.0, num_to_drop=2, layers_to_drop={0, 3} +2023-03-27 15:16:59,719 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=946.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-27 15:17:13,644 INFO [train.py:892] (0/4) Epoch 1, batch 950, loss[loss=0.6352, simple_loss=0.5487, pruned_loss=0.4109, over 19772.00 frames. ], tot_loss[loss=0.6558, simple_loss=0.5638, pruned_loss=0.4659, over 3916009.75 frames. 
], batch size: 263, lr: 4.96e-02, grad_scale: 4.0 +2023-03-27 15:17:16,391 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=952.0, num_to_drop=2, layers_to_drop={0, 2} +2023-03-27 15:17:47,542 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9846, 3.7272, 3.5872, 3.8347, 3.8322, 3.8445, 3.9423, 3.8702], + device='cuda:0'), covar=tensor([0.0591, 0.0821, 0.0912, 0.0972, 0.0751, 0.0724, 0.0834, 0.0873], + device='cuda:0'), in_proj_covar=tensor([0.0034, 0.0040, 0.0040, 0.0040, 0.0038, 0.0041, 0.0040, 0.0036], + device='cuda:0'), out_proj_covar=tensor([3.0350e-05, 3.9270e-05, 3.7438e-05, 4.0315e-05, 3.5461e-05, 3.9547e-05, + 3.9633e-05, 3.6738e-05], device='cuda:0') +2023-03-27 15:19:15,026 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1000.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 15:19:15,775 INFO [train.py:892] (0/4) Epoch 1, batch 1000, loss[loss=0.6061, simple_loss=0.5184, pruned_loss=0.3949, over 19746.00 frames. ], tot_loss[loss=0.6446, simple_loss=0.5554, pruned_loss=0.448, over 3923476.44 frames. ], batch size: 205, lr: 4.95e-02, grad_scale: 4.0 +2023-03-27 15:19:21,174 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.942e+02 5.404e+02 6.507e+02 8.567e+02 3.462e+03, threshold=1.301e+03, percent-clipped=3.0 +2023-03-27 15:20:17,817 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.95 vs. limit=2.0 +2023-03-27 15:21:22,310 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1043.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-27 15:21:50,409 INFO [train.py:892] (0/4) Epoch 1, batch 1050, loss[loss=0.6093, simple_loss=0.5389, pruned_loss=0.37, over 19957.00 frames. ], tot_loss[loss=0.6361, simple_loss=0.5489, pruned_loss=0.4335, over 3928452.79 frames. ], batch size: 53, lr: 4.95e-02, grad_scale: 4.0 +2023-03-27 15:23:33,620 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.93 vs. limit=5.0 +2023-03-27 15:23:43,152 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1091.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 15:24:13,073 INFO [train.py:892] (0/4) Epoch 1, batch 1100, loss[loss=0.5955, simple_loss=0.5188, pruned_loss=0.3685, over 19745.00 frames. ], tot_loss[loss=0.6232, simple_loss=0.5399, pruned_loss=0.4156, over 3933382.73 frames. ], batch size: 259, lr: 4.94e-02, grad_scale: 4.0 +2023-03-27 15:24:19,016 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.337e+02 6.074e+02 7.964e+02 1.002e+03 2.431e+03, threshold=1.593e+03, percent-clipped=21.0 +2023-03-27 15:26:44,132 INFO [train.py:892] (0/4) Epoch 1, batch 1150, loss[loss=0.5464, simple_loss=0.4836, pruned_loss=0.3268, over 19754.00 frames. ], tot_loss[loss=0.6122, simple_loss=0.532, pruned_loss=0.4007, over 3936451.04 frames. ], batch size: 102, lr: 4.94e-02, grad_scale: 4.0 +2023-03-27 15:26:48,642 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1153.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 15:28:37,501 INFO [train.py:892] (0/4) Epoch 1, batch 1200, loss[loss=0.5683, simple_loss=0.5218, pruned_loss=0.3172, over 19575.00 frames. ], tot_loss[loss=0.6053, simple_loss=0.5271, pruned_loss=0.3897, over 3938982.30 frames. 
], batch size: 53, lr: 4.93e-02, grad_scale: 8.0 +2023-03-27 15:28:40,926 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.874e+02 6.275e+02 7.572e+02 9.981e+02 2.448e+03, threshold=1.514e+03, percent-clipped=4.0 +2023-03-27 15:28:41,654 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1203.0, num_to_drop=2, layers_to_drop={2, 3} +2023-03-27 15:29:12,168 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1214.0, num_to_drop=2, layers_to_drop={1, 3} +2023-03-27 15:29:24,483 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1218.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-27 15:29:45,970 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1224.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 15:30:50,517 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1246.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 15:31:03,279 INFO [train.py:892] (0/4) Epoch 1, batch 1250, loss[loss=0.5171, simple_loss=0.4609, pruned_loss=0.3022, over 19827.00 frames. ], tot_loss[loss=0.5895, simple_loss=0.5159, pruned_loss=0.3726, over 3942131.04 frames. ], batch size: 147, lr: 4.92e-02, grad_scale: 8.0 +2023-03-27 15:31:21,211 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=2.02 vs. limit=2.0 +2023-03-27 15:31:47,659 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1266.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 15:31:59,819 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1272.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 15:32:47,346 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1294.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 15:32:57,677 INFO [train.py:892] (0/4) Epoch 1, batch 1300, loss[loss=0.4941, simple_loss=0.4486, pruned_loss=0.2792, over 19734.00 frames. ], tot_loss[loss=0.5845, simple_loss=0.5125, pruned_loss=0.3643, over 3941857.29 frames. ], batch size: 77, lr: 4.92e-02, grad_scale: 8.0 +2023-03-27 15:33:00,840 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.916e+02 5.972e+02 8.796e+02 1.171e+03 2.494e+03, threshold=1.759e+03, percent-clipped=14.0 +2023-03-27 15:34:24,476 INFO [train.py:892] (0/4) Epoch 1, batch 1350, loss[loss=0.5051, simple_loss=0.4488, pruned_loss=0.2936, over 19789.00 frames. ], tot_loss[loss=0.5763, simple_loss=0.5071, pruned_loss=0.3539, over 3943289.74 frames. ], batch size: 172, lr: 4.91e-02, grad_scale: 8.0 +2023-03-27 15:34:25,448 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3350, 5.3549, 5.2276, 5.2797, 5.3267, 5.3542, 5.2099, 4.9075], + device='cuda:0'), covar=tensor([0.0205, 0.0278, 0.0391, 0.0262, 0.0246, 0.0277, 0.0287, 0.0269], + device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0030, 0.0033, 0.0032, 0.0032, 0.0032, 0.0033, 0.0036], + device='cuda:0'), out_proj_covar=tensor([2.9391e-05, 2.9411e-05, 3.2836e-05, 3.2403e-05, 2.9542e-05, 3.0296e-05, + 3.1040e-05, 3.3452e-05], device='cuda:0') +2023-03-27 15:35:50,285 INFO [train.py:892] (0/4) Epoch 1, batch 1400, loss[loss=0.5433, simple_loss=0.4976, pruned_loss=0.3012, over 19849.00 frames. ], tot_loss[loss=0.5677, simple_loss=0.5015, pruned_loss=0.3436, over 3944569.98 frames. 
], batch size: 56, lr: 4.91e-02, grad_scale: 8.0 +2023-03-27 15:35:53,769 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.724e+02 6.220e+02 8.188e+02 1.056e+03 1.766e+03, threshold=1.638e+03, percent-clipped=1.0 +2023-03-27 15:36:49,998 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1434.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 15:37:19,445 INFO [train.py:892] (0/4) Epoch 1, batch 1450, loss[loss=0.5861, simple_loss=0.5117, pruned_loss=0.3453, over 19809.00 frames. ], tot_loss[loss=0.561, simple_loss=0.4969, pruned_loss=0.3357, over 3945345.24 frames. ], batch size: 72, lr: 4.90e-02, grad_scale: 8.0 +2023-03-27 15:38:38,389 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1495.0, num_to_drop=2, layers_to_drop={1, 3} +2023-03-27 15:38:47,161 INFO [train.py:892] (0/4) Epoch 1, batch 1500, loss[loss=0.4971, simple_loss=0.4534, pruned_loss=0.2757, over 19812.00 frames. ], tot_loss[loss=0.5495, simple_loss=0.4894, pruned_loss=0.3242, over 3945920.31 frames. ], batch size: 96, lr: 4.89e-02, grad_scale: 8.0 +2023-03-27 15:38:51,708 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.400e+02 6.710e+02 8.347e+02 1.087e+03 2.003e+03, threshold=1.669e+03, percent-clipped=5.0 +2023-03-27 15:38:52,578 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1503.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 15:39:02,501 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1509.0, num_to_drop=2, layers_to_drop={2, 3} +2023-03-27 15:39:21,049 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.04 vs. limit=2.0 +2023-03-27 15:39:23,810 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8210, 4.6878, 4.6313, 4.5380, 4.7488, 4.8022, 4.6671, 4.7618], + device='cuda:0'), covar=tensor([0.0281, 0.0263, 0.0353, 0.0361, 0.0234, 0.0303, 0.0305, 0.0310], + device='cuda:0'), in_proj_covar=tensor([0.0043, 0.0046, 0.0045, 0.0048, 0.0043, 0.0043, 0.0044, 0.0037], + device='cuda:0'), out_proj_covar=tensor([3.8274e-05, 4.2868e-05, 4.5044e-05, 4.6566e-05, 3.8623e-05, 4.2887e-05, + 4.0386e-05, 3.3921e-05], device='cuda:0') +2023-03-27 15:40:11,653 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.16 vs. limit=2.0 +2023-03-27 15:40:15,406 INFO [train.py:892] (0/4) Epoch 1, batch 1550, loss[loss=0.7431, simple_loss=0.6708, pruned_loss=0.4166, over 17889.00 frames. ], tot_loss[loss=0.5418, simple_loss=0.4847, pruned_loss=0.3158, over 3944315.59 frames. ], batch size: 633, lr: 4.89e-02, grad_scale: 8.0 +2023-03-27 15:40:16,058 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1551.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 15:40:43,779 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1566.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 15:41:38,272 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-27 15:41:45,849 INFO [train.py:892] (0/4) Epoch 1, batch 1600, loss[loss=0.4801, simple_loss=0.4453, pruned_loss=0.2596, over 19696.00 frames. ], tot_loss[loss=0.5327, simple_loss=0.4787, pruned_loss=0.3069, over 3946303.67 frames. 
], batch size: 46, lr: 4.88e-02, grad_scale: 8.0 +2023-03-27 15:41:50,275 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.877e+02 6.710e+02 8.151e+02 1.107e+03 2.056e+03, threshold=1.630e+03, percent-clipped=4.0 +2023-03-27 15:42:03,852 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1611.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 15:42:30,878 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1627.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-27 15:43:13,160 INFO [train.py:892] (0/4) Epoch 1, batch 1650, loss[loss=0.5462, simple_loss=0.4912, pruned_loss=0.3058, over 19765.00 frames. ], tot_loss[loss=0.5288, simple_loss=0.4763, pruned_loss=0.3021, over 3946966.86 frames. ], batch size: 236, lr: 4.87e-02, grad_scale: 8.0 +2023-03-27 15:43:50,579 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1672.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-27 15:44:39,832 INFO [train.py:892] (0/4) Epoch 1, batch 1700, loss[loss=0.4619, simple_loss=0.4402, pruned_loss=0.2414, over 19581.00 frames. ], tot_loss[loss=0.5193, simple_loss=0.4709, pruned_loss=0.2932, over 3947706.09 frames. ], batch size: 49, lr: 4.86e-02, grad_scale: 8.0 +2023-03-27 15:44:43,067 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 6.108e+02 7.327e+02 1.005e+03 2.757e+03, threshold=1.465e+03, percent-clipped=5.0 +2023-03-27 15:46:05,077 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7474, 2.2726, 2.2715, 3.0439, 3.1511, 1.9462, 2.9012, 2.5695], + device='cuda:0'), covar=tensor([0.1093, 0.0825, 0.1338, 0.0481, 0.0549, 0.1756, 0.0855, 0.0798], + device='cuda:0'), in_proj_covar=tensor([0.0044, 0.0046, 0.0047, 0.0045, 0.0049, 0.0045, 0.0044, 0.0044], + device='cuda:0'), out_proj_covar=tensor([4.0341e-05, 4.1119e-05, 4.1164e-05, 3.9267e-05, 4.5075e-05, 4.0507e-05, + 3.7846e-05, 4.0016e-05], device='cuda:0') +2023-03-27 15:46:05,815 INFO [train.py:892] (0/4) Epoch 1, batch 1750, loss[loss=0.4478, simple_loss=0.4383, pruned_loss=0.2267, over 19864.00 frames. ], tot_loss[loss=0.5139, simple_loss=0.468, pruned_loss=0.2876, over 3945385.39 frames. ], batch size: 48, lr: 4.86e-02, grad_scale: 8.0 +2023-03-27 15:47:05,104 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1790.0, num_to_drop=1, layers_to_drop={3} +2023-03-27 15:47:20,401 INFO [train.py:892] (0/4) Epoch 1, batch 1800, loss[loss=0.7217, simple_loss=0.6154, pruned_loss=0.4211, over 19409.00 frames. ], tot_loss[loss=0.5126, simple_loss=0.4674, pruned_loss=0.2853, over 3946049.37 frames. ], batch size: 412, lr: 4.85e-02, grad_scale: 8.0 +2023-03-27 15:47:23,278 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.834e+02 6.979e+02 1.011e+03 1.306e+03 2.784e+03, threshold=2.021e+03, percent-clipped=17.0 +2023-03-27 15:47:32,279 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1809.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 15:48:13,719 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1838.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 15:48:31,000 INFO [train.py:892] (0/4) Epoch 1, batch 1850, loss[loss=0.4967, simple_loss=0.4691, pruned_loss=0.2623, over 19672.00 frames. ], tot_loss[loss=0.5084, simple_loss=0.4662, pruned_loss=0.2803, over 3945994.84 frames. 
], batch size: 56, lr: 4.84e-02, grad_scale: 8.0 +2023-03-27 15:48:37,146 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-1.pt +2023-03-27 15:49:26,261 INFO [train.py:892] (0/4) Epoch 2, batch 0, loss[loss=0.498, simple_loss=0.4457, pruned_loss=0.277, over 19760.00 frames. ], tot_loss[loss=0.498, simple_loss=0.4457, pruned_loss=0.277, over 19760.00 frames. ], batch size: 182, lr: 4.75e-02, grad_scale: 8.0 +2023-03-27 15:49:26,262 INFO [train.py:917] (0/4) Computing validation loss +2023-03-27 15:49:48,746 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.4848, 0.7376, 1.4382, 0.9674, 1.2972, 1.4838, 1.4330, 1.3098], + device='cuda:0'), covar=tensor([0.5096, 1.5934, 0.4387, 1.3410, 0.5425, 0.3416, 0.2767, 0.3499], + device='cuda:0'), in_proj_covar=tensor([0.0026, 0.0049, 0.0025, 0.0052, 0.0028, 0.0032, 0.0027, 0.0027], + device='cuda:0'), out_proj_covar=tensor([1.9811e-05, 4.4370e-05, 1.9254e-05, 4.6820e-05, 2.2890e-05, 2.4864e-05, + 2.0715e-05, 1.9702e-05], device='cuda:0') +2023-03-27 15:49:52,687 INFO [train.py:926] (0/4) Epoch 2, validation: loss=0.3819, simple_loss=0.4085, pruned_loss=0.1743, over 2883724.00 frames. +2023-03-27 15:49:52,688 INFO [train.py:927] (0/4) Maximum memory allocated so far is 18009MB +2023-03-27 15:49:56,681 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1857.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 15:49:56,786 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1857.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 15:50:22,325 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1870.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 15:51:15,898 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1899.0, num_to_drop=2, layers_to_drop={0, 3} +2023-03-27 15:51:22,947 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.599e+02 7.082e+02 8.943e+02 1.129e+03 2.587e+03, threshold=1.789e+03, percent-clipped=3.0 +2023-03-27 15:51:29,936 INFO [train.py:892] (0/4) Epoch 2, batch 50, loss[loss=0.3777, simple_loss=0.3918, pruned_loss=0.1802, over 19613.00 frames. ], tot_loss[loss=0.4694, simple_loss=0.4415, pruned_loss=0.2488, over 889752.07 frames. ], batch size: 51, lr: 4.74e-02, grad_scale: 8.0 +2023-03-27 15:51:48,502 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.05 vs. 
limit=2.0 +2023-03-27 15:51:53,346 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1918.0, num_to_drop=2, layers_to_drop={1, 3} +2023-03-27 15:51:59,840 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1922.0, num_to_drop=1, layers_to_drop={3} +2023-03-27 15:52:16,671 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1931.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-27 15:52:51,472 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2266, 3.1373, 3.2513, 3.3849, 3.2624, 3.0129, 2.7403, 3.0432], + device='cuda:0'), covar=tensor([0.0328, 0.0233, 0.0563, 0.0232, 0.0213, 0.0330, 0.0401, 0.0287], + device='cuda:0'), in_proj_covar=tensor([0.0032, 0.0029, 0.0031, 0.0029, 0.0032, 0.0035, 0.0037, 0.0034], + device='cuda:0'), out_proj_covar=tensor([2.5537e-05, 2.3005e-05, 2.6740e-05, 2.1990e-05, 2.5017e-05, 2.8645e-05, + 3.0695e-05, 2.8095e-05], device='cuda:0') +2023-03-27 15:53:00,439 INFO [train.py:892] (0/4) Epoch 2, batch 100, loss[loss=0.4746, simple_loss=0.4599, pruned_loss=0.2444, over 19903.00 frames. ], tot_loss[loss=0.4758, simple_loss=0.4451, pruned_loss=0.2535, over 1568875.69 frames. ], batch size: 50, lr: 4.73e-02, grad_scale: 8.0 +2023-03-27 15:53:21,305 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1967.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 15:54:20,772 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-2000.pt +2023-03-27 15:54:28,039 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.89 vs. limit=2.0 +2023-03-27 15:54:30,522 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.745e+02 7.981e+02 1.059e+03 1.451e+03 2.427e+03, threshold=2.118e+03, percent-clipped=7.0 +2023-03-27 15:54:36,162 INFO [train.py:892] (0/4) Epoch 2, batch 150, loss[loss=0.4012, simple_loss=0.4004, pruned_loss=0.201, over 19950.00 frames. ], tot_loss[loss=0.4716, simple_loss=0.4429, pruned_loss=0.2503, over 2097411.74 frames. ], batch size: 46, lr: 4.72e-02, grad_scale: 8.0 +2023-03-27 15:56:17,765 INFO [train.py:892] (0/4) Epoch 2, batch 200, loss[loss=0.4169, simple_loss=0.4174, pruned_loss=0.2082, over 19871.00 frames. ], tot_loss[loss=0.462, simple_loss=0.4387, pruned_loss=0.2428, over 2507566.18 frames. ], batch size: 46, lr: 4.72e-02, grad_scale: 16.0 +2023-03-27 15:57:23,674 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.25 vs. limit=2.0 +2023-03-27 15:57:25,322 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2090.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-27 15:57:51,092 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.526e+02 6.247e+02 7.217e+02 9.235e+02 2.365e+03, threshold=1.443e+03, percent-clipped=1.0 +2023-03-27 15:57:56,590 INFO [train.py:892] (0/4) Epoch 2, batch 250, loss[loss=0.4222, simple_loss=0.4155, pruned_loss=0.2145, over 19694.00 frames. ], tot_loss[loss=0.4529, simple_loss=0.4328, pruned_loss=0.2365, over 2828276.19 frames. ], batch size: 46, lr: 4.71e-02, grad_scale: 16.0 +2023-03-27 15:59:00,447 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2138.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 15:59:32,881 INFO [train.py:892] (0/4) Epoch 2, batch 300, loss[loss=0.387, simple_loss=0.3868, pruned_loss=0.1936, over 19703.00 frames. 
], tot_loss[loss=0.4502, simple_loss=0.4326, pruned_loss=0.2339, over 3077074.38 frames. ], batch size: 101, lr: 4.70e-02, grad_scale: 16.0 +2023-03-27 15:59:40,041 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2069, 5.6404, 5.8047, 5.8734, 5.8968, 5.3078, 5.5503, 5.3097], + device='cuda:0'), covar=tensor([0.0671, 0.0512, 0.0771, 0.0388, 0.0596, 0.0943, 0.0596, 0.1204], + device='cuda:0'), in_proj_covar=tensor([0.0108, 0.0081, 0.0109, 0.0085, 0.0096, 0.0091, 0.0095, 0.0124], + device='cuda:0'), out_proj_covar=tensor([1.1735e-04, 9.1453e-05, 1.2265e-04, 9.4426e-05, 1.1961e-04, 1.0134e-04, + 1.0102e-04, 1.4449e-04], device='cuda:0') +2023-03-27 16:00:53,317 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2194.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:01:07,392 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.82 vs. limit=2.0 +2023-03-27 16:01:11,031 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.616e+02 6.231e+02 7.603e+02 9.238e+02 1.387e+03, threshold=1.521e+03, percent-clipped=0.0 +2023-03-27 16:01:18,830 INFO [train.py:892] (0/4) Epoch 2, batch 350, loss[loss=0.6919, simple_loss=0.608, pruned_loss=0.3879, over 19188.00 frames. ], tot_loss[loss=0.4481, simple_loss=0.4323, pruned_loss=0.232, over 3268889.88 frames. ], batch size: 452, lr: 4.69e-02, grad_scale: 16.0 +2023-03-27 16:01:32,702 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2213.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:01:49,438 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2222.0, num_to_drop=2, layers_to_drop={0, 2} +2023-03-27 16:01:56,880 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2226.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:02:20,782 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-03-27 16:02:34,117 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2324, 4.8788, 4.7076, 5.1070, 4.7829, 5.3757, 4.9520, 5.3959], + device='cuda:0'), covar=tensor([0.0211, 0.0227, 0.0348, 0.0209, 0.0373, 0.0097, 0.0243, 0.0273], + device='cuda:0'), in_proj_covar=tensor([0.0057, 0.0069, 0.0077, 0.0065, 0.0073, 0.0061, 0.0078, 0.0068], + device='cuda:0'), out_proj_covar=tensor([5.5405e-05, 7.5135e-05, 8.5494e-05, 6.8938e-05, 8.0508e-05, 6.7568e-05, + 7.9345e-05, 7.6752e-05], device='cuda:0') +2023-03-27 16:02:54,469 INFO [train.py:892] (0/4) Epoch 2, batch 400, loss[loss=0.4403, simple_loss=0.414, pruned_loss=0.2333, over 19899.00 frames. ], tot_loss[loss=0.4444, simple_loss=0.4292, pruned_loss=0.2298, over 3420949.26 frames. 
], batch size: 94, lr: 4.68e-02, grad_scale: 16.0 +2023-03-27 16:03:15,686 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2267.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 16:03:20,990 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2270.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:03:59,736 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5970, 3.5314, 3.5870, 3.9511, 3.2251, 3.2868, 3.3666, 3.2313], + device='cuda:0'), covar=tensor([0.0259, 0.0257, 0.0249, 0.0173, 0.0366, 0.0254, 0.0297, 0.0525], + device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0033, 0.0032, 0.0027, 0.0032, 0.0034, 0.0034, 0.0035], + device='cuda:0'), out_proj_covar=tensor([2.3068e-05, 2.7157e-05, 2.4635e-05, 2.0098e-05, 2.4798e-05, 2.6347e-05, + 2.7765e-05, 2.8414e-05], device='cuda:0') +2023-03-27 16:04:26,669 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.159e+02 7.202e+02 8.951e+02 1.212e+03 2.125e+03, threshold=1.790e+03, percent-clipped=11.0 +2023-03-27 16:04:31,840 INFO [train.py:892] (0/4) Epoch 2, batch 450, loss[loss=0.6077, simple_loss=0.5508, pruned_loss=0.3323, over 19424.00 frames. ], tot_loss[loss=0.4465, simple_loss=0.4314, pruned_loss=0.2308, over 3537830.03 frames. ], batch size: 412, lr: 4.67e-02, grad_scale: 16.0 +2023-03-27 16:04:51,214 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2315.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:05:40,723 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2340.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 16:06:13,198 INFO [train.py:892] (0/4) Epoch 2, batch 500, loss[loss=0.3761, simple_loss=0.3811, pruned_loss=0.1856, over 19895.00 frames. ], tot_loss[loss=0.4393, simple_loss=0.4268, pruned_loss=0.2259, over 3630946.11 frames. ], batch size: 94, lr: 4.66e-02, grad_scale: 16.0 +2023-03-27 16:07:17,624 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. limit=2.0 +2023-03-27 16:07:39,828 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.8490, 5.9878, 5.9217, 5.9603, 5.9670, 6.0603, 5.5954, 5.2240], + device='cuda:0'), covar=tensor([0.0238, 0.0258, 0.0299, 0.0184, 0.0168, 0.0251, 0.0254, 0.0395], + device='cuda:0'), in_proj_covar=tensor([0.0063, 0.0060, 0.0069, 0.0063, 0.0066, 0.0059, 0.0073, 0.0089], + device='cuda:0'), out_proj_covar=tensor([6.9124e-05, 6.4820e-05, 7.3918e-05, 6.8825e-05, 6.7792e-05, 6.2368e-05, + 7.5927e-05, 9.7935e-05], device='cuda:0') +2023-03-27 16:07:43,977 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2401.0, num_to_drop=2, layers_to_drop={1, 3} +2023-03-27 16:07:46,784 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.849e+02 6.111e+02 7.665e+02 9.809e+02 1.525e+03, threshold=1.533e+03, percent-clipped=0.0 +2023-03-27 16:07:51,924 INFO [train.py:892] (0/4) Epoch 2, batch 550, loss[loss=0.4686, simple_loss=0.4552, pruned_loss=0.241, over 19745.00 frames. ], tot_loss[loss=0.4366, simple_loss=0.4256, pruned_loss=0.2238, over 3699637.19 frames. ], batch size: 259, lr: 4.65e-02, grad_scale: 16.0 +2023-03-27 16:09:31,335 INFO [train.py:892] (0/4) Epoch 2, batch 600, loss[loss=0.3926, simple_loss=0.3853, pruned_loss=0.1999, over 19787.00 frames. ], tot_loss[loss=0.4355, simple_loss=0.4254, pruned_loss=0.2228, over 3752322.64 frames. 
], batch size: 172, lr: 4.64e-02, grad_scale: 16.0 +2023-03-27 16:10:49,076 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2494.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 16:11:07,766 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.512e+02 7.276e+02 8.474e+02 1.052e+03 1.837e+03, threshold=1.695e+03, percent-clipped=3.0 +2023-03-27 16:11:13,424 INFO [train.py:892] (0/4) Epoch 2, batch 650, loss[loss=0.4194, simple_loss=0.4023, pruned_loss=0.2183, over 19806.00 frames. ], tot_loss[loss=0.4281, simple_loss=0.4197, pruned_loss=0.2183, over 3797536.39 frames. ], batch size: 148, lr: 4.64e-02, grad_scale: 16.0 +2023-03-27 16:11:27,193 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2513.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:11:50,880 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2526.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:12:26,493 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2542.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:12:51,150 INFO [train.py:892] (0/4) Epoch 2, batch 700, loss[loss=0.3935, simple_loss=0.4035, pruned_loss=0.1918, over 19820.00 frames. ], tot_loss[loss=0.4253, simple_loss=0.4183, pruned_loss=0.2161, over 3831216.79 frames. ], batch size: 57, lr: 4.63e-02, grad_scale: 16.0 +2023-03-27 16:13:01,016 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2561.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:13:27,668 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2574.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:14:23,666 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.137e+02 6.142e+02 8.213e+02 1.055e+03 2.684e+03, threshold=1.643e+03, percent-clipped=4.0 +2023-03-27 16:14:30,613 INFO [train.py:892] (0/4) Epoch 2, batch 750, loss[loss=0.4058, simple_loss=0.4007, pruned_loss=0.2054, over 19772.00 frames. ], tot_loss[loss=0.421, simple_loss=0.4155, pruned_loss=0.2133, over 3858509.38 frames. ], batch size: 198, lr: 4.62e-02, grad_scale: 16.0 +2023-03-27 16:15:54,423 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7344, 4.0921, 4.7108, 4.2193, 4.6294, 3.7225, 4.1179, 4.4797], + device='cuda:0'), covar=tensor([0.0179, 0.0233, 0.0163, 0.0255, 0.0187, 0.0431, 0.0304, 0.0200], + device='cuda:0'), in_proj_covar=tensor([0.0036, 0.0036, 0.0039, 0.0044, 0.0042, 0.0049, 0.0041, 0.0037], + device='cuda:0'), out_proj_covar=tensor([3.9179e-05, 3.9126e-05, 4.1256e-05, 4.7800e-05, 4.7945e-05, 5.2895e-05, + 4.3560e-05, 3.9439e-05], device='cuda:0') +2023-03-27 16:16:13,084 INFO [train.py:892] (0/4) Epoch 2, batch 800, loss[loss=0.5647, simple_loss=0.5242, pruned_loss=0.3027, over 19623.00 frames. ], tot_loss[loss=0.4192, simple_loss=0.4147, pruned_loss=0.2119, over 3879164.90 frames. 
], batch size: 367, lr: 4.61e-02, grad_scale: 16.0 +2023-03-27 16:16:16,008 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9268, 2.7031, 1.8859, 2.8348, 2.9857, 1.6323, 2.5586, 2.4661], + device='cuda:0'), covar=tensor([0.0246, 0.0456, 0.1759, 0.0143, 0.0146, 0.2057, 0.0281, 0.0190], + device='cuda:0'), in_proj_covar=tensor([0.0047, 0.0047, 0.0091, 0.0029, 0.0032, 0.0101, 0.0049, 0.0042], + device='cuda:0'), out_proj_covar=tensor([3.4972e-05, 4.0685e-05, 8.2672e-05, 2.2415e-05, 2.4141e-05, 8.9555e-05, + 3.8771e-05, 2.9429e-05], device='cuda:0') +2023-03-27 16:17:11,639 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2685.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 16:17:33,384 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2696.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 16:17:37,777 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.91 vs. limit=2.0 +2023-03-27 16:17:45,222 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.299e+02 7.441e+02 9.827e+02 1.217e+03 2.716e+03, threshold=1.965e+03, percent-clipped=6.0 +2023-03-27 16:17:51,042 INFO [train.py:892] (0/4) Epoch 2, batch 850, loss[loss=0.3921, simple_loss=0.4112, pruned_loss=0.1866, over 19558.00 frames. ], tot_loss[loss=0.4208, simple_loss=0.4161, pruned_loss=0.2127, over 3894192.74 frames. ], batch size: 60, lr: 4.60e-02, grad_scale: 16.0 +2023-03-27 16:19:12,066 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2746.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-27 16:19:32,086 INFO [train.py:892] (0/4) Epoch 2, batch 900, loss[loss=0.3643, simple_loss=0.3745, pruned_loss=0.177, over 19643.00 frames. ], tot_loss[loss=0.4169, simple_loss=0.4136, pruned_loss=0.21, over 3906375.01 frames. ], batch size: 47, lr: 4.59e-02, grad_scale: 16.0 +2023-03-27 16:20:57,668 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6924, 2.0063, 3.1060, 2.7584, 4.0289, 3.4087, 3.8596, 3.7690], + device='cuda:0'), covar=tensor([0.0209, 0.1761, 0.0347, 0.1237, 0.0169, 0.0248, 0.0121, 0.0097], + device='cuda:0'), in_proj_covar=tensor([0.0044, 0.0088, 0.0043, 0.0090, 0.0036, 0.0046, 0.0037, 0.0039], + device='cuda:0'), out_proj_covar=tensor([3.7141e-05, 7.8085e-05, 3.5354e-05, 8.2637e-05, 3.1119e-05, 3.8403e-05, + 2.9880e-05, 2.8746e-05], device='cuda:0') +2023-03-27 16:21:04,236 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.961e+02 6.224e+02 7.989e+02 1.015e+03 2.345e+03, threshold=1.598e+03, percent-clipped=4.0 +2023-03-27 16:21:10,174 INFO [train.py:892] (0/4) Epoch 2, batch 950, loss[loss=0.4419, simple_loss=0.4327, pruned_loss=0.2255, over 19675.00 frames. ], tot_loss[loss=0.4165, simple_loss=0.4135, pruned_loss=0.2098, over 3916590.42 frames. 
], batch size: 64, lr: 4.58e-02, grad_scale: 16.0 +2023-03-27 16:21:14,725 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8859, 4.4449, 4.2993, 4.7804, 4.5089, 4.9679, 4.6042, 5.0177], + device='cuda:0'), covar=tensor([0.0249, 0.0294, 0.0405, 0.0178, 0.0388, 0.0108, 0.0199, 0.0396], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0078, 0.0087, 0.0071, 0.0080, 0.0064, 0.0086, 0.0083], + device='cuda:0'), out_proj_covar=tensor([7.1897e-05, 9.6127e-05, 1.0663e-04, 8.5989e-05, 1.0057e-04, 8.1117e-05, + 9.4995e-05, 1.0171e-04], device='cuda:0') +2023-03-27 16:21:45,471 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2824.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 16:22:47,332 INFO [train.py:892] (0/4) Epoch 2, batch 1000, loss[loss=0.3865, simple_loss=0.3918, pruned_loss=0.1906, over 19872.00 frames. ], tot_loss[loss=0.4132, simple_loss=0.4112, pruned_loss=0.2076, over 3924733.15 frames. ], batch size: 108, lr: 4.57e-02, grad_scale: 16.0 +2023-03-27 16:23:47,312 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2885.0, num_to_drop=2, layers_to_drop={0, 2} +2023-03-27 16:24:07,308 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.7964, 5.9779, 5.8746, 5.8497, 5.8466, 5.9703, 5.3028, 5.2536], + device='cuda:0'), covar=tensor([0.0358, 0.0255, 0.0397, 0.0279, 0.0267, 0.0415, 0.0380, 0.0573], + device='cuda:0'), in_proj_covar=tensor([0.0075, 0.0073, 0.0086, 0.0081, 0.0084, 0.0070, 0.0091, 0.0113], + device='cuda:0'), out_proj_covar=tensor([8.8484e-05, 8.4674e-05, 9.8326e-05, 9.4178e-05, 9.3869e-05, 7.9121e-05, + 1.0060e-04, 1.3340e-04], device='cuda:0') +2023-03-27 16:24:23,437 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.209e+02 6.834e+02 8.062e+02 1.027e+03 2.036e+03, threshold=1.612e+03, percent-clipped=4.0 +2023-03-27 16:24:28,998 INFO [train.py:892] (0/4) Epoch 2, batch 1050, loss[loss=0.419, simple_loss=0.4308, pruned_loss=0.2036, over 19697.00 frames. ], tot_loss[loss=0.4111, simple_loss=0.4105, pruned_loss=0.2058, over 3926893.18 frames. ], batch size: 82, lr: 4.56e-02, grad_scale: 16.0 +2023-03-27 16:25:29,911 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2937.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 16:26:09,029 INFO [train.py:892] (0/4) Epoch 2, batch 1100, loss[loss=0.4029, simple_loss=0.4234, pruned_loss=0.1912, over 19866.00 frames. ], tot_loss[loss=0.4099, simple_loss=0.4098, pruned_loss=0.205, over 3930097.92 frames. 
], batch size: 58, lr: 4.55e-02, grad_scale: 16.0 +2023-03-27 16:26:31,623 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.2272, 2.6664, 2.8573, 2.8017, 2.2497, 2.0747, 2.4144, 2.0822], + device='cuda:0'), covar=tensor([0.0832, 0.0230, 0.0222, 0.0196, 0.0708, 0.0527, 0.0334, 0.0314], + device='cuda:0'), in_proj_covar=tensor([0.0034, 0.0027, 0.0028, 0.0026, 0.0033, 0.0029, 0.0029, 0.0027], + device='cuda:0'), out_proj_covar=tensor([3.3045e-05, 2.5653e-05, 2.6787e-05, 2.2688e-05, 3.2406e-05, 2.9527e-05, + 2.6645e-05, 2.5701e-05], device='cuda:0') +2023-03-27 16:27:28,883 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2996.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 16:27:32,714 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2998.0, num_to_drop=2, layers_to_drop={0, 2} +2023-03-27 16:27:41,290 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.835e+02 7.115e+02 9.003e+02 1.112e+03 2.091e+03, threshold=1.801e+03, percent-clipped=6.0 +2023-03-27 16:27:46,916 INFO [train.py:892] (0/4) Epoch 2, batch 1150, loss[loss=0.3566, simple_loss=0.3807, pruned_loss=0.1663, over 19689.00 frames. ], tot_loss[loss=0.4089, simple_loss=0.4093, pruned_loss=0.2042, over 3933485.23 frames. ], batch size: 56, lr: 4.54e-02, grad_scale: 16.0 +2023-03-27 16:28:17,740 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3019.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 16:29:00,171 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3041.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:29:00,682 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.66 vs. limit=2.0 +2023-03-27 16:29:05,918 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3044.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 16:29:18,815 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3748, 3.9001, 4.1872, 3.7251, 4.0569, 4.1961, 3.6951, 4.3784], + device='cuda:0'), covar=tensor([0.2222, 0.0374, 0.0322, 0.0472, 0.0275, 0.0310, 0.0361, 0.0268], + device='cuda:0'), in_proj_covar=tensor([0.0125, 0.0093, 0.0080, 0.0083, 0.0079, 0.0083, 0.0079, 0.0071], + device='cuda:0'), out_proj_covar=tensor([1.4768e-04, 1.1474e-04, 1.0342e-04, 1.0150e-04, 9.3693e-05, 1.0351e-04, + 9.7961e-05, 8.8961e-05], device='cuda:0') +2023-03-27 16:29:30,567 INFO [train.py:892] (0/4) Epoch 2, batch 1200, loss[loss=0.3968, simple_loss=0.3975, pruned_loss=0.198, over 19791.00 frames. ], tot_loss[loss=0.406, simple_loss=0.4077, pruned_loss=0.2022, over 3936444.42 frames. ], batch size: 120, lr: 4.53e-02, grad_scale: 16.0 +2023-03-27 16:30:18,000 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3080.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-27 16:31:04,251 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.272e+02 6.636e+02 8.301e+02 1.092e+03 3.055e+03, threshold=1.660e+03, percent-clipped=1.0 +2023-03-27 16:31:10,197 INFO [train.py:892] (0/4) Epoch 2, batch 1250, loss[loss=0.3185, simple_loss=0.3435, pruned_loss=0.1468, over 19802.00 frames. ], tot_loss[loss=0.3998, simple_loss=0.4031, pruned_loss=0.1982, over 3940611.85 frames. 
], batch size: 114, lr: 4.52e-02, grad_scale: 16.0 +2023-03-27 16:32:10,099 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6013, 1.5346, 2.6496, 2.4794, 2.5242, 2.3264, 2.3412, 2.4254], + device='cuda:0'), covar=tensor([0.0230, 0.1271, 0.0214, 0.0212, 0.0270, 0.0216, 0.0324, 0.0506], + device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0065, 0.0040, 0.0035, 0.0038, 0.0042, 0.0040, 0.0044], + device='cuda:0'), out_proj_covar=tensor([3.6089e-05, 6.9937e-05, 3.5362e-05, 3.2435e-05, 3.8439e-05, 3.9408e-05, + 4.0247e-05, 4.6892e-05], device='cuda:0') +2023-03-27 16:32:49,631 INFO [train.py:892] (0/4) Epoch 2, batch 1300, loss[loss=0.3607, simple_loss=0.3706, pruned_loss=0.1754, over 19757.00 frames. ], tot_loss[loss=0.3999, simple_loss=0.4036, pruned_loss=0.1981, over 3942153.62 frames. ], batch size: 97, lr: 4.51e-02, grad_scale: 16.0 +2023-03-27 16:33:37,244 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3180.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:34:05,497 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-27 16:34:23,392 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.299e+02 7.276e+02 8.986e+02 1.118e+03 2.110e+03, threshold=1.797e+03, percent-clipped=4.0 +2023-03-27 16:34:28,595 INFO [train.py:892] (0/4) Epoch 2, batch 1350, loss[loss=0.3886, simple_loss=0.3836, pruned_loss=0.1968, over 19736.00 frames. ], tot_loss[loss=0.3976, simple_loss=0.4024, pruned_loss=0.1964, over 3942529.29 frames. ], batch size: 71, lr: 4.50e-02, grad_scale: 16.0 +2023-03-27 16:34:49,485 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6369, 3.8762, 4.2166, 3.8799, 4.0996, 4.1511, 3.8198, 4.3636], + device='cuda:0'), covar=tensor([0.1453, 0.0369, 0.0249, 0.0327, 0.0254, 0.0268, 0.0306, 0.0244], + device='cuda:0'), in_proj_covar=tensor([0.0136, 0.0096, 0.0084, 0.0085, 0.0083, 0.0085, 0.0080, 0.0074], + device='cuda:0'), out_proj_covar=tensor([1.6252e-04, 1.2313e-04, 1.1112e-04, 1.0643e-04, 1.0235e-04, 1.0943e-04, + 1.0091e-04, 9.5351e-05], device='cuda:0') +2023-03-27 16:36:05,219 INFO [train.py:892] (0/4) Epoch 2, batch 1400, loss[loss=0.3594, simple_loss=0.3768, pruned_loss=0.171, over 19660.00 frames. ], tot_loss[loss=0.3965, simple_loss=0.4013, pruned_loss=0.1958, over 3944960.68 frames. ], batch size: 43, lr: 4.49e-02, grad_scale: 16.0 +2023-03-27 16:37:09,980 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3288.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 16:37:18,858 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3293.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:37:38,113 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.923e+02 7.473e+02 1.007e+03 1.301e+03 1.953e+03, threshold=2.013e+03, percent-clipped=1.0 +2023-03-27 16:37:43,374 INFO [train.py:892] (0/4) Epoch 2, batch 1450, loss[loss=0.3619, simple_loss=0.3787, pruned_loss=0.1726, over 19859.00 frames. ], tot_loss[loss=0.3949, simple_loss=0.4002, pruned_loss=0.1948, over 3946376.53 frames. 
], batch size: 104, lr: 4.48e-02, grad_scale: 16.0 +2023-03-27 16:38:06,151 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.9880, 2.3647, 1.6689, 2.0506, 2.3448, 1.5726, 2.3001, 2.2318], + device='cuda:0'), covar=tensor([0.0735, 0.0432, 0.0796, 0.0237, 0.0403, 0.0746, 0.0602, 0.0896], + device='cuda:0'), in_proj_covar=tensor([0.0025, 0.0027, 0.0031, 0.0027, 0.0029, 0.0029, 0.0026, 0.0029], + device='cuda:0'), out_proj_covar=tensor([2.8023e-05, 2.9404e-05, 3.5797e-05, 2.9109e-05, 3.3406e-05, 3.3746e-05, + 3.0514e-05, 3.4708e-05], device='cuda:0') +2023-03-27 16:38:20,578 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6918, 4.1431, 4.6497, 4.1764, 4.6274, 3.7715, 3.9980, 4.3815], + device='cuda:0'), covar=tensor([0.0148, 0.0187, 0.0101, 0.0206, 0.0137, 0.0365, 0.0546, 0.0175], + device='cuda:0'), in_proj_covar=tensor([0.0041, 0.0043, 0.0046, 0.0050, 0.0049, 0.0058, 0.0056, 0.0044], + device='cuda:0'), out_proj_covar=tensor([5.4546e-05, 6.2532e-05, 5.9937e-05, 6.9450e-05, 7.2453e-05, 7.8610e-05, + 8.0142e-05, 5.7993e-05], device='cuda:0') +2023-03-27 16:38:50,398 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.82 vs. limit=5.0 +2023-03-27 16:38:55,741 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3341.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:39:13,425 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7108, 4.4725, 4.7818, 4.1507, 4.5521, 4.7215, 4.3814, 5.0425], + device='cuda:0'), covar=tensor([0.2285, 0.0336, 0.0285, 0.0348, 0.0282, 0.0251, 0.0237, 0.0215], + device='cuda:0'), in_proj_covar=tensor([0.0150, 0.0099, 0.0088, 0.0088, 0.0087, 0.0088, 0.0082, 0.0077], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-03-27 16:39:13,524 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3349.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-27 16:39:26,372 INFO [train.py:892] (0/4) Epoch 2, batch 1500, loss[loss=0.3312, simple_loss=0.3445, pruned_loss=0.159, over 19783.00 frames. ], tot_loss[loss=0.3931, simple_loss=0.399, pruned_loss=0.1936, over 3947410.75 frames. ], batch size: 168, lr: 4.47e-02, grad_scale: 16.0 +2023-03-27 16:40:03,786 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3375.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:40:30,526 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1270, 3.7297, 3.6505, 4.0524, 3.8558, 3.9938, 3.9585, 4.2503], + device='cuda:0'), covar=tensor([0.0242, 0.0284, 0.0400, 0.0185, 0.0365, 0.0255, 0.0257, 0.0254], + device='cuda:0'), in_proj_covar=tensor([0.0072, 0.0083, 0.0090, 0.0075, 0.0084, 0.0063, 0.0089, 0.0093], + device='cuda:0'), out_proj_covar=tensor([8.3518e-05, 1.1342e-04, 1.1773e-04, 9.8930e-05, 1.1850e-04, 8.5936e-05, + 1.0749e-04, 1.2353e-04], device='cuda:0') +2023-03-27 16:40:34,009 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3389.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:40:59,889 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.490e+02 6.824e+02 8.232e+02 9.819e+02 1.797e+03, threshold=1.646e+03, percent-clipped=0.0 +2023-03-27 16:41:05,677 INFO [train.py:892] (0/4) Epoch 2, batch 1550, loss[loss=0.4097, simple_loss=0.4072, pruned_loss=0.2061, over 19818.00 frames. ], tot_loss[loss=0.3898, simple_loss=0.3973, pruned_loss=0.1912, over 3947586.50 frames. 
], batch size: 229, lr: 4.46e-02, grad_scale: 16.0 +2023-03-27 16:42:46,607 INFO [train.py:892] (0/4) Epoch 2, batch 1600, loss[loss=0.3947, simple_loss=0.4169, pruned_loss=0.1862, over 19580.00 frames. ], tot_loss[loss=0.3898, simple_loss=0.3976, pruned_loss=0.191, over 3947489.20 frames. ], batch size: 49, lr: 4.45e-02, grad_scale: 16.0 +2023-03-27 16:43:35,506 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3480.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:44:24,962 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.425e+02 7.682e+02 9.382e+02 1.262e+03 3.177e+03, threshold=1.876e+03, percent-clipped=11.0 +2023-03-27 16:44:28,481 INFO [train.py:892] (0/4) Epoch 2, batch 1650, loss[loss=0.3809, simple_loss=0.3786, pruned_loss=0.1916, over 19821.00 frames. ], tot_loss[loss=0.3865, simple_loss=0.3958, pruned_loss=0.1886, over 3948439.84 frames. ], batch size: 127, lr: 4.44e-02, grad_scale: 8.0 +2023-03-27 16:45:11,861 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3528.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:45:50,187 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3546.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 16:46:07,498 INFO [train.py:892] (0/4) Epoch 2, batch 1700, loss[loss=0.3439, simple_loss=0.3623, pruned_loss=0.1628, over 19769.00 frames. ], tot_loss[loss=0.3866, simple_loss=0.3954, pruned_loss=0.1889, over 3949885.26 frames. ], batch size: 182, lr: 4.43e-02, grad_scale: 8.0 +2023-03-27 16:47:22,557 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3593.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:47:42,005 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.530e+02 9.023e+02 1.103e+03 1.410e+03 2.321e+03, threshold=2.205e+03, percent-clipped=10.0 +2023-03-27 16:47:45,755 INFO [train.py:892] (0/4) Epoch 2, batch 1750, loss[loss=0.3657, simple_loss=0.3767, pruned_loss=0.1773, over 19834.00 frames. ], tot_loss[loss=0.3839, simple_loss=0.3937, pruned_loss=0.1871, over 3949111.14 frames. ], batch size: 75, lr: 4.42e-02, grad_scale: 8.0 +2023-03-27 16:47:48,521 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3607.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-27 16:48:50,033 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3641.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:48:55,205 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3644.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 16:49:15,997 INFO [train.py:892] (0/4) Epoch 2, batch 1800, loss[loss=0.3988, simple_loss=0.4075, pruned_loss=0.195, over 19955.00 frames. ], tot_loss[loss=0.3823, simple_loss=0.3922, pruned_loss=0.1862, over 3949550.11 frames. 
], batch size: 53, lr: 4.41e-02, grad_scale: 8.0 +2023-03-27 16:49:49,836 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3675.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:49:51,467 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3676.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 16:50:07,998 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.2512, 2.0222, 2.4177, 1.1891, 2.0790, 2.3585, 1.8966, 2.0996], + device='cuda:0'), covar=tensor([0.0191, 0.0318, 0.0140, 0.0966, 0.0405, 0.0235, 0.0306, 0.0438], + device='cuda:0'), in_proj_covar=tensor([0.0019, 0.0022, 0.0020, 0.0028, 0.0025, 0.0022, 0.0019, 0.0021], + device='cuda:0'), out_proj_covar=tensor([2.2391e-05, 2.5265e-05, 2.2381e-05, 3.3240e-05, 2.7415e-05, 2.4588e-05, + 2.3080e-05, 2.4886e-05], device='cuda:0') +2023-03-27 16:50:14,494 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1280, 3.1275, 3.3644, 2.7069, 2.8942, 2.2563, 3.2187, 3.2203], + device='cuda:0'), covar=tensor([0.0609, 0.0258, 0.0551, 0.0378, 0.0709, 0.0457, 0.0568, 0.0403], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0030, 0.0032, 0.0034, 0.0037, 0.0029, 0.0029, 0.0029], + device='cuda:0'), out_proj_covar=tensor([4.6613e-05, 3.5675e-05, 3.7858e-05, 3.8385e-05, 4.3827e-05, 3.6722e-05, + 3.5370e-05, 3.5987e-05], device='cuda:0') +2023-03-27 16:50:37,047 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.302e+02 8.227e+02 1.028e+03 1.238e+03 2.427e+03, threshold=2.056e+03, percent-clipped=3.0 +2023-03-27 16:50:40,373 INFO [train.py:892] (0/4) Epoch 2, batch 1850, loss[loss=0.3372, simple_loss=0.3796, pruned_loss=0.1474, over 19828.00 frames. ], tot_loss[loss=0.3789, simple_loss=0.391, pruned_loss=0.1834, over 3947591.43 frames. ], batch size: 57, lr: 4.39e-02, grad_scale: 8.0 +2023-03-27 16:50:47,422 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-2.pt +2023-03-27 16:51:36,780 INFO [train.py:892] (0/4) Epoch 3, batch 0, loss[loss=0.4145, simple_loss=0.4093, pruned_loss=0.2099, over 19753.00 frames. ], tot_loss[loss=0.4145, simple_loss=0.4093, pruned_loss=0.2099, over 19753.00 frames. ], batch size: 273, lr: 4.17e-02, grad_scale: 8.0 +2023-03-27 16:51:36,781 INFO [train.py:917] (0/4) Computing validation loss +2023-03-27 16:52:02,989 INFO [train.py:926] (0/4) Epoch 3, validation: loss=0.2594, simple_loss=0.3267, pruned_loss=0.09605, over 2883724.00 frames. +2023-03-27 16:52:02,990 INFO [train.py:927] (0/4) Maximum memory allocated so far is 20927MB +2023-03-27 16:52:13,657 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. 
limit=2.0 +2023-03-27 16:52:30,184 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3723.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 16:52:35,927 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.4400, 4.9591, 4.8800, 5.4029, 5.0131, 5.5472, 5.0919, 5.5994], + device='cuda:0'), covar=tensor([0.0216, 0.0196, 0.0270, 0.0141, 0.0285, 0.0076, 0.0217, 0.0290], + device='cuda:0'), in_proj_covar=tensor([0.0079, 0.0088, 0.0098, 0.0079, 0.0091, 0.0068, 0.0096, 0.0102], + device='cuda:0'), out_proj_covar=tensor([9.7024e-05, 1.2423e-04, 1.3357e-04, 1.0757e-04, 1.3505e-04, 9.6859e-05, + 1.2175e-04, 1.3880e-04], device='cuda:0') +2023-03-27 16:52:57,971 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3737.0, num_to_drop=2, layers_to_drop={2, 3} +2023-03-27 16:53:06,085 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3864, 2.8617, 3.8543, 3.6937, 3.6311, 4.1177, 3.2090, 3.2443], + device='cuda:0'), covar=tensor([0.1171, 0.6327, 0.0496, 0.0614, 0.1671, 0.0366, 0.1093, 0.1337], + device='cuda:0'), in_proj_covar=tensor([0.0099, 0.0150, 0.0064, 0.0062, 0.0112, 0.0057, 0.0074, 0.0092], + device='cuda:0'), out_proj_covar=tensor([9.4932e-05, 1.5780e-04, 5.7525e-05, 5.2399e-05, 1.1044e-04, 5.5584e-05, + 6.9817e-05, 8.6726e-05], device='cuda:0') +2023-03-27 16:53:09,258 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0169, 2.3093, 2.6336, 2.5806, 3.1248, 2.5575, 3.2528, 2.9916], + device='cuda:0'), covar=tensor([0.0703, 0.1731, 0.0868, 0.1384, 0.0520, 0.0499, 0.0283, 0.0442], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0087, 0.0052, 0.0092, 0.0038, 0.0055, 0.0038, 0.0042], + device='cuda:0'), out_proj_covar=tensor([4.7786e-05, 7.7656e-05, 4.6872e-05, 8.5042e-05, 3.7008e-05, 4.8666e-05, + 3.2150e-05, 3.4049e-05], device='cuda:0') +2023-03-27 16:53:43,483 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4455, 2.2169, 2.1101, 1.9871, 1.7304, 1.5922, 1.9756, 2.1859], + device='cuda:0'), covar=tensor([0.0247, 0.0338, 0.0518, 0.0215, 0.0556, 0.0719, 0.0498, 0.0443], + device='cuda:0'), in_proj_covar=tensor([0.0026, 0.0030, 0.0031, 0.0027, 0.0029, 0.0030, 0.0031, 0.0030], + device='cuda:0'), out_proj_covar=tensor([3.3096e-05, 3.5524e-05, 4.0764e-05, 3.2825e-05, 3.8197e-05, 3.8755e-05, + 3.9230e-05, 3.9470e-05], device='cuda:0') +2023-03-27 16:53:50,426 INFO [train.py:892] (0/4) Epoch 3, batch 50, loss[loss=0.3472, simple_loss=0.3665, pruned_loss=0.1639, over 19835.00 frames. ], tot_loss[loss=0.3631, simple_loss=0.3789, pruned_loss=0.1736, over 891485.70 frames. ], batch size: 75, lr: 4.16e-02, grad_scale: 8.0 +2023-03-27 16:54:02,931 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.62 vs. limit=2.0 +2023-03-27 16:55:16,582 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.227e+02 7.628e+02 9.313e+02 1.232e+03 3.200e+03, threshold=1.863e+03, percent-clipped=4.0 +2023-03-27 16:55:31,486 INFO [train.py:892] (0/4) Epoch 3, batch 100, loss[loss=0.3635, simple_loss=0.3845, pruned_loss=0.1713, over 19723.00 frames. ], tot_loss[loss=0.3663, simple_loss=0.3807, pruned_loss=0.176, over 1568891.07 frames. ], batch size: 62, lr: 4.15e-02, grad_scale: 8.0 +2023-03-27 16:57:14,245 INFO [train.py:892] (0/4) Epoch 3, batch 150, loss[loss=0.4348, simple_loss=0.4292, pruned_loss=0.2202, over 19651.00 frames. ], tot_loss[loss=0.3614, simple_loss=0.3774, pruned_loss=0.1726, over 2097199.20 frames. 
], batch size: 330, lr: 4.14e-02, grad_scale: 8.0 +2023-03-27 16:58:41,899 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3902.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 16:58:44,810 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.184e+02 7.831e+02 9.821e+02 1.147e+03 2.106e+03, threshold=1.964e+03, percent-clipped=1.0 +2023-03-27 16:58:59,047 INFO [train.py:892] (0/4) Epoch 3, batch 200, loss[loss=0.4837, simple_loss=0.4608, pruned_loss=0.2533, over 19707.00 frames. ], tot_loss[loss=0.363, simple_loss=0.3785, pruned_loss=0.1737, over 2508804.35 frames. ], batch size: 315, lr: 4.13e-02, grad_scale: 8.0 +2023-03-27 16:59:30,178 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3926.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:00:09,626 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3944.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 17:00:41,361 INFO [train.py:892] (0/4) Epoch 3, batch 250, loss[loss=0.2947, simple_loss=0.336, pruned_loss=0.1267, over 19697.00 frames. ], tot_loss[loss=0.3625, simple_loss=0.3785, pruned_loss=0.1733, over 2827804.15 frames. ], batch size: 59, lr: 4.12e-02, grad_scale: 8.0 +2023-03-27 17:01:37,848 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3987.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:01:47,628 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3992.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 17:02:04,199 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-4000.pt +2023-03-27 17:02:15,831 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.914e+02 7.402e+02 9.885e+02 1.286e+03 2.496e+03, threshold=1.977e+03, percent-clipped=5.0 +2023-03-27 17:02:31,808 INFO [train.py:892] (0/4) Epoch 3, batch 300, loss[loss=0.2943, simple_loss=0.3425, pruned_loss=0.1231, over 19803.00 frames. ], tot_loss[loss=0.3617, simple_loss=0.3784, pruned_loss=0.1725, over 3077506.09 frames. ], batch size: 51, lr: 4.11e-02, grad_scale: 8.0 +2023-03-27 17:03:15,073 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4032.0, num_to_drop=1, layers_to_drop={3} +2023-03-27 17:04:16,801 INFO [train.py:892] (0/4) Epoch 3, batch 350, loss[loss=0.352, simple_loss=0.3569, pruned_loss=0.1735, over 19872.00 frames. ], tot_loss[loss=0.3616, simple_loss=0.3782, pruned_loss=0.1725, over 3270070.45 frames. ], batch size: 138, lr: 4.10e-02, grad_scale: 8.0 +2023-03-27 17:04:46,069 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.2623, 2.2426, 1.7555, 2.4329, 2.6485, 2.6325, 2.5862, 2.4700], + device='cuda:0'), covar=tensor([0.0496, 0.0410, 0.2096, 0.0465, 0.0324, 0.0300, 0.0355, 0.0235], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0042, 0.0080, 0.0043, 0.0045, 0.0040, 0.0042, 0.0038], + device='cuda:0'), out_proj_covar=tensor([5.9220e-05, 5.3213e-05, 1.0889e-04, 5.5489e-05, 5.3906e-05, 5.2932e-05, + 5.3534e-05, 4.8552e-05], device='cuda:0') +2023-03-27 17:05:40,873 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.06 vs. 
limit=2.0 +2023-03-27 17:05:45,543 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.173e+02 7.083e+02 8.600e+02 1.034e+03 2.171e+03, threshold=1.720e+03, percent-clipped=1.0 +2023-03-27 17:05:59,475 INFO [train.py:892] (0/4) Epoch 3, batch 400, loss[loss=0.3604, simple_loss=0.3725, pruned_loss=0.1741, over 19817.00 frames. ], tot_loss[loss=0.3656, simple_loss=0.3811, pruned_loss=0.1751, over 3419572.84 frames. ], batch size: 133, lr: 4.09e-02, grad_scale: 8.0 +2023-03-27 17:07:02,394 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4141.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:07:46,004 INFO [train.py:892] (0/4) Epoch 3, batch 450, loss[loss=0.2988, simple_loss=0.3381, pruned_loss=0.1297, over 19880.00 frames. ], tot_loss[loss=0.3641, simple_loss=0.3797, pruned_loss=0.1743, over 3538228.61 frames. ], batch size: 47, lr: 4.08e-02, grad_scale: 8.0 +2023-03-27 17:09:11,893 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4202.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 17:09:12,052 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4202.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:09:14,943 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.069e+02 7.856e+02 9.394e+02 1.124e+03 2.140e+03, threshold=1.879e+03, percent-clipped=3.0 +2023-03-27 17:09:28,187 INFO [train.py:892] (0/4) Epoch 3, batch 500, loss[loss=0.3422, simple_loss=0.3513, pruned_loss=0.1666, over 19733.00 frames. ], tot_loss[loss=0.3602, simple_loss=0.3766, pruned_loss=0.1719, over 3630979.62 frames. ], batch size: 134, lr: 4.07e-02, grad_scale: 8.0 +2023-03-27 17:10:50,178 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4250.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 17:11:06,841 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-27 17:11:12,796 INFO [train.py:892] (0/4) Epoch 3, batch 550, loss[loss=0.3208, simple_loss=0.3494, pruned_loss=0.1461, over 19396.00 frames. ], tot_loss[loss=0.3593, simple_loss=0.3756, pruned_loss=0.1715, over 3701417.57 frames. ], batch size: 40, lr: 4.06e-02, grad_scale: 8.0 +2023-03-27 17:11:34,580 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.79 vs. limit=5.0 +2023-03-27 17:11:45,529 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=5.04 vs. limit=5.0 +2023-03-27 17:11:56,595 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4282.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:12:08,221 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4288.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:12:42,162 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.575e+02 7.145e+02 8.933e+02 1.130e+03 1.909e+03, threshold=1.787e+03, percent-clipped=2.0 +2023-03-27 17:12:57,542 INFO [train.py:892] (0/4) Epoch 3, batch 600, loss[loss=0.3394, simple_loss=0.3501, pruned_loss=0.1644, over 19822.00 frames. ], tot_loss[loss=0.3582, simple_loss=0.3752, pruned_loss=0.1706, over 3756014.39 frames. 
], batch size: 187, lr: 4.05e-02, grad_scale: 8.0 +2023-03-27 17:13:41,283 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4332.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 17:14:16,481 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4349.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:14:38,632 INFO [train.py:892] (0/4) Epoch 3, batch 650, loss[loss=0.3673, simple_loss=0.3771, pruned_loss=0.1787, over 19796.00 frames. ], tot_loss[loss=0.3554, simple_loss=0.3732, pruned_loss=0.1688, over 3799694.04 frames. ], batch size: 236, lr: 4.04e-02, grad_scale: 8.0 +2023-03-27 17:15:00,748 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-27 17:15:21,795 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4380.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 17:15:43,058 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4196, 2.3082, 2.1859, 2.7604, 1.8584, 1.9705, 2.2504, 2.6677], + device='cuda:0'), covar=tensor([0.0467, 0.0486, 0.0500, 0.0165, 0.0533, 0.0552, 0.0418, 0.0670], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0030, 0.0033, 0.0027, 0.0030, 0.0032, 0.0031, 0.0031], + device='cuda:0'), out_proj_covar=tensor([4.0332e-05, 4.0392e-05, 4.7461e-05, 3.7337e-05, 4.3043e-05, 4.5535e-05, + 4.4575e-05, 4.4419e-05], device='cuda:0') +2023-03-27 17:15:52,338 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1846, 2.1521, 2.0487, 2.3960, 1.5911, 1.7115, 2.0457, 2.3664], + device='cuda:0'), covar=tensor([0.0331, 0.0351, 0.0532, 0.0199, 0.0573, 0.0504, 0.0542, 0.0566], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0030, 0.0033, 0.0027, 0.0030, 0.0032, 0.0032, 0.0031], + device='cuda:0'), out_proj_covar=tensor([4.0469e-05, 4.0649e-05, 4.7927e-05, 3.7524e-05, 4.3448e-05, 4.5735e-05, + 4.5043e-05, 4.4810e-05], device='cuda:0') +2023-03-27 17:16:08,631 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.540e+02 7.551e+02 9.171e+02 1.079e+03 2.038e+03, threshold=1.834e+03, percent-clipped=1.0 +2023-03-27 17:16:24,579 INFO [train.py:892] (0/4) Epoch 3, batch 700, loss[loss=0.4344, simple_loss=0.4245, pruned_loss=0.2222, over 19623.00 frames. ], tot_loss[loss=0.3541, simple_loss=0.3722, pruned_loss=0.168, over 3833976.09 frames. ], batch size: 351, lr: 4.03e-02, grad_scale: 8.0 +2023-03-27 17:17:48,884 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1417, 1.7854, 3.3584, 3.2425, 3.1358, 3.1110, 3.0833, 3.0317], + device='cuda:0'), covar=tensor([0.0217, 0.1893, 0.0158, 0.0178, 0.0253, 0.0208, 0.0242, 0.0502], + device='cuda:0'), in_proj_covar=tensor([0.0054, 0.0113, 0.0059, 0.0053, 0.0049, 0.0057, 0.0051, 0.0059], + device='cuda:0'), out_proj_covar=tensor([6.5704e-05, 1.3282e-04, 6.8524e-05, 6.4309e-05, 6.4987e-05, 6.8426e-05, + 6.5785e-05, 7.9273e-05], device='cuda:0') +2023-03-27 17:18:06,990 INFO [train.py:892] (0/4) Epoch 3, batch 750, loss[loss=0.3474, simple_loss=0.37, pruned_loss=0.1623, over 19810.00 frames. ], tot_loss[loss=0.3524, simple_loss=0.3708, pruned_loss=0.167, over 3859292.00 frames. 
], batch size: 202, lr: 4.02e-02, grad_scale: 8.0 +2023-03-27 17:19:22,122 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4497.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:19:34,746 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.458e+02 7.939e+02 9.251e+02 1.113e+03 1.728e+03, threshold=1.850e+03, percent-clipped=0.0 +2023-03-27 17:19:48,317 INFO [train.py:892] (0/4) Epoch 3, batch 800, loss[loss=0.3813, simple_loss=0.4075, pruned_loss=0.1775, over 19677.00 frames. ], tot_loss[loss=0.3513, simple_loss=0.3702, pruned_loss=0.1662, over 3878617.50 frames. ], batch size: 52, lr: 4.01e-02, grad_scale: 8.0 +2023-03-27 17:20:12,038 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-27 17:21:33,146 INFO [train.py:892] (0/4) Epoch 3, batch 850, loss[loss=0.3151, simple_loss=0.3484, pruned_loss=0.1409, over 19796.00 frames. ], tot_loss[loss=0.3521, simple_loss=0.3712, pruned_loss=0.1665, over 3893398.64 frames. ], batch size: 79, lr: 4.00e-02, grad_scale: 8.0 +2023-03-27 17:22:13,933 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4582.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:22:58,522 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.263e+02 6.963e+02 8.935e+02 1.119e+03 2.174e+03, threshold=1.787e+03, percent-clipped=2.0 +2023-03-27 17:23:13,500 INFO [train.py:892] (0/4) Epoch 3, batch 900, loss[loss=0.3953, simple_loss=0.4059, pruned_loss=0.1923, over 19668.00 frames. ], tot_loss[loss=0.3503, simple_loss=0.3699, pruned_loss=0.1653, over 3905198.88 frames. ], batch size: 64, lr: 3.99e-02, grad_scale: 8.0 +2023-03-27 17:23:53,293 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4630.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:24:23,395 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4644.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:24:56,909 INFO [train.py:892] (0/4) Epoch 3, batch 950, loss[loss=0.3214, simple_loss=0.3513, pruned_loss=0.1458, over 19888.00 frames. ], tot_loss[loss=0.3485, simple_loss=0.369, pruned_loss=0.164, over 3916252.28 frames. 
], batch size: 63, lr: 3.98e-02, grad_scale: 8.0 +2023-03-27 17:25:16,879 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7396, 2.2637, 2.8257, 2.3591, 2.4292, 3.0484, 2.1874, 2.2330], + device='cuda:0'), covar=tensor([0.0733, 0.4232, 0.0404, 0.0849, 0.1308, 0.0466, 0.0946, 0.1341], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0206, 0.0081, 0.0091, 0.0148, 0.0074, 0.0095, 0.0112], + device='cuda:0'), out_proj_covar=tensor([1.1501e-04, 2.2056e-04, 7.7802e-05, 8.3209e-05, 1.4888e-04, 7.8091e-05, + 9.3221e-05, 1.0739e-04], device='cuda:0') +2023-03-27 17:26:11,209 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5815, 2.6145, 2.1249, 3.0559, 3.0305, 3.0752, 3.3495, 3.2430], + device='cuda:0'), covar=tensor([0.0525, 0.0574, 0.1633, 0.0561, 0.0515, 0.0467, 0.0494, 0.0185], + device='cuda:0'), in_proj_covar=tensor([0.0056, 0.0046, 0.0082, 0.0049, 0.0049, 0.0042, 0.0045, 0.0040], + device='cuda:0'), out_proj_covar=tensor([7.4327e-05, 6.5206e-05, 1.1794e-04, 7.1670e-05, 6.7502e-05, 6.2430e-05, + 6.4887e-05, 5.7966e-05], device='cuda:0') +2023-03-27 17:26:26,127 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.384e+02 7.071e+02 8.023e+02 9.803e+02 2.669e+03, threshold=1.605e+03, percent-clipped=2.0 +2023-03-27 17:26:32,727 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.81 vs. limit=5.0 +2023-03-27 17:26:42,508 INFO [train.py:892] (0/4) Epoch 3, batch 1000, loss[loss=0.5101, simple_loss=0.492, pruned_loss=0.2641, over 19431.00 frames. ], tot_loss[loss=0.3492, simple_loss=0.37, pruned_loss=0.1641, over 3922270.59 frames. ], batch size: 412, lr: 3.97e-02, grad_scale: 8.0 +2023-03-27 17:26:49,618 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.79 vs. limit=5.0 +2023-03-27 17:28:22,547 INFO [train.py:892] (0/4) Epoch 3, batch 1050, loss[loss=0.381, simple_loss=0.3895, pruned_loss=0.1863, over 19800.00 frames. ], tot_loss[loss=0.3474, simple_loss=0.3684, pruned_loss=0.1632, over 3929809.43 frames. ], batch size: 200, lr: 3.96e-02, grad_scale: 8.0 +2023-03-27 17:28:29,543 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.09 vs. limit=2.0 +2023-03-27 17:29:37,259 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4797.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:29:50,536 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.737e+02 7.391e+02 9.077e+02 1.100e+03 2.003e+03, threshold=1.815e+03, percent-clipped=3.0 +2023-03-27 17:30:03,721 INFO [train.py:892] (0/4) Epoch 3, batch 1100, loss[loss=0.3079, simple_loss=0.3364, pruned_loss=0.1397, over 19531.00 frames. ], tot_loss[loss=0.3501, simple_loss=0.3706, pruned_loss=0.1647, over 3932892.40 frames. 
], batch size: 46, lr: 3.95e-02, grad_scale: 8.0 +2023-03-27 17:30:40,800 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8836, 4.0736, 4.0537, 4.0321, 3.8965, 4.0488, 3.6272, 3.5732], + device='cuda:0'), covar=tensor([0.0446, 0.0364, 0.0695, 0.0493, 0.0522, 0.0501, 0.0551, 0.1066], + device='cuda:0'), in_proj_covar=tensor([0.0101, 0.0094, 0.0135, 0.0104, 0.0108, 0.0092, 0.0120, 0.0153], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 17:30:42,658 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2085, 3.8365, 3.8406, 4.2044, 3.8587, 4.2063, 4.1488, 4.4400], + device='cuda:0'), covar=tensor([0.0439, 0.0324, 0.0455, 0.0245, 0.0485, 0.0201, 0.0315, 0.0234], + device='cuda:0'), in_proj_covar=tensor([0.0087, 0.0093, 0.0101, 0.0086, 0.0092, 0.0072, 0.0101, 0.0108], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-03-27 17:31:15,742 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4845.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:31:50,574 INFO [train.py:892] (0/4) Epoch 3, batch 1150, loss[loss=0.2714, simple_loss=0.3165, pruned_loss=0.1131, over 19945.00 frames. ], tot_loss[loss=0.3477, simple_loss=0.3684, pruned_loss=0.1636, over 3937299.02 frames. ], batch size: 46, lr: 3.95e-02, grad_scale: 8.0 +2023-03-27 17:32:23,498 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.94 vs. limit=2.0 +2023-03-27 17:33:20,477 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.536e+02 7.167e+02 9.190e+02 1.175e+03 2.796e+03, threshold=1.838e+03, percent-clipped=7.0 +2023-03-27 17:33:28,837 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.5586, 2.1959, 1.6903, 1.7870, 1.7376, 1.5636, 1.7412, 2.0894], + device='cuda:0'), covar=tensor([0.0475, 0.0273, 0.0513, 0.0433, 0.0378, 0.0769, 0.0719, 0.0634], + device='cuda:0'), in_proj_covar=tensor([0.0025, 0.0027, 0.0030, 0.0026, 0.0027, 0.0030, 0.0031, 0.0028], + device='cuda:0'), out_proj_covar=tensor([3.8744e-05, 3.9431e-05, 4.6815e-05, 4.0333e-05, 4.2160e-05, 4.6065e-05, + 4.7188e-05, 4.3402e-05], device='cuda:0') +2023-03-27 17:33:33,646 INFO [train.py:892] (0/4) Epoch 3, batch 1200, loss[loss=0.3359, simple_loss=0.3555, pruned_loss=0.1582, over 19738.00 frames. ], tot_loss[loss=0.3481, simple_loss=0.369, pruned_loss=0.1636, over 3940070.82 frames. 
], batch size: 77, lr: 3.94e-02, grad_scale: 8.0 +2023-03-27 17:33:36,614 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2395, 2.9716, 3.2281, 2.8374, 3.2209, 2.5078, 2.4998, 2.9054], + device='cuda:0'), covar=tensor([0.0200, 0.0224, 0.0162, 0.0222, 0.0227, 0.0771, 0.1034, 0.0331], + device='cuda:0'), in_proj_covar=tensor([0.0049, 0.0052, 0.0055, 0.0064, 0.0060, 0.0076, 0.0088, 0.0059], + device='cuda:0'), out_proj_covar=tensor([8.8650e-05, 1.0621e-04, 9.8809e-05, 1.1832e-04, 1.2370e-04, 1.4430e-04, + 1.5991e-04, 1.0808e-04], device='cuda:0') +2023-03-27 17:34:16,847 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8054, 2.4886, 3.1342, 3.1317, 3.8361, 3.1749, 3.5394, 3.7888], + device='cuda:0'), covar=tensor([0.0371, 0.1349, 0.0648, 0.1320, 0.0618, 0.0493, 0.0260, 0.0254], + device='cuda:0'), in_proj_covar=tensor([0.0078, 0.0109, 0.0081, 0.0125, 0.0060, 0.0082, 0.0060, 0.0064], + device='cuda:0'), out_proj_covar=tensor([8.5624e-05, 1.0908e-04, 8.7936e-05, 1.3032e-04, 6.9985e-05, 8.6192e-05, + 6.1306e-05, 6.6203e-05], device='cuda:0') +2023-03-27 17:34:43,367 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4944.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:35:18,127 INFO [train.py:892] (0/4) Epoch 3, batch 1250, loss[loss=0.3187, simple_loss=0.3583, pruned_loss=0.1396, over 19748.00 frames. ], tot_loss[loss=0.3478, simple_loss=0.3687, pruned_loss=0.1634, over 3942115.79 frames. ], batch size: 97, lr: 3.93e-02, grad_scale: 8.0 +2023-03-27 17:36:20,280 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4992.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 17:36:47,196 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.843e+02 6.781e+02 8.883e+02 1.088e+03 2.699e+03, threshold=1.777e+03, percent-clipped=1.0 +2023-03-27 17:37:00,636 INFO [train.py:892] (0/4) Epoch 3, batch 1300, loss[loss=0.3211, simple_loss=0.3524, pruned_loss=0.1449, over 19820.00 frames. ], tot_loss[loss=0.3453, simple_loss=0.3667, pruned_loss=0.162, over 3945103.59 frames. ], batch size: 103, lr: 3.92e-02, grad_scale: 8.0 +2023-03-27 17:38:43,018 INFO [train.py:892] (0/4) Epoch 3, batch 1350, loss[loss=0.329, simple_loss=0.3648, pruned_loss=0.1466, over 19845.00 frames. ], tot_loss[loss=0.3448, simple_loss=0.3667, pruned_loss=0.1614, over 3945910.36 frames. 
], batch size: 59, lr: 3.91e-02, grad_scale: 8.0 +2023-03-27 17:38:43,968 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6389, 3.5338, 2.3688, 3.9575, 3.9263, 1.6470, 3.3506, 3.6336], + device='cuda:0'), covar=tensor([0.0305, 0.0539, 0.1699, 0.0108, 0.0089, 0.2317, 0.0460, 0.0164], + device='cuda:0'), in_proj_covar=tensor([0.0085, 0.0099, 0.0138, 0.0053, 0.0049, 0.0138, 0.0100, 0.0070], + device='cuda:0'), out_proj_covar=tensor([8.5520e-05, 1.0606e-04, 1.3465e-04, 5.5191e-05, 4.9899e-05, 1.3027e-04, + 1.0208e-04, 6.7826e-05], device='cuda:0') +2023-03-27 17:39:53,037 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.5297, 2.0388, 1.7499, 1.1214, 1.6907, 1.8301, 1.5006, 1.7697], + device='cuda:0'), covar=tensor([0.0291, 0.0331, 0.0302, 0.1098, 0.0461, 0.0456, 0.0350, 0.0336], + device='cuda:0'), in_proj_covar=tensor([0.0027, 0.0028, 0.0028, 0.0041, 0.0040, 0.0031, 0.0026, 0.0029], + device='cuda:0'), out_proj_covar=tensor([4.0955e-05, 4.3291e-05, 4.1239e-05, 6.2875e-05, 5.9884e-05, 4.6018e-05, + 4.2107e-05, 4.3920e-05], device='cuda:0') +2023-03-27 17:40:10,987 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 6.737e+02 7.871e+02 9.316e+02 1.602e+03, threshold=1.574e+03, percent-clipped=0.0 +2023-03-27 17:40:26,683 INFO [train.py:892] (0/4) Epoch 3, batch 1400, loss[loss=0.3354, simple_loss=0.3823, pruned_loss=0.1442, over 19674.00 frames. ], tot_loss[loss=0.3439, simple_loss=0.3657, pruned_loss=0.161, over 3947257.36 frames. ], batch size: 55, lr: 3.90e-02, grad_scale: 8.0 +2023-03-27 17:40:40,735 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.7482, 2.0130, 2.0511, 1.3924, 2.1895, 2.0639, 1.8097, 2.1210], + device='cuda:0'), covar=tensor([0.0264, 0.0926, 0.0266, 0.0849, 0.0402, 0.0487, 0.0479, 0.0510], + device='cuda:0'), in_proj_covar=tensor([0.0026, 0.0028, 0.0028, 0.0039, 0.0039, 0.0030, 0.0026, 0.0029], + device='cuda:0'), out_proj_covar=tensor([4.0659e-05, 4.3597e-05, 4.0121e-05, 6.2044e-05, 5.8889e-05, 4.5327e-05, + 4.1654e-05, 4.3993e-05], device='cuda:0') +2023-03-27 17:41:21,410 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4797, 2.0869, 2.6311, 2.7817, 3.4441, 2.9714, 3.4632, 3.6686], + device='cuda:0'), covar=tensor([0.0448, 0.1695, 0.0733, 0.1366, 0.0493, 0.0544, 0.0240, 0.0255], + device='cuda:0'), in_proj_covar=tensor([0.0084, 0.0116, 0.0085, 0.0132, 0.0066, 0.0086, 0.0062, 0.0069], + device='cuda:0'), out_proj_covar=tensor([9.4497e-05, 1.1675e-04, 9.5879e-05, 1.3898e-04, 7.8554e-05, 9.2823e-05, + 6.5466e-05, 7.4650e-05], device='cuda:0') +2023-03-27 17:42:06,538 INFO [train.py:892] (0/4) Epoch 3, batch 1450, loss[loss=0.3095, simple_loss=0.3506, pruned_loss=0.1342, over 19680.00 frames. ], tot_loss[loss=0.343, simple_loss=0.3654, pruned_loss=0.1603, over 3947696.93 frames. ], batch size: 49, lr: 3.89e-02, grad_scale: 8.0 +2023-03-27 17:42:37,847 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.2782, 2.3069, 3.0467, 2.5983, 2.9265, 2.3922, 2.6960, 3.3419], + device='cuda:0'), covar=tensor([0.0803, 0.0358, 0.0360, 0.0350, 0.0358, 0.0374, 0.0332, 0.0185], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0032, 0.0035, 0.0045, 0.0035, 0.0032, 0.0030, 0.0030], + device='cuda:0'), out_proj_covar=tensor([6.5961e-05, 5.4133e-05, 5.8055e-05, 6.8194e-05, 5.8543e-05, 5.5391e-05, + 5.0749e-05, 5.0524e-05], device='cuda:0') +2023-03-27 17:42:45,223 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. 
limit=2.0 +2023-03-27 17:43:02,634 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.90 vs. limit=2.0 +2023-03-27 17:43:35,891 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 5.438e+02 7.354e+02 9.251e+02 1.233e+03 2.096e+03, threshold=1.850e+03, percent-clipped=6.0 +2023-03-27 17:43:49,221 INFO [train.py:892] (0/4) Epoch 3, batch 1500, loss[loss=0.4368, simple_loss=0.4333, pruned_loss=0.2201, over 19636.00 frames. ], tot_loss[loss=0.3446, simple_loss=0.3662, pruned_loss=0.1615, over 3947390.07 frames. ], batch size: 359, lr: 3.88e-02, grad_scale: 8.0 +2023-03-27 17:44:50,875 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1289, 3.7516, 3.7632, 4.1531, 3.8923, 4.0169, 4.0278, 4.3072], + device='cuda:0'), covar=tensor([0.0396, 0.0287, 0.0436, 0.0199, 0.0331, 0.0379, 0.0276, 0.0285], + device='cuda:0'), in_proj_covar=tensor([0.0091, 0.0096, 0.0107, 0.0090, 0.0095, 0.0077, 0.0103, 0.0114], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-03-27 17:45:34,257 INFO [train.py:892] (0/4) Epoch 3, batch 1550, loss[loss=0.3582, simple_loss=0.3731, pruned_loss=0.1716, over 19790.00 frames. ], tot_loss[loss=0.3426, simple_loss=0.3653, pruned_loss=0.16, over 3947766.07 frames. ], batch size: 120, lr: 3.87e-02, grad_scale: 8.0 +2023-03-27 17:46:50,830 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.02 vs. limit=2.0 +2023-03-27 17:47:02,178 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.559e+02 6.524e+02 7.733e+02 9.539e+02 1.385e+03, threshold=1.547e+03, percent-clipped=0.0 +2023-03-27 17:47:15,537 INFO [train.py:892] (0/4) Epoch 3, batch 1600, loss[loss=0.2954, simple_loss=0.3345, pruned_loss=0.1282, over 19792.00 frames. ], tot_loss[loss=0.3426, simple_loss=0.3659, pruned_loss=0.1596, over 3948234.02 frames. ], batch size: 73, lr: 3.86e-02, grad_scale: 8.0 +2023-03-27 17:48:41,290 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.94 vs. limit=5.0 +2023-03-27 17:48:56,941 INFO [train.py:892] (0/4) Epoch 3, batch 1650, loss[loss=0.3012, simple_loss=0.3393, pruned_loss=0.1316, over 19779.00 frames. ], tot_loss[loss=0.3392, simple_loss=0.3637, pruned_loss=0.1574, over 3947937.27 frames. ], batch size: 52, lr: 3.85e-02, grad_scale: 8.0 +2023-03-27 17:49:42,297 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3118, 3.2980, 4.6350, 3.8364, 3.9513, 4.5926, 3.3660, 3.3048], + device='cuda:0'), covar=tensor([0.0385, 0.3697, 0.0258, 0.0553, 0.1038, 0.0204, 0.0609, 0.1144], + device='cuda:0'), in_proj_covar=tensor([0.0136, 0.0219, 0.0096, 0.0104, 0.0172, 0.0084, 0.0111, 0.0126], + device='cuda:0'), out_proj_covar=tensor([1.3661e-04, 2.3522e-04, 9.7969e-05, 1.0148e-04, 1.7772e-04, 9.2208e-05, + 1.1333e-04, 1.2824e-04], device='cuda:0') +2023-03-27 17:50:27,886 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.447e+02 7.036e+02 8.570e+02 1.037e+03 1.553e+03, threshold=1.714e+03, percent-clipped=1.0 +2023-03-27 17:50:41,757 INFO [train.py:892] (0/4) Epoch 3, batch 1700, loss[loss=0.2886, simple_loss=0.3176, pruned_loss=0.1298, over 19757.00 frames. ], tot_loss[loss=0.3414, simple_loss=0.3653, pruned_loss=0.1587, over 3946929.50 frames. ], batch size: 125, lr: 3.84e-02, grad_scale: 8.0 +2023-03-27 17:52:19,755 INFO [train.py:892] (0/4) Epoch 3, batch 1750, loss[loss=0.2872, simple_loss=0.3299, pruned_loss=0.1223, over 19878.00 frames. 
], tot_loss[loss=0.341, simple_loss=0.365, pruned_loss=0.1585, over 3947415.97 frames. ], batch size: 84, lr: 3.83e-02, grad_scale: 8.0 +2023-03-27 17:53:34,942 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.884e+02 6.493e+02 7.809e+02 9.948e+02 1.676e+03, threshold=1.562e+03, percent-clipped=0.0 +2023-03-27 17:53:46,191 INFO [train.py:892] (0/4) Epoch 3, batch 1800, loss[loss=0.3005, simple_loss=0.3241, pruned_loss=0.1385, over 19749.00 frames. ], tot_loss[loss=0.3399, simple_loss=0.3642, pruned_loss=0.1578, over 3948058.59 frames. ], batch size: 134, lr: 3.82e-02, grad_scale: 16.0 +2023-03-27 17:55:08,420 INFO [train.py:892] (0/4) Epoch 3, batch 1850, loss[loss=0.376, simple_loss=0.3981, pruned_loss=0.177, over 19852.00 frames. ], tot_loss[loss=0.3406, simple_loss=0.3661, pruned_loss=0.1575, over 3948313.79 frames. ], batch size: 58, lr: 3.81e-02, grad_scale: 16.0 +2023-03-27 17:55:15,728 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-3.pt +2023-03-27 17:56:05,352 INFO [train.py:892] (0/4) Epoch 4, batch 0, loss[loss=0.2839, simple_loss=0.3095, pruned_loss=0.1292, over 19765.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3095, pruned_loss=0.1292, over 19765.00 frames. ], batch size: 125, lr: 3.56e-02, grad_scale: 16.0 +2023-03-27 17:56:05,353 INFO [train.py:917] (0/4) Computing validation loss +2023-03-27 17:56:21,498 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5534, 3.6079, 4.4864, 4.9519, 3.5829, 4.0287, 4.0486, 3.1473], + device='cuda:0'), covar=tensor([0.0273, 0.2403, 0.0423, 0.0093, 0.1865, 0.0374, 0.0459, 0.1861], + device='cuda:0'), in_proj_covar=tensor([0.0110, 0.0263, 0.0133, 0.0088, 0.0189, 0.0103, 0.0128, 0.0184], + device='cuda:0'), out_proj_covar=tensor([1.1510e-04, 2.5885e-04, 1.3836e-04, 8.4456e-05, 1.8556e-04, 9.9827e-05, + 1.2950e-04, 1.8085e-04], device='cuda:0') +2023-03-27 17:56:31,644 INFO [train.py:926] (0/4) Epoch 4, validation: loss=0.2293, simple_loss=0.3025, pruned_loss=0.07807, over 2883724.00 frames. +2023-03-27 17:56:31,645 INFO [train.py:927] (0/4) Maximum memory allocated so far is 21704MB +2023-03-27 17:57:55,090 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.716e+02 7.146e+02 8.558e+02 9.901e+02 2.056e+03, threshold=1.712e+03, percent-clipped=2.0 +2023-03-27 17:58:21,195 INFO [train.py:892] (0/4) Epoch 4, batch 50, loss[loss=0.2862, simple_loss=0.3275, pruned_loss=0.1224, over 19880.00 frames. ], tot_loss[loss=0.3178, simple_loss=0.346, pruned_loss=0.1448, over 890746.25 frames. ], batch size: 47, lr: 3.55e-02, grad_scale: 16.0 +2023-03-27 17:59:35,979 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5650.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 18:00:10,204 INFO [train.py:892] (0/4) Epoch 4, batch 100, loss[loss=0.3263, simple_loss=0.3611, pruned_loss=0.1458, over 19753.00 frames. ], tot_loss[loss=0.3309, simple_loss=0.3582, pruned_loss=0.1518, over 1564394.17 frames. 
], batch size: 110, lr: 3.54e-02, grad_scale: 16.0 +2023-03-27 18:00:52,045 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6830, 2.0344, 2.7831, 2.9862, 3.7613, 3.1628, 3.5074, 3.6609], + device='cuda:0'), covar=tensor([0.0389, 0.1615, 0.0782, 0.1159, 0.0565, 0.0529, 0.0265, 0.0250], + device='cuda:0'), in_proj_covar=tensor([0.0089, 0.0127, 0.0099, 0.0144, 0.0079, 0.0099, 0.0068, 0.0075], + device='cuda:0'), out_proj_covar=tensor([1.0546e-04, 1.3417e-04, 1.1784e-04, 1.5569e-04, 9.8784e-05, 1.1427e-04, + 7.6832e-05, 8.6528e-05], device='cuda:0') +2023-03-27 18:01:21,470 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.77 vs. limit=2.0 +2023-03-27 18:01:28,641 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.2945, 1.5872, 1.7025, 1.1954, 1.6397, 1.3620, 1.4952, 1.7545], + device='cuda:0'), covar=tensor([0.0382, 0.0565, 0.0435, 0.1211, 0.0700, 0.0674, 0.0584, 0.0628], + device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0030, 0.0030, 0.0044, 0.0042, 0.0031, 0.0026, 0.0031], + device='cuda:0'), out_proj_covar=tensor([4.7476e-05, 5.0577e-05, 4.8336e-05, 7.4717e-05, 7.1306e-05, 5.1376e-05, + 4.5958e-05, 5.1552e-05], device='cuda:0') +2023-03-27 18:01:31,610 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.543e+02 7.031e+02 8.200e+02 9.251e+02 1.434e+03, threshold=1.640e+03, percent-clipped=0.0 +2023-03-27 18:01:46,559 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5711.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 18:01:55,775 INFO [train.py:892] (0/4) Epoch 4, batch 150, loss[loss=0.2844, simple_loss=0.332, pruned_loss=0.1184, over 19746.00 frames. ], tot_loss[loss=0.3261, simple_loss=0.3539, pruned_loss=0.1492, over 2094576.69 frames. ], batch size: 44, lr: 3.54e-02, grad_scale: 16.0 +2023-03-27 18:02:04,194 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.87 vs. limit=2.0 +2023-03-27 18:02:30,156 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8914, 4.4629, 4.7375, 4.4611, 4.1400, 4.6028, 4.4136, 4.9703], + device='cuda:0'), covar=tensor([0.1596, 0.0273, 0.0316, 0.0251, 0.0451, 0.0267, 0.0286, 0.0223], + device='cuda:0'), in_proj_covar=tensor([0.0168, 0.0117, 0.0109, 0.0106, 0.0117, 0.0103, 0.0094, 0.0096], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-03-27 18:03:14,938 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4474, 3.4254, 4.3237, 3.4938, 3.8129, 4.1878, 4.0080, 4.1660], + device='cuda:0'), covar=tensor([0.0114, 0.0351, 0.0093, 0.1510, 0.0145, 0.0121, 0.0174, 0.0091], + device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0055, 0.0050, 0.0133, 0.0048, 0.0046, 0.0053, 0.0045], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0003, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-03-27 18:03:18,857 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5755.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:03:24,218 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.81 vs. 
limit=2.0 +2023-03-27 18:03:37,659 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6194, 2.0736, 2.9262, 2.9913, 3.6547, 3.0788, 3.6327, 3.6119], + device='cuda:0'), covar=tensor([0.0313, 0.1349, 0.0619, 0.1015, 0.0477, 0.0447, 0.0196, 0.0218], + device='cuda:0'), in_proj_covar=tensor([0.0091, 0.0127, 0.0100, 0.0147, 0.0080, 0.0100, 0.0069, 0.0076], + device='cuda:0'), out_proj_covar=tensor([1.0782e-04, 1.3490e-04, 1.1969e-04, 1.5932e-04, 9.9608e-05, 1.1605e-04, + 7.8334e-05, 8.7398e-05], device='cuda:0') +2023-03-27 18:03:42,976 INFO [train.py:892] (0/4) Epoch 4, batch 200, loss[loss=0.3436, simple_loss=0.3729, pruned_loss=0.1571, over 19875.00 frames. ], tot_loss[loss=0.3259, simple_loss=0.3543, pruned_loss=0.1488, over 2506248.73 frames. ], batch size: 53, lr: 3.53e-02, grad_scale: 16.0 +2023-03-27 18:04:38,350 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7442, 4.9294, 5.0122, 4.9854, 4.6326, 4.8607, 4.4212, 4.5271], + device='cuda:0'), covar=tensor([0.0331, 0.0284, 0.0553, 0.0314, 0.0588, 0.0686, 0.0550, 0.0966], + device='cuda:0'), in_proj_covar=tensor([0.0110, 0.0102, 0.0150, 0.0115, 0.0120, 0.0100, 0.0135, 0.0170], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 18:05:04,418 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4928, 2.9352, 3.4738, 2.8112, 2.9613, 3.3103, 3.1827, 3.3283], + device='cuda:0'), covar=tensor([0.0151, 0.0357, 0.0132, 0.1253, 0.0183, 0.0177, 0.0209, 0.0151], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0055, 0.0049, 0.0130, 0.0047, 0.0044, 0.0052, 0.0045], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0003, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-03-27 18:05:06,354 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1789, 1.8863, 1.3869, 2.0871, 2.2107, 2.0114, 2.2419, 1.8753], + device='cuda:0'), covar=tensor([0.0648, 0.0661, 0.1478, 0.0659, 0.0511, 0.0603, 0.0574, 0.0589], + device='cuda:0'), in_proj_covar=tensor([0.0071, 0.0057, 0.0090, 0.0059, 0.0057, 0.0048, 0.0056, 0.0052], + device='cuda:0'), out_proj_covar=tensor([1.1133e-04, 9.4120e-05, 1.4472e-04, 1.0113e-04, 9.2403e-05, 8.2419e-05, + 9.5307e-05, 8.9590e-05], device='cuda:0') +2023-03-27 18:05:07,192 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.424e+02 6.665e+02 7.849e+02 9.625e+02 1.890e+03, threshold=1.570e+03, percent-clipped=1.0 +2023-03-27 18:05:31,201 INFO [train.py:892] (0/4) Epoch 4, batch 250, loss[loss=0.3074, simple_loss=0.3406, pruned_loss=0.1371, over 19844.00 frames. ], tot_loss[loss=0.326, simple_loss=0.3542, pruned_loss=0.1489, over 2825682.82 frames. ], batch size: 190, lr: 3.52e-02, grad_scale: 16.0 +2023-03-27 18:05:32,110 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5816.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:06:59,820 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5858.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:07:17,598 INFO [train.py:892] (0/4) Epoch 4, batch 300, loss[loss=0.288, simple_loss=0.3293, pruned_loss=0.1234, over 19723.00 frames. ], tot_loss[loss=0.3279, simple_loss=0.3558, pruned_loss=0.15, over 3074895.69 frames. 
], batch size: 104, lr: 3.51e-02, grad_scale: 16.0 +2023-03-27 18:08:37,830 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.638e+02 6.552e+02 7.972e+02 9.789e+02 1.946e+03, threshold=1.594e+03, percent-clipped=3.0 +2023-03-27 18:09:05,482 INFO [train.py:892] (0/4) Epoch 4, batch 350, loss[loss=0.319, simple_loss=0.3496, pruned_loss=0.1442, over 19714.00 frames. ], tot_loss[loss=0.3263, simple_loss=0.3547, pruned_loss=0.1489, over 3267836.61 frames. ], batch size: 62, lr: 3.50e-02, grad_scale: 16.0 +2023-03-27 18:09:12,441 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5919.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:10:48,437 INFO [train.py:892] (0/4) Epoch 4, batch 400, loss[loss=0.3372, simple_loss=0.3716, pruned_loss=0.1514, over 19548.00 frames. ], tot_loss[loss=0.3255, simple_loss=0.3543, pruned_loss=0.1483, over 3417056.76 frames. ], batch size: 47, lr: 3.49e-02, grad_scale: 16.0 +2023-03-27 18:11:41,660 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5990.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:11:43,679 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3092, 3.3510, 4.1211, 3.7156, 3.7883, 4.1697, 4.1754, 4.0219], + device='cuda:0'), covar=tensor([0.0150, 0.0397, 0.0120, 0.1020, 0.0120, 0.0136, 0.0158, 0.0143], + device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0055, 0.0048, 0.0129, 0.0049, 0.0046, 0.0051, 0.0046], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0003, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-03-27 18:12:00,469 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-6000.pt +2023-03-27 18:12:12,456 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.258e+02 6.552e+02 7.732e+02 9.637e+02 1.932e+03, threshold=1.546e+03, percent-clipped=3.0 +2023-03-27 18:12:17,156 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6006.0, num_to_drop=1, layers_to_drop={3} +2023-03-27 18:12:36,856 INFO [train.py:892] (0/4) Epoch 4, batch 450, loss[loss=0.2892, simple_loss=0.3339, pruned_loss=0.1223, over 19805.00 frames. ], tot_loss[loss=0.3248, simple_loss=0.3538, pruned_loss=0.1479, over 3535477.66 frames. ], batch size: 107, lr: 3.48e-02, grad_scale: 16.0 +2023-03-27 18:13:33,659 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6043.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:13:50,817 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6051.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:14:22,267 INFO [train.py:892] (0/4) Epoch 4, batch 500, loss[loss=0.28, simple_loss=0.3222, pruned_loss=0.1189, over 19559.00 frames. ], tot_loss[loss=0.3263, simple_loss=0.3547, pruned_loss=0.1489, over 3625881.57 frames. 
], batch size: 47, lr: 3.47e-02, grad_scale: 16.0 +2023-03-27 18:14:53,306 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3369, 3.9386, 4.0051, 4.3980, 3.9606, 4.4260, 4.2507, 4.5792], + device='cuda:0'), covar=tensor([0.0520, 0.0284, 0.0372, 0.0194, 0.0412, 0.0169, 0.0295, 0.0307], + device='cuda:0'), in_proj_covar=tensor([0.0094, 0.0096, 0.0106, 0.0095, 0.0093, 0.0076, 0.0103, 0.0114], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:0') +2023-03-27 18:15:43,597 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.065e+02 6.254e+02 7.910e+02 9.308e+02 1.450e+03, threshold=1.582e+03, percent-clipped=0.0 +2023-03-27 18:15:44,397 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6104.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:15:58,470 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6111.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:15:59,037 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.84 vs. limit=2.0 +2023-03-27 18:16:07,744 INFO [train.py:892] (0/4) Epoch 4, batch 550, loss[loss=0.2796, simple_loss=0.3029, pruned_loss=0.1281, over 19833.00 frames. ], tot_loss[loss=0.3252, simple_loss=0.354, pruned_loss=0.1482, over 3697952.33 frames. ], batch size: 146, lr: 3.47e-02, grad_scale: 16.0 +2023-03-27 18:16:51,667 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6135.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:17:55,485 INFO [train.py:892] (0/4) Epoch 4, batch 600, loss[loss=0.3304, simple_loss=0.3558, pruned_loss=0.1525, over 19807.00 frames. ], tot_loss[loss=0.3226, simple_loss=0.3515, pruned_loss=0.1468, over 3754151.29 frames. ], batch size: 68, lr: 3.46e-02, grad_scale: 16.0 +2023-03-27 18:18:03,545 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6170.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:18:56,296 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6196.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:19:13,218 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6203.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:19:16,015 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.339e+02 6.549e+02 8.017e+02 9.808e+02 1.883e+03, threshold=1.603e+03, percent-clipped=1.0 +2023-03-27 18:19:29,980 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6211.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:19:37,336 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6214.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:19:40,631 INFO [train.py:892] (0/4) Epoch 4, batch 650, loss[loss=0.3161, simple_loss=0.3591, pruned_loss=0.1366, over 19860.00 frames. ], tot_loss[loss=0.32, simple_loss=0.3493, pruned_loss=0.1453, over 3798098.90 frames. ], batch size: 78, lr: 3.45e-02, grad_scale: 16.0 +2023-03-27 18:20:11,623 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6231.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:21:22,754 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6264.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:21:25,682 INFO [train.py:892] (0/4) Epoch 4, batch 700, loss[loss=0.2844, simple_loss=0.3234, pruned_loss=0.1227, over 19958.00 frames. 
], tot_loss[loss=0.318, simple_loss=0.3483, pruned_loss=0.1439, over 3831115.24 frames. ], batch size: 53, lr: 3.44e-02, grad_scale: 16.0 +2023-03-27 18:21:26,609 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6266.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:21:41,848 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6272.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:22:47,947 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.869e+02 6.690e+02 8.159e+02 9.859e+02 1.679e+03, threshold=1.632e+03, percent-clipped=2.0 +2023-03-27 18:22:52,726 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6306.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 18:23:15,289 INFO [train.py:892] (0/4) Epoch 4, batch 750, loss[loss=0.2992, simple_loss=0.3295, pruned_loss=0.1345, over 19872.00 frames. ], tot_loss[loss=0.3192, simple_loss=0.3488, pruned_loss=0.1447, over 3858462.83 frames. ], batch size: 138, lr: 3.43e-02, grad_scale: 8.0 +2023-03-27 18:23:38,507 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6327.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:24:16,262 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6346.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:24:34,071 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6354.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 18:24:57,112 INFO [train.py:892] (0/4) Epoch 4, batch 800, loss[loss=0.479, simple_loss=0.4646, pruned_loss=0.2468, over 19581.00 frames. ], tot_loss[loss=0.3237, simple_loss=0.3523, pruned_loss=0.1476, over 3877457.63 frames. ], batch size: 376, lr: 3.42e-02, grad_scale: 8.0 +2023-03-27 18:25:05,765 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7675, 4.9596, 5.2593, 5.0694, 5.1738, 4.8518, 4.8331, 4.8233], + device='cuda:0'), covar=tensor([0.1126, 0.0743, 0.0942, 0.0709, 0.0633, 0.0783, 0.1715, 0.1971], + device='cuda:0'), in_proj_covar=tensor([0.0181, 0.0151, 0.0218, 0.0169, 0.0169, 0.0157, 0.0192, 0.0244], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-27 18:26:05,806 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6399.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:26:17,456 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.240e+02 6.935e+02 8.296e+02 1.069e+03 2.251e+03, threshold=1.659e+03, percent-clipped=2.0 +2023-03-27 18:26:29,626 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6411.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:26:38,523 INFO [train.py:892] (0/4) Epoch 4, batch 850, loss[loss=0.3002, simple_loss=0.3304, pruned_loss=0.135, over 19795.00 frames. ], tot_loss[loss=0.3234, simple_loss=0.3522, pruned_loss=0.1473, over 3893910.47 frames. 
], batch size: 185, lr: 3.42e-02, grad_scale: 8.0 +2023-03-27 18:27:15,436 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.3564, 2.6928, 2.9489, 2.6024, 2.8957, 2.9507, 2.5910, 3.0618], + device='cuda:0'), covar=tensor([0.1714, 0.0429, 0.0406, 0.0420, 0.0460, 0.0488, 0.0392, 0.0316], + device='cuda:0'), in_proj_covar=tensor([0.0170, 0.0120, 0.0112, 0.0112, 0.0123, 0.0111, 0.0097, 0.0100], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 18:27:53,649 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.90 vs. limit=2.0 +2023-03-27 18:28:08,153 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6459.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:28:14,568 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6462.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:28:23,857 INFO [train.py:892] (0/4) Epoch 4, batch 900, loss[loss=0.343, simple_loss=0.3507, pruned_loss=0.1677, over 19836.00 frames. ], tot_loss[loss=0.3218, simple_loss=0.3516, pruned_loss=0.1459, over 3906072.78 frames. ], batch size: 184, lr: 3.41e-02, grad_scale: 8.0 +2023-03-27 18:28:26,702 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7962, 2.5476, 1.3374, 3.1707, 3.0359, 3.0588, 3.2286, 2.4355], + device='cuda:0'), covar=tensor([0.0759, 0.0601, 0.2521, 0.0644, 0.0501, 0.0556, 0.0624, 0.0554], + device='cuda:0'), in_proj_covar=tensor([0.0073, 0.0059, 0.0091, 0.0063, 0.0060, 0.0050, 0.0058, 0.0057], + device='cuda:0'), out_proj_covar=tensor([1.2198e-04, 1.0356e-04, 1.5447e-04, 1.1331e-04, 1.0404e-04, 9.3822e-05, + 1.0670e-04, 1.0639e-04], device='cuda:0') +2023-03-27 18:28:26,706 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6467.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:29:14,977 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6491.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:29:17,642 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-27 18:29:44,756 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.785e+02 6.152e+02 8.046e+02 9.426e+02 1.704e+03, threshold=1.609e+03, percent-clipped=1.0 +2023-03-27 18:30:02,701 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6514.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:30:05,644 INFO [train.py:892] (0/4) Epoch 4, batch 950, loss[loss=0.3447, simple_loss=0.3497, pruned_loss=0.1699, over 19755.00 frames. ], tot_loss[loss=0.3223, simple_loss=0.3523, pruned_loss=0.1462, over 3916958.22 frames. 
], batch size: 188, lr: 3.40e-02, grad_scale: 8.0 +2023-03-27 18:30:19,440 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6523.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 18:30:24,479 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6526.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:30:28,551 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6528.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:31:32,878 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6559.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:31:38,622 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6562.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:31:46,898 INFO [train.py:892] (0/4) Epoch 4, batch 1000, loss[loss=0.2784, simple_loss=0.3126, pruned_loss=0.1221, over 19765.00 frames. ], tot_loss[loss=0.3206, simple_loss=0.3512, pruned_loss=0.1451, over 3924569.39 frames. ], batch size: 130, lr: 3.39e-02, grad_scale: 8.0 +2023-03-27 18:31:49,565 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6567.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:32:28,748 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0339, 3.6908, 3.6348, 4.1158, 3.7910, 3.9812, 4.0074, 4.2234], + device='cuda:0'), covar=tensor([0.0565, 0.0305, 0.0419, 0.0209, 0.0395, 0.0281, 0.0280, 0.0313], + device='cuda:0'), in_proj_covar=tensor([0.0093, 0.0097, 0.0112, 0.0097, 0.0097, 0.0080, 0.0106, 0.0115], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 18:33:01,400 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.69 vs. limit=2.0 +2023-03-27 18:33:08,466 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.155e+02 6.376e+02 7.596e+02 9.023e+02 1.357e+03, threshold=1.519e+03, percent-clipped=0.0 +2023-03-27 18:33:31,602 INFO [train.py:892] (0/4) Epoch 4, batch 1050, loss[loss=0.3141, simple_loss=0.3493, pruned_loss=0.1395, over 19828.00 frames. ], tot_loss[loss=0.3213, simple_loss=0.3519, pruned_loss=0.1454, over 3929351.87 frames. ], batch size: 93, lr: 3.38e-02, grad_scale: 8.0 +2023-03-27 18:33:45,120 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6622.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:33:59,002 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6629.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:34:34,276 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6646.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:35:14,541 INFO [train.py:892] (0/4) Epoch 4, batch 1100, loss[loss=0.3273, simple_loss=0.3544, pruned_loss=0.1501, over 19819.00 frames. ], tot_loss[loss=0.3213, simple_loss=0.3516, pruned_loss=0.1455, over 3934969.38 frames. 
], batch size: 288, lr: 3.37e-02, grad_scale: 8.0 +2023-03-27 18:35:39,674 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0479, 4.6075, 4.5367, 5.0667, 4.7436, 5.2082, 4.9662, 5.2566], + device='cuda:0'), covar=tensor([0.0492, 0.0243, 0.0329, 0.0192, 0.0345, 0.0099, 0.0308, 0.0267], + device='cuda:0'), in_proj_covar=tensor([0.0093, 0.0097, 0.0112, 0.0098, 0.0096, 0.0081, 0.0105, 0.0115], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 18:36:06,433 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6690.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:36:14,099 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6694.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:36:23,819 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6699.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:36:34,437 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.080e+02 6.475e+02 7.910e+02 9.923e+02 2.054e+03, threshold=1.582e+03, percent-clipped=5.0 +2023-03-27 18:36:57,929 INFO [train.py:892] (0/4) Epoch 4, batch 1150, loss[loss=0.2764, simple_loss=0.3229, pruned_loss=0.1149, over 19812.00 frames. ], tot_loss[loss=0.3212, simple_loss=0.3515, pruned_loss=0.1454, over 3937976.52 frames. ], batch size: 50, lr: 3.37e-02, grad_scale: 8.0 +2023-03-27 18:37:34,572 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-27 18:38:01,810 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6747.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:38:41,081 INFO [train.py:892] (0/4) Epoch 4, batch 1200, loss[loss=0.2894, simple_loss=0.3192, pruned_loss=0.1298, over 19756.00 frames. ], tot_loss[loss=0.3188, simple_loss=0.3495, pruned_loss=0.144, over 3942641.19 frames. ], batch size: 129, lr: 3.36e-02, grad_scale: 8.0 +2023-03-27 18:39:32,819 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6791.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:39:59,924 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.559e+02 6.894e+02 8.217e+02 1.051e+03 2.121e+03, threshold=1.643e+03, percent-clipped=4.0 +2023-03-27 18:40:22,609 INFO [train.py:892] (0/4) Epoch 4, batch 1250, loss[loss=0.3021, simple_loss=0.3253, pruned_loss=0.1395, over 19813.00 frames. ], tot_loss[loss=0.3158, simple_loss=0.3469, pruned_loss=0.1423, over 3946154.78 frames. ], batch size: 202, lr: 3.35e-02, grad_scale: 8.0 +2023-03-27 18:40:26,921 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6818.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 18:40:36,461 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6823.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:40:42,269 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6826.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:41:10,805 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6839.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:41:50,004 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6859.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:42:04,703 INFO [train.py:892] (0/4) Epoch 4, batch 1300, loss[loss=0.3547, simple_loss=0.3789, pruned_loss=0.1652, over 19693.00 frames. 
], tot_loss[loss=0.3175, simple_loss=0.3485, pruned_loss=0.1432, over 3947207.72 frames. ], batch size: 305, lr: 3.34e-02, grad_scale: 8.0 +2023-03-27 18:42:07,207 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6867.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:42:21,483 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6874.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:42:29,127 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6878.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 18:43:24,638 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 6.185e+02 7.634e+02 1.003e+03 1.964e+03, threshold=1.527e+03, percent-clipped=1.0 +2023-03-27 18:43:29,132 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6907.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:43:34,552 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6909.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:43:45,418 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6915.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:43:46,690 INFO [train.py:892] (0/4) Epoch 4, batch 1350, loss[loss=0.294, simple_loss=0.328, pruned_loss=0.13, over 19877.00 frames. ], tot_loss[loss=0.316, simple_loss=0.3476, pruned_loss=0.1422, over 3949051.65 frames. ], batch size: 63, lr: 3.33e-02, grad_scale: 8.0 +2023-03-27 18:43:49,379 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4262, 2.6157, 2.7268, 2.1520, 2.7242, 1.8148, 2.4613, 2.6826], + device='cuda:0'), covar=tensor([0.0512, 0.0259, 0.0308, 0.0501, 0.0315, 0.0455, 0.0390, 0.0300], + device='cuda:0'), in_proj_covar=tensor([0.0045, 0.0037, 0.0039, 0.0056, 0.0040, 0.0035, 0.0035, 0.0032], + device='cuda:0'), out_proj_covar=tensor([9.3164e-05, 7.7757e-05, 8.0429e-05, 1.0308e-04, 8.3298e-05, 7.5487e-05, + 7.6664e-05, 6.8240e-05], device='cuda:0') +2023-03-27 18:43:53,220 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2832, 3.3308, 4.0305, 4.7232, 2.8025, 3.5457, 3.4165, 2.6110], + device='cuda:0'), covar=tensor([0.0325, 0.3136, 0.0655, 0.0111, 0.2395, 0.0460, 0.0643, 0.2075], + device='cuda:0'), in_proj_covar=tensor([0.0122, 0.0281, 0.0154, 0.0090, 0.0202, 0.0111, 0.0141, 0.0184], + device='cuda:0'), out_proj_covar=tensor([1.3650e-04, 2.8516e-04, 1.6591e-04, 9.2073e-05, 2.0727e-04, 1.1504e-04, + 1.4767e-04, 1.9188e-04], device='cuda:0') +2023-03-27 18:43:58,817 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6922.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:44:32,652 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6939.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 18:45:27,451 INFO [train.py:892] (0/4) Epoch 4, batch 1400, loss[loss=0.3863, simple_loss=0.3921, pruned_loss=0.1902, over 19773.00 frames. ], tot_loss[loss=0.3139, simple_loss=0.3461, pruned_loss=0.1409, over 3949980.37 frames. 
], batch size: 280, lr: 3.33e-02, grad_scale: 8.0 +2023-03-27 18:45:37,278 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6970.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:45:37,473 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6970.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:46:09,090 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6985.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:46:49,377 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.908e+02 6.198e+02 7.481e+02 9.100e+02 1.903e+03, threshold=1.496e+03, percent-clipped=2.0 +2023-03-27 18:47:13,278 INFO [train.py:892] (0/4) Epoch 4, batch 1450, loss[loss=0.3028, simple_loss=0.3381, pruned_loss=0.1337, over 19735.00 frames. ], tot_loss[loss=0.3132, simple_loss=0.346, pruned_loss=0.1402, over 3950611.95 frames. ], batch size: 118, lr: 3.32e-02, grad_scale: 8.0 +2023-03-27 18:47:19,839 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.3651, 2.0675, 1.7101, 0.9947, 1.6535, 2.0158, 2.0299, 1.7502], + device='cuda:0'), covar=tensor([0.0386, 0.0309, 0.0299, 0.1084, 0.0793, 0.0278, 0.0186, 0.0486], + device='cuda:0'), in_proj_covar=tensor([0.0032, 0.0030, 0.0030, 0.0046, 0.0045, 0.0030, 0.0026, 0.0031], + device='cuda:0'), out_proj_covar=tensor([5.8483e-05, 6.0029e-05, 5.5085e-05, 8.6602e-05, 8.6012e-05, 5.8412e-05, + 5.1105e-05, 5.8900e-05], device='cuda:0') +2023-03-27 18:48:17,394 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7048.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 18:48:19,131 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.8059, 6.0740, 6.0142, 5.9296, 5.8261, 5.9314, 5.2508, 5.3446], + device='cuda:0'), covar=tensor([0.0258, 0.0236, 0.0505, 0.0306, 0.0459, 0.0474, 0.0424, 0.0872], + device='cuda:0'), in_proj_covar=tensor([0.0123, 0.0113, 0.0169, 0.0131, 0.0127, 0.0113, 0.0148, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 18:48:54,061 INFO [train.py:892] (0/4) Epoch 4, batch 1500, loss[loss=0.2665, simple_loss=0.304, pruned_loss=0.1145, over 19855.00 frames. ], tot_loss[loss=0.3118, simple_loss=0.3446, pruned_loss=0.1395, over 3950797.84 frames. ], batch size: 118, lr: 3.31e-02, grad_scale: 8.0 +2023-03-27 18:49:21,159 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8647, 2.7989, 1.3704, 3.7320, 3.3061, 3.4753, 3.7024, 2.5836], + device='cuda:0'), covar=tensor([0.0799, 0.0585, 0.2062, 0.0439, 0.0489, 0.0419, 0.0531, 0.0545], + device='cuda:0'), in_proj_covar=tensor([0.0084, 0.0068, 0.0101, 0.0073, 0.0068, 0.0058, 0.0066, 0.0071], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-03-27 18:49:48,720 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.57 vs. limit=2.0 +2023-03-27 18:50:13,928 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.314e+02 6.331e+02 7.635e+02 9.535e+02 1.700e+03, threshold=1.527e+03, percent-clipped=1.0 +2023-03-27 18:50:22,674 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7109.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 18:50:36,735 INFO [train.py:892] (0/4) Epoch 4, batch 1550, loss[loss=0.2838, simple_loss=0.3184, pruned_loss=0.1245, over 19755.00 frames. 
], tot_loss[loss=0.3134, simple_loss=0.3463, pruned_loss=0.1403, over 3949679.15 frames. ], batch size: 205, lr: 3.30e-02, grad_scale: 8.0 +2023-03-27 18:50:41,179 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7118.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 18:50:51,444 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7123.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:52:15,285 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7163.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:52:19,898 INFO [train.py:892] (0/4) Epoch 4, batch 1600, loss[loss=0.4941, simple_loss=0.4701, pruned_loss=0.259, over 19556.00 frames. ], tot_loss[loss=0.3145, simple_loss=0.3467, pruned_loss=0.1411, over 3950511.53 frames. ], batch size: 376, lr: 3.30e-02, grad_scale: 8.0 +2023-03-27 18:52:20,694 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7166.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:52:31,406 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7171.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:53:39,224 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.151e+02 6.559e+02 7.813e+02 1.032e+03 1.739e+03, threshold=1.563e+03, percent-clipped=2.0 +2023-03-27 18:54:01,349 INFO [train.py:892] (0/4) Epoch 4, batch 1650, loss[loss=0.2921, simple_loss=0.318, pruned_loss=0.1331, over 19769.00 frames. ], tot_loss[loss=0.3146, simple_loss=0.3468, pruned_loss=0.1412, over 3949906.25 frames. ], batch size: 155, lr: 3.29e-02, grad_scale: 8.0 +2023-03-27 18:54:18,756 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7224.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 18:54:37,853 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7234.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 18:55:42,220 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7265.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:55:43,212 INFO [train.py:892] (0/4) Epoch 4, batch 1700, loss[loss=0.3015, simple_loss=0.3331, pruned_loss=0.1349, over 19765.00 frames. ], tot_loss[loss=0.3129, simple_loss=0.3457, pruned_loss=0.14, over 3950201.28 frames. ], batch size: 193, lr: 3.28e-02, grad_scale: 8.0 +2023-03-27 18:56:02,960 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7275.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:56:23,710 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7285.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:57:00,748 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.185e+02 6.685e+02 7.833e+02 9.153e+02 1.678e+03, threshold=1.567e+03, percent-clipped=4.0 +2023-03-27 18:57:20,324 INFO [train.py:892] (0/4) Epoch 4, batch 1750, loss[loss=0.297, simple_loss=0.3405, pruned_loss=0.1267, over 19803.00 frames. ], tot_loss[loss=0.3113, simple_loss=0.3445, pruned_loss=0.1391, over 3949852.11 frames. 
], batch size: 74, lr: 3.27e-02, grad_scale: 8.0 +2023-03-27 18:57:51,811 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7333.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:57:57,604 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7336.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 18:58:49,017 INFO [train.py:892] (0/4) Epoch 4, batch 1800, loss[loss=0.3474, simple_loss=0.373, pruned_loss=0.1609, over 19755.00 frames. ], tot_loss[loss=0.3098, simple_loss=0.3438, pruned_loss=0.1379, over 3948669.51 frames. ], batch size: 221, lr: 3.27e-02, grad_scale: 8.0 +2023-03-27 18:58:53,031 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.4501, 4.9417, 4.9105, 5.4758, 4.9324, 5.6097, 5.3484, 5.6034], + device='cuda:0'), covar=tensor([0.0372, 0.0210, 0.0284, 0.0160, 0.0396, 0.0123, 0.0217, 0.0218], + device='cuda:0'), in_proj_covar=tensor([0.0094, 0.0100, 0.0113, 0.0102, 0.0099, 0.0082, 0.0106, 0.0114], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 18:59:06,994 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=2.01 vs. limit=2.0 +2023-03-27 18:59:52,472 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7404.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 18:59:53,631 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.607e+02 6.051e+02 7.381e+02 9.131e+02 1.849e+03, threshold=1.476e+03, percent-clipped=3.0 +2023-03-27 18:59:57,443 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7407.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:00:11,534 INFO [train.py:892] (0/4) Epoch 4, batch 1850, loss[loss=0.3077, simple_loss=0.3529, pruned_loss=0.1313, over 19847.00 frames. ], tot_loss[loss=0.31, simple_loss=0.3451, pruned_loss=0.1375, over 3947466.76 frames. ], batch size: 58, lr: 3.26e-02, grad_scale: 8.0 +2023-03-27 19:00:18,775 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-4.pt +2023-03-27 19:01:08,059 INFO [train.py:892] (0/4) Epoch 5, batch 0, loss[loss=0.2938, simple_loss=0.32, pruned_loss=0.1338, over 19763.00 frames. ], tot_loss[loss=0.2938, simple_loss=0.32, pruned_loss=0.1338, over 19763.00 frames. ], batch size: 125, lr: 3.03e-02, grad_scale: 8.0 +2023-03-27 19:01:08,060 INFO [train.py:917] (0/4) Computing validation loss +2023-03-27 19:01:34,467 INFO [train.py:926] (0/4) Epoch 5, validation: loss=0.2154, simple_loss=0.2917, pruned_loss=0.06955, over 2883724.00 frames. +2023-03-27 19:01:34,468 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-27 19:01:52,254 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1889, 2.1988, 2.0809, 1.9950, 1.3898, 1.2894, 1.6605, 2.2642], + device='cuda:0'), covar=tensor([0.0336, 0.0307, 0.0383, 0.0321, 0.0394, 0.0761, 0.0616, 0.0625], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0029, 0.0030, 0.0026, 0.0028, 0.0031, 0.0037, 0.0028], + device='cuda:0'), out_proj_covar=tensor([5.5745e-05, 5.4754e-05, 5.7701e-05, 5.1694e-05, 5.6072e-05, 5.9783e-05, + 7.1001e-05, 5.5956e-05], device='cuda:0') +2023-03-27 19:02:10,462 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.01 vs. 
limit=2.0 +2023-03-27 19:03:19,054 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7468.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:03:24,022 INFO [train.py:892] (0/4) Epoch 5, batch 50, loss[loss=0.2675, simple_loss=0.3078, pruned_loss=0.1136, over 19790.00 frames. ], tot_loss[loss=0.2985, simple_loss=0.3332, pruned_loss=0.1319, over 891194.14 frames. ], batch size: 172, lr: 3.03e-02, grad_scale: 8.0 +2023-03-27 19:03:49,407 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-27 19:03:59,212 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8254, 3.7912, 2.6581, 4.3374, 4.4264, 1.7023, 3.6695, 3.6327], + device='cuda:0'), covar=tensor([0.0494, 0.0723, 0.1986, 0.0290, 0.0102, 0.2725, 0.0685, 0.0330], + device='cuda:0'), in_proj_covar=tensor([0.0120, 0.0139, 0.0165, 0.0088, 0.0071, 0.0169, 0.0158, 0.0098], + device='cuda:0'), out_proj_covar=tensor([1.3671e-04, 1.5716e-04, 1.7852e-04, 1.0684e-04, 8.1850e-05, 1.7696e-04, + 1.7467e-04, 1.0735e-04], device='cuda:0') +2023-03-27 19:04:36,700 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.390e+02 6.285e+02 7.498e+02 8.995e+02 1.568e+03, threshold=1.500e+03, percent-clipped=1.0 +2023-03-27 19:05:06,314 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7519.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 19:05:09,582 INFO [train.py:892] (0/4) Epoch 5, batch 100, loss[loss=0.262, simple_loss=0.3044, pruned_loss=0.1098, over 19660.00 frames. ], tot_loss[loss=0.2987, simple_loss=0.3351, pruned_loss=0.1311, over 1568292.57 frames. ], batch size: 58, lr: 3.02e-02, grad_scale: 8.0 +2023-03-27 19:05:37,871 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7534.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 19:06:14,897 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.76 vs. limit=2.0 +2023-03-27 19:06:43,420 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7565.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:06:53,842 INFO [train.py:892] (0/4) Epoch 5, batch 150, loss[loss=0.2849, simple_loss=0.3233, pruned_loss=0.1233, over 19647.00 frames. ], tot_loss[loss=0.297, simple_loss=0.3339, pruned_loss=0.1301, over 2097644.10 frames. ], batch size: 47, lr: 3.01e-02, grad_scale: 8.0 +2023-03-27 19:07:19,227 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7582.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 19:07:42,964 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.82 vs. limit=2.0 +2023-03-27 19:08:06,879 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.365e+02 5.876e+02 7.241e+02 9.334e+02 1.719e+03, threshold=1.448e+03, percent-clipped=1.0 +2023-03-27 19:08:24,602 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7613.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:08:42,164 INFO [train.py:892] (0/4) Epoch 5, batch 200, loss[loss=0.2915, simple_loss=0.3261, pruned_loss=0.1285, over 19813.00 frames. ], tot_loss[loss=0.2987, simple_loss=0.335, pruned_loss=0.1312, over 2509105.46 frames. 
], batch size: 72, lr: 3.01e-02, grad_scale: 8.0 +2023-03-27 19:08:48,990 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7624.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:09:02,473 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7631.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:10:28,526 INFO [train.py:892] (0/4) Epoch 5, batch 250, loss[loss=0.2973, simple_loss=0.3397, pruned_loss=0.1275, over 19803.00 frames. ], tot_loss[loss=0.3007, simple_loss=0.3365, pruned_loss=0.1324, over 2828083.64 frames. ], batch size: 74, lr: 3.00e-02, grad_scale: 8.0 +2023-03-27 19:10:55,844 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7685.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:11:37,109 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7704.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 19:11:38,138 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.842e+02 5.939e+02 7.915e+02 9.500e+02 2.384e+03, threshold=1.583e+03, percent-clipped=4.0 +2023-03-27 19:11:53,933 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0501, 5.2206, 5.5411, 5.2654, 5.2853, 5.0516, 5.1378, 5.0286], + device='cuda:0'), covar=tensor([0.1135, 0.0704, 0.0841, 0.0702, 0.0677, 0.0804, 0.1720, 0.1978], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0164, 0.0234, 0.0186, 0.0182, 0.0173, 0.0218, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-27 19:12:05,717 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.5925, 1.7017, 1.8190, 1.7635, 1.9524, 1.6056, 1.8415, 1.9157], + device='cuda:0'), covar=tensor([0.0491, 0.0381, 0.0356, 0.0605, 0.0331, 0.0381, 0.0352, 0.0234], + device='cuda:0'), in_proj_covar=tensor([0.0044, 0.0039, 0.0041, 0.0060, 0.0041, 0.0036, 0.0038, 0.0034], + device='cuda:0'), out_proj_covar=tensor([9.7602e-05, 8.8544e-05, 9.2068e-05, 1.1858e-04, 9.1483e-05, 8.2446e-05, + 8.8321e-05, 7.7960e-05], device='cuda:0') +2023-03-27 19:12:10,448 INFO [train.py:892] (0/4) Epoch 5, batch 300, loss[loss=0.2781, simple_loss=0.3219, pruned_loss=0.1172, over 19832.00 frames. ], tot_loss[loss=0.301, simple_loss=0.3375, pruned_loss=0.1322, over 3077585.80 frames. ], batch size: 101, lr: 2.99e-02, grad_scale: 8.0 +2023-03-27 19:12:43,379 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.2287, 2.3868, 2.7442, 2.2947, 2.6852, 2.1577, 2.2792, 2.7041], + device='cuda:0'), covar=tensor([0.0529, 0.0272, 0.0347, 0.0513, 0.0289, 0.0435, 0.0357, 0.0251], + device='cuda:0'), in_proj_covar=tensor([0.0043, 0.0038, 0.0040, 0.0059, 0.0040, 0.0035, 0.0037, 0.0033], + device='cuda:0'), out_proj_covar=tensor([9.5430e-05, 8.6669e-05, 8.9981e-05, 1.1666e-04, 8.9566e-05, 8.0631e-05, + 8.6759e-05, 7.6236e-05], device='cuda:0') +2023-03-27 19:12:59,417 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7743.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:13:18,450 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7752.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 19:13:18,900 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.06 vs. 
limit=2.0 +2023-03-27 19:13:42,630 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7763.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:13:48,022 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.5218, 1.5958, 1.8104, 1.4444, 1.2265, 1.4882, 1.3556, 1.6821], + device='cuda:0'), covar=tensor([0.0207, 0.0240, 0.0234, 0.0333, 0.0465, 0.0367, 0.0515, 0.0314], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0029, 0.0030, 0.0026, 0.0029, 0.0030, 0.0036, 0.0028], + device='cuda:0'), out_proj_covar=tensor([5.6510e-05, 5.6280e-05, 5.7762e-05, 5.2866e-05, 5.9622e-05, 5.9994e-05, + 6.9876e-05, 5.6212e-05], device='cuda:0') +2023-03-27 19:13:58,905 INFO [train.py:892] (0/4) Epoch 5, batch 350, loss[loss=0.2654, simple_loss=0.309, pruned_loss=0.1109, over 19849.00 frames. ], tot_loss[loss=0.3022, simple_loss=0.3378, pruned_loss=0.1333, over 3272137.40 frames. ], batch size: 81, lr: 2.98e-02, grad_scale: 8.0 +2023-03-27 19:14:48,337 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.4925, 1.5804, 1.6755, 1.5045, 1.2540, 1.4913, 1.3705, 1.5962], + device='cuda:0'), covar=tensor([0.0194, 0.0268, 0.0257, 0.0249, 0.0307, 0.0323, 0.0431, 0.0352], + device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0030, 0.0030, 0.0027, 0.0030, 0.0031, 0.0037, 0.0028], + device='cuda:0'), out_proj_covar=tensor([5.7834e-05, 5.8253e-05, 5.8946e-05, 5.4331e-05, 6.0462e-05, 6.1201e-05, + 7.0694e-05, 5.7375e-05], device='cuda:0') +2023-03-27 19:15:04,427 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7802.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:15:08,580 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7804.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:15:09,458 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.882e+02 6.339e+02 7.549e+02 9.099e+02 1.830e+03, threshold=1.510e+03, percent-clipped=2.0 +2023-03-27 19:15:38,914 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7819.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 19:15:42,001 INFO [train.py:892] (0/4) Epoch 5, batch 400, loss[loss=0.2926, simple_loss=0.3355, pruned_loss=0.1248, over 19870.00 frames. ], tot_loss[loss=0.3011, simple_loss=0.3373, pruned_loss=0.1324, over 3423724.90 frames. 
], batch size: 48, lr: 2.98e-02, grad_scale: 8.0 +2023-03-27 19:15:53,660 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1886, 4.1777, 2.7673, 4.8120, 5.0394, 2.3844, 4.1535, 4.1197], + device='cuda:0'), covar=tensor([0.0530, 0.0709, 0.2264, 0.0252, 0.0078, 0.2757, 0.0691, 0.0344], + device='cuda:0'), in_proj_covar=tensor([0.0121, 0.0140, 0.0171, 0.0090, 0.0069, 0.0170, 0.0161, 0.0102], + device='cuda:0'), out_proj_covar=tensor([1.3969e-04, 1.5970e-04, 1.8509e-04, 1.1030e-04, 8.1063e-05, 1.7900e-04, + 1.7941e-04, 1.1339e-04], device='cuda:0') +2023-03-27 19:16:57,864 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1779, 2.0833, 2.4081, 2.3833, 2.9057, 2.6147, 2.9482, 2.9567], + device='cuda:0'), covar=tensor([0.0446, 0.1250, 0.0951, 0.1259, 0.0728, 0.0800, 0.0320, 0.0383], + device='cuda:0'), in_proj_covar=tensor([0.0131, 0.0159, 0.0150, 0.0182, 0.0147, 0.0152, 0.0099, 0.0102], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-03-27 19:17:11,433 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7863.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:17:19,183 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7867.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:17:26,100 INFO [train.py:892] (0/4) Epoch 5, batch 450, loss[loss=0.3193, simple_loss=0.3507, pruned_loss=0.1439, over 19749.00 frames. ], tot_loss[loss=0.3029, simple_loss=0.3391, pruned_loss=0.1334, over 3540465.79 frames. ], batch size: 276, lr: 2.97e-02, grad_scale: 8.0 +2023-03-27 19:17:52,806 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=5.11 vs. limit=5.0 +2023-03-27 19:18:37,148 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.709e+02 6.480e+02 7.693e+02 9.079e+02 2.029e+03, threshold=1.539e+03, percent-clipped=2.0 +2023-03-27 19:19:12,479 INFO [train.py:892] (0/4) Epoch 5, batch 500, loss[loss=0.3356, simple_loss=0.3701, pruned_loss=0.1506, over 19654.00 frames. ], tot_loss[loss=0.3058, simple_loss=0.3415, pruned_loss=0.135, over 3630786.28 frames. ], batch size: 57, lr: 2.96e-02, grad_scale: 8.0 +2023-03-27 19:19:33,195 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7931.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:20:57,021 INFO [train.py:892] (0/4) Epoch 5, batch 550, loss[loss=0.2976, simple_loss=0.3327, pruned_loss=0.1313, over 19709.00 frames. ], tot_loss[loss=0.3047, simple_loss=0.3408, pruned_loss=0.1343, over 3700463.78 frames. ], batch size: 85, lr: 2.96e-02, grad_scale: 8.0 +2023-03-27 19:21:13,882 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7979.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:21:15,760 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7980.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:21:57,653 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-8000.pt +2023-03-27 19:22:12,796 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.154e+02 6.076e+02 7.580e+02 9.097e+02 2.073e+03, threshold=1.516e+03, percent-clipped=3.0 +2023-03-27 19:22:44,345 INFO [train.py:892] (0/4) Epoch 5, batch 600, loss[loss=0.3168, simple_loss=0.3624, pruned_loss=0.1355, over 19838.00 frames. ], tot_loss[loss=0.3044, simple_loss=0.3403, pruned_loss=0.1342, over 3755588.64 frames. 
], batch size: 57, lr: 2.95e-02, grad_scale: 8.0 +2023-03-27 19:24:11,371 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8063.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:24:28,880 INFO [train.py:892] (0/4) Epoch 5, batch 650, loss[loss=0.2919, simple_loss=0.3344, pruned_loss=0.1247, over 19812.00 frames. ], tot_loss[loss=0.3033, simple_loss=0.3394, pruned_loss=0.1336, over 3797570.90 frames. ], batch size: 96, lr: 2.94e-02, grad_scale: 8.0 +2023-03-27 19:25:31,087 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8099.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:25:43,551 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.014e+02 6.583e+02 7.852e+02 9.012e+02 1.490e+03, threshold=1.570e+03, percent-clipped=0.0 +2023-03-27 19:25:56,206 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8111.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:26:16,753 INFO [train.py:892] (0/4) Epoch 5, batch 700, loss[loss=0.2809, simple_loss=0.3129, pruned_loss=0.1244, over 19763.00 frames. ], tot_loss[loss=0.3019, simple_loss=0.3388, pruned_loss=0.1325, over 3831890.94 frames. ], batch size: 217, lr: 2.94e-02, grad_scale: 8.0 +2023-03-27 19:26:38,449 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.26 vs. limit=5.0 +2023-03-27 19:27:37,965 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8158.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:28:05,559 INFO [train.py:892] (0/4) Epoch 5, batch 750, loss[loss=0.2826, simple_loss=0.3189, pruned_loss=0.1232, over 19875.00 frames. ], tot_loss[loss=0.3001, simple_loss=0.3374, pruned_loss=0.1314, over 3856853.03 frames. ], batch size: 125, lr: 2.93e-02, grad_scale: 8.0 +2023-03-27 19:29:16,504 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.022e+02 6.531e+02 7.679e+02 9.153e+02 1.674e+03, threshold=1.536e+03, percent-clipped=1.0 +2023-03-27 19:29:50,715 INFO [train.py:892] (0/4) Epoch 5, batch 800, loss[loss=0.2854, simple_loss=0.328, pruned_loss=0.1213, over 19885.00 frames. ], tot_loss[loss=0.2997, simple_loss=0.3371, pruned_loss=0.1312, over 3877064.47 frames. ], batch size: 92, lr: 2.92e-02, grad_scale: 8.0 +2023-03-27 19:30:49,812 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-27 19:31:25,602 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8265.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:31:36,263 INFO [train.py:892] (0/4) Epoch 5, batch 850, loss[loss=0.2889, simple_loss=0.3304, pruned_loss=0.1237, over 19772.00 frames. ], tot_loss[loss=0.2982, simple_loss=0.336, pruned_loss=0.1302, over 3892886.62 frames. 
], batch size: 69, lr: 2.92e-02, grad_scale: 8.0 +2023-03-27 19:31:57,774 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8280.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:32:20,780 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8478, 4.8213, 5.3346, 5.1403, 5.1196, 4.6383, 4.9826, 4.9648], + device='cuda:0'), covar=tensor([0.1342, 0.0790, 0.0806, 0.0755, 0.0770, 0.0893, 0.1759, 0.2044], + device='cuda:0'), in_proj_covar=tensor([0.0193, 0.0165, 0.0230, 0.0186, 0.0181, 0.0174, 0.0217, 0.0263], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-27 19:32:32,667 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2970, 3.3389, 2.2410, 3.6221, 3.7268, 1.5425, 3.0593, 3.0773], + device='cuda:0'), covar=tensor([0.0585, 0.0708, 0.2287, 0.0285, 0.0188, 0.2721, 0.0799, 0.0428], + device='cuda:0'), in_proj_covar=tensor([0.0129, 0.0146, 0.0175, 0.0097, 0.0072, 0.0170, 0.0166, 0.0107], + device='cuda:0'), out_proj_covar=tensor([1.5015e-04, 1.6802e-04, 1.9132e-04, 1.2016e-04, 8.5771e-05, 1.8147e-04, + 1.8650e-04, 1.1879e-04], device='cuda:0') +2023-03-27 19:32:39,102 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-27 19:32:49,196 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.707e+02 5.455e+02 6.919e+02 8.386e+02 1.759e+03, threshold=1.384e+03, percent-clipped=2.0 +2023-03-27 19:33:22,217 INFO [train.py:892] (0/4) Epoch 5, batch 900, loss[loss=0.3225, simple_loss=0.3587, pruned_loss=0.1431, over 19820.00 frames. ], tot_loss[loss=0.299, simple_loss=0.3369, pruned_loss=0.1306, over 3906277.97 frames. ], batch size: 72, lr: 2.91e-02, grad_scale: 16.0 +2023-03-27 19:33:34,287 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8326.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:33:37,959 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8328.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:35:10,577 INFO [train.py:892] (0/4) Epoch 5, batch 950, loss[loss=0.2886, simple_loss=0.3396, pruned_loss=0.1188, over 19803.00 frames. ], tot_loss[loss=0.2992, simple_loss=0.3371, pruned_loss=0.1307, over 3917849.21 frames. 
], batch size: 79, lr: 2.91e-02, grad_scale: 16.0 +2023-03-27 19:35:50,364 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.6852, 1.9785, 1.5146, 1.1382, 1.7935, 2.0254, 1.9580, 1.7213], + device='cuda:0'), covar=tensor([0.0299, 0.0298, 0.0229, 0.0714, 0.0401, 0.0302, 0.0151, 0.0409], + device='cuda:0'), in_proj_covar=tensor([0.0035, 0.0032, 0.0034, 0.0049, 0.0049, 0.0032, 0.0026, 0.0031], + device='cuda:0'), out_proj_covar=tensor([7.0302e-05, 6.8137e-05, 6.7976e-05, 1.0132e-04, 9.9843e-05, 6.6210e-05, + 5.4733e-05, 6.4633e-05], device='cuda:0') +2023-03-27 19:35:53,584 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.6123, 5.9124, 5.9243, 5.8224, 5.7130, 5.8880, 5.2621, 5.3627], + device='cuda:0'), covar=tensor([0.0280, 0.0295, 0.0635, 0.0312, 0.0512, 0.0501, 0.0522, 0.0924], + device='cuda:0'), in_proj_covar=tensor([0.0132, 0.0127, 0.0186, 0.0139, 0.0137, 0.0122, 0.0156, 0.0194], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 19:36:10,027 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8399.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:36:23,192 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.522e+02 6.186e+02 7.459e+02 8.919e+02 2.032e+03, threshold=1.492e+03, percent-clipped=3.0 +2023-03-27 19:36:55,722 INFO [train.py:892] (0/4) Epoch 5, batch 1000, loss[loss=0.2657, simple_loss=0.3167, pruned_loss=0.1073, over 19704.00 frames. ], tot_loss[loss=0.3009, simple_loss=0.3384, pruned_loss=0.1318, over 3924029.15 frames. ], batch size: 48, lr: 2.90e-02, grad_scale: 16.0 +2023-03-27 19:37:45,555 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8388, 2.7487, 1.5305, 3.3646, 3.2551, 3.3829, 3.3958, 2.7339], + device='cuda:0'), covar=tensor([0.0538, 0.0466, 0.1522, 0.0415, 0.0367, 0.0302, 0.0409, 0.0576], + device='cuda:0'), in_proj_covar=tensor([0.0090, 0.0075, 0.0107, 0.0085, 0.0074, 0.0062, 0.0074, 0.0084], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 19:37:48,068 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-27 19:37:53,186 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8447.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:38:15,206 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8458.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:38:18,702 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.7745, 1.8315, 2.2440, 1.8368, 2.0403, 1.5506, 2.0480, 2.2285], + device='cuda:0'), covar=tensor([0.0555, 0.0344, 0.0294, 0.0601, 0.0322, 0.0369, 0.0338, 0.0201], + device='cuda:0'), in_proj_covar=tensor([0.0043, 0.0038, 0.0040, 0.0059, 0.0040, 0.0034, 0.0037, 0.0034], + device='cuda:0'), out_proj_covar=tensor([9.8709e-05, 9.1086e-05, 9.4149e-05, 1.2462e-04, 9.3395e-05, 8.1715e-05, + 8.9207e-05, 8.0556e-05], device='cuda:0') +2023-03-27 19:38:41,300 INFO [train.py:892] (0/4) Epoch 5, batch 1050, loss[loss=0.2755, simple_loss=0.3201, pruned_loss=0.1155, over 19613.00 frames. ], tot_loss[loss=0.3027, simple_loss=0.3397, pruned_loss=0.1328, over 3929738.36 frames. 
], batch size: 46, lr: 2.89e-02, grad_scale: 16.0 +2023-03-27 19:39:52,153 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.605e+02 6.353e+02 7.329e+02 8.982e+02 1.532e+03, threshold=1.466e+03, percent-clipped=3.0 +2023-03-27 19:39:54,941 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8506.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:40:00,243 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7284, 2.4421, 1.3360, 3.0466, 3.1418, 2.9697, 3.1969, 2.4809], + device='cuda:0'), covar=tensor([0.0509, 0.0578, 0.1681, 0.0393, 0.0357, 0.0372, 0.0339, 0.0634], + device='cuda:0'), in_proj_covar=tensor([0.0088, 0.0075, 0.0107, 0.0084, 0.0073, 0.0062, 0.0074, 0.0083], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 19:40:14,535 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7352, 4.2335, 4.2844, 4.8347, 4.3839, 4.8085, 4.7850, 4.9650], + device='cuda:0'), covar=tensor([0.0512, 0.0296, 0.0427, 0.0187, 0.0420, 0.0162, 0.0297, 0.0247], + device='cuda:0'), in_proj_covar=tensor([0.0100, 0.0111, 0.0127, 0.0109, 0.0107, 0.0085, 0.0112, 0.0125], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 19:40:27,773 INFO [train.py:892] (0/4) Epoch 5, batch 1100, loss[loss=0.2861, simple_loss=0.3176, pruned_loss=0.1273, over 19827.00 frames. ], tot_loss[loss=0.3016, simple_loss=0.3391, pruned_loss=0.1321, over 3933342.47 frames. ], batch size: 166, lr: 2.89e-02, grad_scale: 16.0 +2023-03-27 19:41:35,574 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.80 vs. limit=2.0 +2023-03-27 19:42:10,841 INFO [train.py:892] (0/4) Epoch 5, batch 1150, loss[loss=0.2967, simple_loss=0.3361, pruned_loss=0.1286, over 19555.00 frames. ], tot_loss[loss=0.3014, simple_loss=0.3395, pruned_loss=0.1316, over 3935851.47 frames. ], batch size: 47, lr: 2.88e-02, grad_scale: 16.0 +2023-03-27 19:43:21,775 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.231e+02 6.114e+02 7.456e+02 8.704e+02 1.962e+03, threshold=1.491e+03, percent-clipped=1.0 +2023-03-27 19:43:23,341 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.79 vs. limit=5.0 +2023-03-27 19:43:56,824 INFO [train.py:892] (0/4) Epoch 5, batch 1200, loss[loss=0.357, simple_loss=0.3798, pruned_loss=0.1671, over 19773.00 frames. ], tot_loss[loss=0.302, simple_loss=0.3397, pruned_loss=0.1322, over 3938554.62 frames. ], batch size: 247, lr: 2.87e-02, grad_scale: 16.0 +2023-03-27 19:43:57,585 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8621.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:45:29,348 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4462, 2.1768, 2.5991, 2.1766, 2.4593, 2.1039, 2.5794, 2.4289], + device='cuda:0'), covar=tensor([0.0431, 0.0405, 0.0493, 0.0487, 0.0357, 0.0370, 0.0298, 0.0261], + device='cuda:0'), in_proj_covar=tensor([0.0044, 0.0040, 0.0041, 0.0060, 0.0041, 0.0036, 0.0037, 0.0034], + device='cuda:0'), out_proj_covar=tensor([1.0317e-04, 9.5919e-05, 9.6571e-05, 1.2879e-04, 9.6208e-05, 8.6543e-05, + 9.1852e-05, 8.2599e-05], device='cuda:0') +2023-03-27 19:45:41,163 INFO [train.py:892] (0/4) Epoch 5, batch 1250, loss[loss=0.2394, simple_loss=0.2786, pruned_loss=0.1001, over 19803.00 frames. 
], tot_loss[loss=0.3015, simple_loss=0.3394, pruned_loss=0.1318, over 3939843.21 frames. ], batch size: 114, lr: 2.87e-02, grad_scale: 16.0 +2023-03-27 19:46:53,148 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.783e+02 6.822e+02 8.338e+02 1.055e+03 2.150e+03, threshold=1.668e+03, percent-clipped=7.0 +2023-03-27 19:47:02,124 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9570, 4.5220, 4.8619, 4.6152, 4.9065, 3.4412, 3.9725, 3.8148], + device='cuda:0'), covar=tensor([0.0185, 0.0125, 0.0112, 0.0138, 0.0111, 0.0662, 0.0840, 0.0483], + device='cuda:0'), in_proj_covar=tensor([0.0060, 0.0073, 0.0070, 0.0080, 0.0071, 0.0095, 0.0106, 0.0087], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 19:47:24,982 INFO [train.py:892] (0/4) Epoch 5, batch 1300, loss[loss=0.2751, simple_loss=0.3162, pruned_loss=0.117, over 19834.00 frames. ], tot_loss[loss=0.3, simple_loss=0.3382, pruned_loss=0.1309, over 3942452.23 frames. ], batch size: 166, lr: 2.86e-02, grad_scale: 16.0 +2023-03-27 19:48:23,003 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.65 vs. limit=2.0 +2023-03-27 19:48:48,213 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8760.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 19:49:09,894 INFO [train.py:892] (0/4) Epoch 5, batch 1350, loss[loss=0.4604, simple_loss=0.4602, pruned_loss=0.2303, over 19388.00 frames. ], tot_loss[loss=0.3017, simple_loss=0.3395, pruned_loss=0.1319, over 3943973.03 frames. ], batch size: 412, lr: 2.86e-02, grad_scale: 16.0 +2023-03-27 19:49:29,125 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5723, 1.9773, 3.2252, 3.0368, 3.2996, 3.9001, 3.8702, 3.7722], + device='cuda:0'), covar=tensor([0.0838, 0.2283, 0.0440, 0.0456, 0.0368, 0.0127, 0.0233, 0.0335], + device='cuda:0'), in_proj_covar=tensor([0.0093, 0.0154, 0.0097, 0.0090, 0.0075, 0.0076, 0.0067, 0.0076], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-03-27 19:50:02,486 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2164, 4.6099, 5.0444, 4.7392, 5.1374, 3.3626, 3.8662, 3.9708], + device='cuda:0'), covar=tensor([0.0193, 0.0131, 0.0101, 0.0128, 0.0110, 0.0610, 0.1059, 0.0462], + device='cuda:0'), in_proj_covar=tensor([0.0060, 0.0072, 0.0070, 0.0079, 0.0070, 0.0095, 0.0105, 0.0087], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 19:50:22,594 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.065e+02 5.700e+02 7.084e+02 8.473e+02 1.458e+03, threshold=1.417e+03, percent-clipped=0.0 +2023-03-27 19:50:58,489 INFO [train.py:892] (0/4) Epoch 5, batch 1400, loss[loss=0.2885, simple_loss=0.3249, pruned_loss=0.1261, over 19834.00 frames. ], tot_loss[loss=0.3007, simple_loss=0.3384, pruned_loss=0.1314, over 3942729.79 frames. 
], batch size: 146, lr: 2.85e-02, grad_scale: 16.0
+2023-03-27 19:50:59,420 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8821.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 19:52:14,545 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.5793, 1.6121, 1.6626, 1.5375, 1.0163, 1.1473, 1.3328, 1.6425],
+ device='cuda:0'), covar=tensor([0.0587, 0.0298, 0.0257, 0.0310, 0.0404, 0.0625, 0.0581, 0.0354],
+ device='cuda:0'), in_proj_covar=tensor([0.0032, 0.0033, 0.0033, 0.0028, 0.0033, 0.0034, 0.0040, 0.0032],
+ device='cuda:0'), out_proj_covar=tensor([6.6379e-05, 6.7138e-05, 6.7874e-05, 5.7134e-05, 6.9164e-05, 7.0376e-05,
+ 8.1108e-05, 6.7388e-05], device='cuda:0')
+2023-03-27 19:52:18,737 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.76 vs. limit=2.0
+2023-03-27 19:52:40,279 INFO [train.py:892] (0/4) Epoch 5, batch 1450, loss[loss=0.2946, simple_loss=0.3312, pruned_loss=0.129, over 19810.00 frames. ], tot_loss[loss=0.2991, simple_loss=0.3375, pruned_loss=0.1304, over 3944948.61 frames. ], batch size: 202, lr: 2.84e-02, grad_scale: 16.0
+2023-03-27 19:53:52,854 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.344e+02 6.588e+02 7.767e+02 1.033e+03 1.538e+03, threshold=1.553e+03, percent-clipped=3.0
+2023-03-27 19:54:06,796 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9378, 5.2508, 5.2134, 5.2375, 5.0528, 5.1947, 4.6705, 4.6996],
+ device='cuda:0'), covar=tensor([0.0327, 0.0290, 0.0572, 0.0370, 0.0535, 0.0564, 0.0503, 0.0925],
+ device='cuda:0'), in_proj_covar=tensor([0.0138, 0.0133, 0.0189, 0.0144, 0.0142, 0.0129, 0.0163, 0.0199],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 19:54:26,589 INFO [train.py:892] (0/4) Epoch 5, batch 1500, loss[loss=0.2383, simple_loss=0.2871, pruned_loss=0.09474, over 19837.00 frames. ], tot_loss[loss=0.296, simple_loss=0.3351, pruned_loss=0.1285, over 3946949.93 frames. ], batch size: 90, lr: 2.84e-02, grad_scale: 16.0
+2023-03-27 19:54:27,612 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8921.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 19:54:40,856 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8927.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 19:55:10,764 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8442, 2.7498, 3.9849, 3.2451, 3.5443, 4.1533, 2.3732, 2.4126],
+ device='cuda:0'), covar=tensor([0.0579, 0.2979, 0.0344, 0.0489, 0.1042, 0.0247, 0.1017, 0.1560],
+ device='cuda:0'), in_proj_covar=tensor([0.0223, 0.0279, 0.0164, 0.0158, 0.0253, 0.0142, 0.0178, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 19:56:10,509 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8969.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 19:56:13,845 INFO [train.py:892] (0/4) Epoch 5, batch 1550, loss[loss=0.2689, simple_loss=0.3022, pruned_loss=0.1178, over 19811.00 frames. ], tot_loss[loss=0.2938, simple_loss=0.3338, pruned_loss=0.1269, over 3947687.03 frames. ], batch size: 149, lr: 2.83e-02, grad_scale: 16.0
+2023-03-27 19:56:45,236 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8986.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 19:56:50,804 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8988.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 19:57:26,090 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.607e+02 5.861e+02 6.933e+02 8.857e+02 2.450e+03, threshold=1.387e+03, percent-clipped=2.0
+2023-03-27 19:57:42,802 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9013.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 19:57:52,730 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6889, 2.4598, 3.4305, 2.5135, 3.1182, 2.4896, 2.7165, 3.2940],
+ device='cuda:0'), covar=tensor([0.0888, 0.0370, 0.0354, 0.0561, 0.0330, 0.0433, 0.0327, 0.0211],
+ device='cuda:0'), in_proj_covar=tensor([0.0045, 0.0041, 0.0042, 0.0061, 0.0043, 0.0037, 0.0039, 0.0035],
+ device='cuda:0'), out_proj_covar=tensor([1.0774e-04, 1.0074e-04, 1.0191e-04, 1.3375e-04, 1.0337e-04, 9.1217e-05,
+ 9.7394e-05, 8.5798e-05], device='cuda:0')
+2023-03-27 19:57:57,840 INFO [train.py:892] (0/4) Epoch 5, batch 1600, loss[loss=0.2813, simple_loss=0.3282, pruned_loss=0.1172, over 19814.00 frames. ], tot_loss[loss=0.2929, simple_loss=0.3331, pruned_loss=0.1264, over 3948702.37 frames. ], batch size: 98, lr: 2.83e-02, grad_scale: 16.0
+2023-03-27 19:58:41,650 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8264, 3.1529, 4.6544, 3.9304, 4.1895, 4.6917, 4.7937, 4.5298],
+ device='cuda:0'), covar=tensor([0.0113, 0.0486, 0.0100, 0.1057, 0.0103, 0.0135, 0.0085, 0.0107],
+ device='cuda:0'), in_proj_covar=tensor([0.0057, 0.0064, 0.0056, 0.0131, 0.0051, 0.0058, 0.0056, 0.0050],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0003, 0.0001, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-03-27 19:58:52,831 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9047.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 19:59:40,270 INFO [train.py:892] (0/4) Epoch 5, batch 1650, loss[loss=0.2917, simple_loss=0.3354, pruned_loss=0.124, over 19662.00 frames. ], tot_loss[loss=0.2944, simple_loss=0.334, pruned_loss=0.1274, over 3948855.80 frames. ], batch size: 58, lr: 2.82e-02, grad_scale: 16.0
+2023-03-27 19:59:47,116 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9074.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:00:51,663 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.078e+02 6.548e+02 7.778e+02 9.936e+02 1.704e+03, threshold=1.556e+03, percent-clipped=1.0
+2023-03-27 20:01:10,857 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9688, 3.0255, 3.9564, 3.3070, 3.4679, 3.7924, 3.6927, 3.7238],
+ device='cuda:0'), covar=tensor([0.0118, 0.0388, 0.0090, 0.1021, 0.0103, 0.0160, 0.0134, 0.0125],
+ device='cuda:0'), in_proj_covar=tensor([0.0058, 0.0064, 0.0056, 0.0132, 0.0051, 0.0058, 0.0057, 0.0050],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0003, 0.0001, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-03-27 20:01:16,359 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9116.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:01:24,662 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7185, 2.4796, 1.3546, 3.2986, 3.0271, 3.1606, 3.2736, 2.6806],
+ device='cuda:0'), covar=tensor([0.0576, 0.0601, 0.2188, 0.0384, 0.0423, 0.0370, 0.0367, 0.0557],
+ device='cuda:0'), in_proj_covar=tensor([0.0093, 0.0079, 0.0110, 0.0088, 0.0077, 0.0067, 0.0078, 0.0090],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 20:01:25,585 INFO [train.py:892] (0/4) Epoch 5, batch 1700, loss[loss=0.2557, simple_loss=0.2908, pruned_loss=0.1103, over 19832.00 frames. ], tot_loss[loss=0.2948, simple_loss=0.3344, pruned_loss=0.1276, over 3946784.45 frames. ], batch size: 146, lr: 2.81e-02, grad_scale: 16.0
+2023-03-27 20:02:41,068 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.2195, 2.2776, 1.7225, 1.5326, 1.9353, 2.3855, 2.3157, 2.4790],
+ device='cuda:0'), covar=tensor([0.0233, 0.0524, 0.0317, 0.0880, 0.0605, 0.0508, 0.0189, 0.0268],
+ device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0035, 0.0036, 0.0052, 0.0052, 0.0034, 0.0027, 0.0032],
+ device='cuda:0'), out_proj_covar=tensor([7.8713e-05, 7.5240e-05, 7.4669e-05, 1.1096e-04, 1.0935e-04, 7.4202e-05,
+ 5.9891e-05, 6.8714e-05], device='cuda:0')
+2023-03-27 20:03:04,910 INFO [train.py:892] (0/4) Epoch 5, batch 1750, loss[loss=0.3127, simple_loss=0.3345, pruned_loss=0.1454, over 19789.00 frames. ], tot_loss[loss=0.2951, simple_loss=0.3343, pruned_loss=0.1279, over 3947273.81 frames. ], batch size: 163, lr: 2.81e-02, grad_scale: 16.0
+2023-03-27 20:03:59,484 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.58 vs. limit=2.0
+2023-03-27 20:04:05,091 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.671e+02 6.292e+02 7.596e+02 9.035e+02 1.874e+03, threshold=1.519e+03, percent-clipped=3.0
+2023-03-27 20:04:33,321 INFO [train.py:892] (0/4) Epoch 5, batch 1800, loss[loss=0.2704, simple_loss=0.3169, pruned_loss=0.112, over 19693.00 frames. ], tot_loss[loss=0.2957, simple_loss=0.3347, pruned_loss=0.1283, over 3947730.57 frames. ], batch size: 59, lr: 2.80e-02, grad_scale: 16.0
+2023-03-27 20:05:41,955 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0501, 3.3638, 3.4845, 4.6938, 2.7922, 3.6755, 3.0056, 2.1920],
+ device='cuda:0'), covar=tensor([0.0508, 0.2426, 0.0908, 0.0098, 0.2242, 0.0466, 0.0942, 0.2163],
+ device='cuda:0'), in_proj_covar=tensor([0.0143, 0.0310, 0.0182, 0.0101, 0.0217, 0.0136, 0.0164, 0.0195],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 20:06:00,367 INFO [train.py:892] (0/4) Epoch 5, batch 1850, loss[loss=0.2858, simple_loss=0.3399, pruned_loss=0.1158, over 19810.00 frames. ], tot_loss[loss=0.2955, simple_loss=0.3351, pruned_loss=0.1279, over 3949598.14 frames. ], batch size: 57, lr: 2.80e-02, grad_scale: 16.0
+2023-03-27 20:06:07,744 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-5.pt
+2023-03-27 20:06:59,472 INFO [train.py:892] (0/4) Epoch 6, batch 0, loss[loss=0.2566, simple_loss=0.3019, pruned_loss=0.1056, over 19733.00 frames. ], tot_loss[loss=0.2566, simple_loss=0.3019, pruned_loss=0.1056, over 19733.00 frames. ], batch size: 118, lr: 2.61e-02, grad_scale: 16.0
+2023-03-27 20:06:59,473 INFO [train.py:917] (0/4) Computing validation loss
+2023-03-27 20:07:11,550 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4984, 4.2688, 4.2868, 4.1158, 4.5206, 3.2610, 3.5582, 3.5638],
+ device='cuda:0'), covar=tensor([0.0151, 0.0151, 0.0153, 0.0159, 0.0116, 0.0712, 0.0938, 0.0442],
+ device='cuda:0'), in_proj_covar=tensor([0.0061, 0.0075, 0.0075, 0.0083, 0.0075, 0.0098, 0.0111, 0.0093],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 20:07:25,895 INFO [train.py:926] (0/4) Epoch 6, validation: loss=0.2048, simple_loss=0.2829, pruned_loss=0.06328, over 2883724.00 frames.
+2023-03-27 20:07:25,896 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB
+2023-03-27 20:07:45,118 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9283.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:07:53,767 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0
+2023-03-27 20:08:30,071 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.239e+02 6.201e+02 7.498e+02 9.060e+02 1.792e+03, threshold=1.500e+03, percent-clipped=2.0
+2023-03-27 20:09:14,665 INFO [train.py:892] (0/4) Epoch 6, batch 50, loss[loss=0.2339, simple_loss=0.2806, pruned_loss=0.09361, over 19727.00 frames. ], tot_loss[loss=0.2775, simple_loss=0.3187, pruned_loss=0.1182, over 891611.84 frames. ], batch size: 63, lr: 2.60e-02, grad_scale: 16.0
+2023-03-27 20:09:22,913 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9038, 5.1861, 5.1803, 5.2044, 4.8753, 5.1631, 4.6256, 4.7257],
+ device='cuda:0'), covar=tensor([0.0351, 0.0334, 0.0616, 0.0351, 0.0494, 0.0541, 0.0584, 0.0879],
+ device='cuda:0'), in_proj_covar=tensor([0.0138, 0.0132, 0.0191, 0.0144, 0.0141, 0.0125, 0.0162, 0.0198],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 20:09:48,472 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9342.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:10:17,663 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8776, 1.8702, 3.1341, 3.1184, 3.4149, 3.6064, 3.6558, 3.7805],
+ device='cuda:0'), covar=tensor([0.0576, 0.2168, 0.0472, 0.0433, 0.0339, 0.0157, 0.0194, 0.0216],
+ device='cuda:0'), in_proj_covar=tensor([0.0098, 0.0154, 0.0101, 0.0094, 0.0078, 0.0076, 0.0071, 0.0075],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-03-27 20:10:46,059 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9369.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:10:54,302 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0
+2023-03-27 20:10:59,009 INFO [train.py:892] (0/4) Epoch 6, batch 100, loss[loss=0.264, simple_loss=0.3061, pruned_loss=0.1109, over 19878.00 frames. ], tot_loss[loss=0.2777, simple_loss=0.3199, pruned_loss=0.1177, over 1570575.51 frames. ], batch size: 136, lr: 2.60e-02, grad_scale: 16.0
+2023-03-27 20:11:01,953 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6422, 2.2967, 3.0142, 2.0674, 2.7896, 2.1379, 2.3956, 2.8425],
+ device='cuda:0'), covar=tensor([0.0547, 0.0605, 0.0372, 0.0596, 0.0276, 0.0406, 0.0365, 0.0197],
+ device='cuda:0'), in_proj_covar=tensor([0.0045, 0.0040, 0.0042, 0.0061, 0.0043, 0.0036, 0.0039, 0.0034],
+ device='cuda:0'), out_proj_covar=tensor([1.0898e-04, 1.0054e-04, 1.0347e-04, 1.3792e-04, 1.0338e-04, 9.1075e-05,
+ 9.9193e-05, 8.4669e-05], device='cuda:0')
+2023-03-27 20:12:00,061 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.134e+02 5.626e+02 6.778e+02 8.697e+02 1.693e+03, threshold=1.356e+03, percent-clipped=2.0
+2023-03-27 20:12:20,432 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9414.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:12:24,200 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9416.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:12:43,770 INFO [train.py:892] (0/4) Epoch 6, batch 150, loss[loss=0.2739, simple_loss=0.3186, pruned_loss=0.1146, over 19850.00 frames. ], tot_loss[loss=0.2827, simple_loss=0.3239, pruned_loss=0.1207, over 2097219.53 frames. ], batch size: 60, lr: 2.59e-02, grad_scale: 16.0
+2023-03-27 20:12:59,331 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4787, 1.7319, 2.7615, 2.9076, 3.1536, 3.2356, 3.1989, 3.2700],
+ device='cuda:0'), covar=tensor([0.0751, 0.2310, 0.0549, 0.0423, 0.0307, 0.0169, 0.0307, 0.0288],
+ device='cuda:0'), in_proj_covar=tensor([0.0099, 0.0156, 0.0101, 0.0095, 0.0079, 0.0077, 0.0072, 0.0077],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-03-27 20:13:01,806 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.96 vs. limit=2.0
+2023-03-27 20:14:08,336 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9464.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:14:32,707 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9475.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:14:33,655 INFO [train.py:892] (0/4) Epoch 6, batch 200, loss[loss=0.474, simple_loss=0.4712, pruned_loss=0.2384, over 19431.00 frames. ], tot_loss[loss=0.2852, simple_loss=0.3258, pruned_loss=0.1223, over 2508329.98 frames. ], batch size: 431, lr: 2.59e-02, grad_scale: 16.0
+2023-03-27 20:15:34,372 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.804e+02 5.747e+02 7.100e+02 9.043e+02 1.475e+03, threshold=1.420e+03, percent-clipped=5.0
+2023-03-27 20:16:16,343 INFO [train.py:892] (0/4) Epoch 6, batch 250, loss[loss=0.4375, simple_loss=0.443, pruned_loss=0.216, over 19612.00 frames. ], tot_loss[loss=0.2884, simple_loss=0.3285, pruned_loss=0.1241, over 2826786.53 frames. ], batch size: 387, lr: 2.58e-02, grad_scale: 16.0
+2023-03-27 20:18:00,755 INFO [train.py:892] (0/4) Epoch 6, batch 300, loss[loss=0.2693, simple_loss=0.3089, pruned_loss=0.1148, over 19795.00 frames. ], tot_loss[loss=0.2907, simple_loss=0.3308, pruned_loss=0.1253, over 3075639.71 frames. ], batch size: 185, lr: 2.58e-02, grad_scale: 16.0
+2023-03-27 20:18:19,739 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9583.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:18:21,640 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9584.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:19:02,334 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.913e+02 5.609e+02 7.067e+02 9.111e+02 1.524e+03, threshold=1.413e+03, percent-clipped=2.0
+2023-03-27 20:19:49,292 INFO [train.py:892] (0/4) Epoch 6, batch 350, loss[loss=0.2689, simple_loss=0.3103, pruned_loss=0.1138, over 19795.00 frames. ], tot_loss[loss=0.2866, simple_loss=0.3279, pruned_loss=0.1226, over 3269342.77 frames. ], batch size: 65, lr: 2.57e-02, grad_scale: 16.0
+2023-03-27 20:20:00,121 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9631.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:20:22,969 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9642.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:20:28,938 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9645.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:20:57,499 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9658.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:21:18,727 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9669.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:21:32,070 INFO [train.py:892] (0/4) Epoch 6, batch 400, loss[loss=0.2558, simple_loss=0.2953, pruned_loss=0.1082, over 19855.00 frames. ], tot_loss[loss=0.2842, simple_loss=0.3256, pruned_loss=0.1214, over 3421667.55 frames. ], batch size: 118, lr: 2.57e-02, grad_scale: 16.0
+2023-03-27 20:21:32,908 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8831, 4.2125, 4.4722, 4.1039, 3.8754, 4.2760, 4.0290, 4.5861],
+ device='cuda:0'), covar=tensor([0.1363, 0.0290, 0.0322, 0.0363, 0.0754, 0.0391, 0.0356, 0.0262],
+ device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0150, 0.0145, 0.0143, 0.0150, 0.0140, 0.0129, 0.0127],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 20:22:03,521 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9690.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:22:32,708 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.696e+02 5.768e+02 7.171e+02 9.130e+02 1.746e+03, threshold=1.434e+03, percent-clipped=4.0
+2023-03-27 20:22:58,022 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9717.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:23:02,212 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9719.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:23:16,551 INFO [train.py:892] (0/4) Epoch 6, batch 450, loss[loss=0.2669, simple_loss=0.3109, pruned_loss=0.1114, over 19875.00 frames. ], tot_loss[loss=0.2851, simple_loss=0.3267, pruned_loss=0.1218, over 3539381.78 frames. ], batch size: 159, lr: 2.56e-02, grad_scale: 16.0
+2023-03-27 20:23:54,062 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9743.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:24:48,808 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4326, 2.2216, 2.8873, 2.0003, 2.6957, 1.9594, 2.4244, 2.7506],
+ device='cuda:0'), covar=tensor([0.0484, 0.0418, 0.0333, 0.0591, 0.0329, 0.0464, 0.0338, 0.0167],
+ device='cuda:0'), in_proj_covar=tensor([0.0044, 0.0040, 0.0044, 0.0062, 0.0044, 0.0037, 0.0039, 0.0035],
+ device='cuda:0'), out_proj_covar=tensor([1.0922e-04, 1.0196e-04, 1.0924e-04, 1.4229e-04, 1.0721e-04, 9.4856e-05,
+ 1.0001e-04, 8.7823e-05], device='cuda:0')
+2023-03-27 20:24:50,416 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9770.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:25:02,467 INFO [train.py:892] (0/4) Epoch 6, batch 500, loss[loss=0.2838, simple_loss=0.3388, pruned_loss=0.1144, over 19734.00 frames. ], tot_loss[loss=0.2858, simple_loss=0.3273, pruned_loss=0.1221, over 3630092.20 frames. ], batch size: 50, lr: 2.56e-02, grad_scale: 16.0
+2023-03-27 20:25:50,033 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.9193, 2.2953, 1.9677, 1.2516, 1.8681, 2.4178, 2.2711, 2.2271],
+ device='cuda:0'), covar=tensor([0.0276, 0.0274, 0.0237, 0.0767, 0.0511, 0.0219, 0.0143, 0.0168],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0036, 0.0037, 0.0053, 0.0053, 0.0034, 0.0028, 0.0032],
+ device='cuda:0'), out_proj_covar=tensor([7.7344e-05, 7.8522e-05, 7.7528e-05, 1.1541e-04, 1.1383e-04, 7.5297e-05,
+ 6.1941e-05, 6.8323e-05], device='cuda:0')
+2023-03-27 20:26:01,099 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9804.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:26:01,919 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.739e+02 6.410e+02 7.467e+02 9.319e+02 1.836e+03, threshold=1.493e+03, percent-clipped=5.0
+2023-03-27 20:26:44,456 INFO [train.py:892] (0/4) Epoch 6, batch 550, loss[loss=0.3209, simple_loss=0.377, pruned_loss=0.1324, over 19557.00 frames. ], tot_loss[loss=0.2861, simple_loss=0.3271, pruned_loss=0.1225, over 3700996.76 frames. ], batch size: 53, lr: 2.55e-02, grad_scale: 16.0
+2023-03-27 20:28:19,270 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0764, 3.8023, 5.0181, 4.1348, 4.2512, 4.7655, 4.9078, 4.4528],
+ device='cuda:0'), covar=tensor([0.0093, 0.0364, 0.0065, 0.1164, 0.0094, 0.0126, 0.0085, 0.0103],
+ device='cuda:0'), in_proj_covar=tensor([0.0060, 0.0068, 0.0057, 0.0132, 0.0052, 0.0060, 0.0059, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0003, 0.0001, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-03-27 20:28:30,654 INFO [train.py:892] (0/4) Epoch 6, batch 600, loss[loss=0.3882, simple_loss=0.4304, pruned_loss=0.173, over 18953.00 frames. ], tot_loss[loss=0.2865, simple_loss=0.3278, pruned_loss=0.1227, over 3754310.61 frames. ], batch size: 514, lr: 2.54e-02, grad_scale: 16.0
+2023-03-27 20:28:47,679 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0575, 3.2470, 3.6240, 4.2296, 2.6728, 3.6041, 2.5446, 2.3821],
+ device='cuda:0'), covar=tensor([0.0299, 0.2522, 0.0674, 0.0112, 0.2116, 0.0347, 0.1023, 0.1889],
+ device='cuda:0'), in_proj_covar=tensor([0.0143, 0.0319, 0.0189, 0.0104, 0.0221, 0.0137, 0.0173, 0.0197],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 20:29:32,381 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.880e+02 5.938e+02 7.019e+02 8.775e+02 1.647e+03, threshold=1.404e+03, percent-clipped=2.0
+2023-03-27 20:30:16,201 INFO [train.py:892] (0/4) Epoch 6, batch 650, loss[loss=0.2602, simple_loss=0.3001, pruned_loss=0.1101, over 19734.00 frames. ], tot_loss[loss=0.2847, simple_loss=0.3263, pruned_loss=0.1215, over 3798282.88 frames. ], batch size: 179, lr: 2.54e-02, grad_scale: 16.0
+2023-03-27 20:30:45,610 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9940.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:30:51,422 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4291, 3.6943, 3.9274, 4.9952, 2.8742, 3.7739, 3.2295, 2.6404],
+ device='cuda:0'), covar=tensor([0.0325, 0.2370, 0.0692, 0.0091, 0.2235, 0.0429, 0.0901, 0.1934],
+ device='cuda:0'), in_proj_covar=tensor([0.0141, 0.0312, 0.0186, 0.0103, 0.0216, 0.0133, 0.0169, 0.0195],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 20:32:00,348 INFO [train.py:892] (0/4) Epoch 6, batch 700, loss[loss=0.2422, simple_loss=0.3003, pruned_loss=0.09204, over 19736.00 frames. ], tot_loss[loss=0.2839, simple_loss=0.3261, pruned_loss=0.1209, over 3832202.19 frames. ], batch size: 47, lr: 2.53e-02, grad_scale: 16.0
+2023-03-27 20:32:33,019 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9407, 3.9342, 3.9824, 3.7160, 4.0608, 3.0783, 3.1781, 2.6579],
+ device='cuda:0'), covar=tensor([0.0294, 0.0218, 0.0208, 0.0212, 0.0206, 0.0840, 0.1246, 0.0997],
+ device='cuda:0'), in_proj_covar=tensor([0.0064, 0.0080, 0.0079, 0.0088, 0.0080, 0.0102, 0.0114, 0.0096],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-27 20:32:51,304 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-10000.pt
+2023-03-27 20:33:05,035 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.264e+02 6.107e+02 7.493e+02 9.243e+02 1.709e+03, threshold=1.499e+03, percent-clipped=3.0
+2023-03-27 20:33:24,751 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10014.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:33:49,829 INFO [train.py:892] (0/4) Epoch 6, batch 750, loss[loss=0.2522, simple_loss=0.3058, pruned_loss=0.09934, over 19599.00 frames. ], tot_loss[loss=0.2822, simple_loss=0.3247, pruned_loss=0.1198, over 3857020.94 frames. ], batch size: 42, lr: 2.53e-02, grad_scale: 16.0
+2023-03-27 20:34:17,074 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10039.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:35:04,707 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10062.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 20:35:21,477 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8907, 3.0429, 3.4439, 4.1140, 2.5808, 3.3243, 2.7388, 2.3803],
+ device='cuda:0'), covar=tensor([0.0356, 0.2551, 0.0735, 0.0119, 0.2103, 0.0447, 0.0900, 0.1797],
+ device='cuda:0'), in_proj_covar=tensor([0.0143, 0.0316, 0.0188, 0.0104, 0.0216, 0.0134, 0.0168, 0.0196],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 20:35:23,426 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10070.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:35:34,019 INFO [train.py:892] (0/4) Epoch 6, batch 800, loss[loss=0.3483, simple_loss=0.3807, pruned_loss=0.1579, over 19756.00 frames. ], tot_loss[loss=0.2836, simple_loss=0.3259, pruned_loss=0.1207, over 3876973.05 frames. ], batch size: 276, lr: 2.52e-02, grad_scale: 16.0
+2023-03-27 20:35:47,709 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5834, 4.4148, 4.9649, 4.7814, 4.7801, 4.1562, 4.6694, 4.5311],
+ device='cuda:0'), covar=tensor([0.1268, 0.1203, 0.0914, 0.0901, 0.0840, 0.1241, 0.2089, 0.2537],
+ device='cuda:0'), in_proj_covar=tensor([0.0205, 0.0176, 0.0244, 0.0191, 0.0188, 0.0185, 0.0235, 0.0277],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-27 20:36:21,590 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10099.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:36:23,893 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10100.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 20:36:35,377 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.822e+02 5.825e+02 7.423e+02 9.383e+02 2.108e+03, threshold=1.485e+03, percent-clipped=8.0
+2023-03-27 20:36:46,038 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.3102, 2.4545, 2.5876, 1.9782, 2.4612, 2.1721, 2.5328, 2.5895],
+ device='cuda:0'), covar=tensor([0.0470, 0.0277, 0.0370, 0.0620, 0.0392, 0.0288, 0.0316, 0.0211],
+ device='cuda:0'), in_proj_covar=tensor([0.0045, 0.0041, 0.0047, 0.0067, 0.0044, 0.0038, 0.0041, 0.0037],
+ device='cuda:0'), out_proj_covar=tensor([1.1381e-04, 1.0580e-04, 1.1838e-04, 1.5445e-04, 1.0988e-04, 9.9869e-05,
+ 1.0679e-04, 9.4468e-05], device='cuda:0')
+2023-03-27 20:37:01,042 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10118.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:37:12,853 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10123.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 20:37:17,775 INFO [train.py:892] (0/4) Epoch 6, batch 850, loss[loss=0.2974, simple_loss=0.3292, pruned_loss=0.1327, over 19875.00 frames. ], tot_loss[loss=0.2842, simple_loss=0.3265, pruned_loss=0.1209, over 3893569.63 frames. ], batch size: 159, lr: 2.52e-02, grad_scale: 16.0
+2023-03-27 20:39:04,575 INFO [train.py:892] (0/4) Epoch 6, batch 900, loss[loss=0.2869, simple_loss=0.3348, pruned_loss=0.1195, over 19718.00 frames. ], tot_loss[loss=0.2815, simple_loss=0.3243, pruned_loss=0.1193, over 3907242.65 frames. ], batch size: 54, lr: 2.51e-02, grad_scale: 16.0
+2023-03-27 20:39:50,064 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.64 vs. limit=2.0
+2023-03-27 20:40:01,064 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0
+2023-03-27 20:40:03,603 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.864e+02 5.709e+02 6.647e+02 8.458e+02 1.607e+03, threshold=1.329e+03, percent-clipped=2.0
+2023-03-27 20:40:48,894 INFO [train.py:892] (0/4) Epoch 6, batch 950, loss[loss=0.2714, simple_loss=0.3017, pruned_loss=0.1206, over 19794.00 frames. ], tot_loss[loss=0.2809, simple_loss=0.3238, pruned_loss=0.119, over 3916939.03 frames. ], batch size: 120, lr: 2.51e-02, grad_scale: 16.0
+2023-03-27 20:41:18,897 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10240.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:42:33,723 INFO [train.py:892] (0/4) Epoch 6, batch 1000, loss[loss=0.3438, simple_loss=0.3824, pruned_loss=0.1526, over 19756.00 frames. ], tot_loss[loss=0.2806, simple_loss=0.3237, pruned_loss=0.1187, over 3924527.54 frames. ], batch size: 321, lr: 2.50e-02, grad_scale: 16.0
+2023-03-27 20:43:00,864 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10288.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:43:16,089 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0792, 3.0963, 4.0822, 3.6857, 3.7224, 4.1454, 4.0290, 4.0393],
+ device='cuda:0'), covar=tensor([0.0115, 0.0393, 0.0090, 0.0743, 0.0102, 0.0161, 0.0123, 0.0091],
+ device='cuda:0'), in_proj_covar=tensor([0.0061, 0.0069, 0.0057, 0.0132, 0.0051, 0.0060, 0.0058, 0.0050],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0004, 0.0001, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-03-27 20:43:34,973 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.462e+02 6.368e+02 7.709e+02 9.244e+02 1.500e+03, threshold=1.542e+03, percent-clipped=2.0
+2023-03-27 20:43:55,013 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10314.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:44:20,093 INFO [train.py:892] (0/4) Epoch 6, batch 1050, loss[loss=0.3618, simple_loss=0.3821, pruned_loss=0.1708, over 19632.00 frames. ], tot_loss[loss=0.2805, simple_loss=0.3235, pruned_loss=0.1188, over 3930560.62 frames. ], batch size: 351, lr: 2.50e-02, grad_scale: 16.0
+2023-03-27 20:45:36,762 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10362.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:46:05,028 INFO [train.py:892] (0/4) Epoch 6, batch 1100, loss[loss=0.2202, simple_loss=0.2649, pruned_loss=0.08778, over 19886.00 frames. ], tot_loss[loss=0.2804, simple_loss=0.3236, pruned_loss=0.1186, over 3933813.50 frames. ], batch size: 134, lr: 2.49e-02, grad_scale: 16.0
+2023-03-27 20:46:47,168 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10395.0, num_to_drop=1, layers_to_drop={3}
+2023-03-27 20:46:55,553 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10399.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:47:09,514 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.844e+02 6.468e+02 7.471e+02 8.887e+02 1.453e+03, threshold=1.494e+03, percent-clipped=0.0
+2023-03-27 20:47:16,047 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10409.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:47:35,445 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10418.0, num_to_drop=1, layers_to_drop={2}
+2023-03-27 20:47:51,063 INFO [train.py:892] (0/4) Epoch 6, batch 1150, loss[loss=0.2968, simple_loss=0.3273, pruned_loss=0.1332, over 19705.00 frames. ], tot_loss[loss=0.2794, simple_loss=0.3228, pruned_loss=0.118, over 3937743.49 frames. ], batch size: 60, lr: 2.49e-02, grad_scale: 16.0
+2023-03-27 20:48:01,449 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2955, 3.4218, 4.8937, 3.5564, 4.2066, 4.8653, 2.5889, 2.7933],
+ device='cuda:0'), covar=tensor([0.0464, 0.2188, 0.0236, 0.0495, 0.1003, 0.0275, 0.1161, 0.1585],
+ device='cuda:0'), in_proj_covar=tensor([0.0241, 0.0290, 0.0185, 0.0170, 0.0270, 0.0163, 0.0200, 0.0205],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 20:48:36,870 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10447.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:48:51,884 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10454.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:49:22,987 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0
+2023-03-27 20:49:24,660 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10470.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:49:35,675 INFO [train.py:892] (0/4) Epoch 6, batch 1200, loss[loss=0.2405, simple_loss=0.2901, pruned_loss=0.09542, over 19922.00 frames. ], tot_loss[loss=0.2787, simple_loss=0.3223, pruned_loss=0.1175, over 3941519.54 frames. ], batch size: 45, lr: 2.49e-02, grad_scale: 8.0
+2023-03-27 20:50:20,787 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4165, 2.5239, 3.0442, 3.4177, 2.2237, 2.7933, 2.3005, 2.0252],
+ device='cuda:0'), covar=tensor([0.0404, 0.2775, 0.0841, 0.0240, 0.2374, 0.0577, 0.1091, 0.2017],
+ device='cuda:0'), in_proj_covar=tensor([0.0149, 0.0321, 0.0194, 0.0108, 0.0223, 0.0142, 0.0171, 0.0200],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 20:50:41,659 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.428e+02 5.589e+02 6.626e+02 8.703e+02 1.747e+03, threshold=1.325e+03, percent-clipped=3.0
+2023-03-27 20:50:58,418 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10515.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 20:51:21,746 INFO [train.py:892] (0/4) Epoch 6, batch 1250, loss[loss=0.2858, simple_loss=0.3227, pruned_loss=0.1245, over 19804.00 frames. ], tot_loss[loss=0.2783, simple_loss=0.3217, pruned_loss=0.1174, over 3943545.84 frames. ], batch size: 181, lr: 2.48e-02, grad_scale: 8.0
+2023-03-27 20:51:59,943 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4883, 3.3733, 4.8941, 3.6698, 4.2402, 4.8098, 2.6303, 2.8282],
+ device='cuda:0'), covar=tensor([0.0435, 0.2560, 0.0228, 0.0510, 0.0967, 0.0272, 0.1055, 0.1605],
+ device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0290, 0.0186, 0.0168, 0.0269, 0.0164, 0.0200, 0.0205],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 20:53:08,407 INFO [train.py:892] (0/4) Epoch 6, batch 1300, loss[loss=0.2821, simple_loss=0.323, pruned_loss=0.1206, over 19787.00 frames. ], tot_loss[loss=0.2769, simple_loss=0.3213, pruned_loss=0.1162, over 3942946.56 frames. ], batch size: 168, lr: 2.48e-02, grad_scale: 8.0
+2023-03-27 20:54:11,551 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.477e+02 5.973e+02 6.933e+02 8.304e+02 2.000e+03, threshold=1.387e+03, percent-clipped=2.0
+2023-03-27 20:54:51,939 INFO [train.py:892] (0/4) Epoch 6, batch 1350, loss[loss=0.2371, simple_loss=0.2948, pruned_loss=0.08971, over 19902.00 frames. ], tot_loss[loss=0.2776, simple_loss=0.3219, pruned_loss=0.1167, over 3944444.27 frames. ], batch size: 116, lr: 2.47e-02, grad_scale: 8.0
+2023-03-27 20:56:39,324 INFO [train.py:892] (0/4) Epoch 6, batch 1400, loss[loss=0.2686, simple_loss=0.3039, pruned_loss=0.1167, over 19768.00 frames. ], tot_loss[loss=0.2783, simple_loss=0.3217, pruned_loss=0.1175, over 3945887.65 frames. ], batch size: 125, lr: 2.47e-02, grad_scale: 8.0
+2023-03-27 20:57:20,989 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10695.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:57:46,682 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.801e+02 6.022e+02 7.479e+02 9.663e+02 1.752e+03, threshold=1.496e+03, percent-clipped=3.0
+2023-03-27 20:58:11,769 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10718.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 20:58:26,786 INFO [train.py:892] (0/4) Epoch 6, batch 1450, loss[loss=0.2684, simple_loss=0.3032, pruned_loss=0.1168, over 19779.00 frames. ], tot_loss[loss=0.2779, simple_loss=0.3214, pruned_loss=0.1172, over 3947154.15 frames. ], batch size: 116, lr: 2.46e-02, grad_scale: 8.0
+2023-03-27 20:58:43,264 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1627, 4.3574, 4.7452, 4.2801, 4.1597, 4.6091, 4.4472, 4.9565],
+ device='cuda:0'), covar=tensor([0.1196, 0.0299, 0.0311, 0.0388, 0.0541, 0.0349, 0.0307, 0.0236],
+ device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0155, 0.0149, 0.0152, 0.0151, 0.0146, 0.0133, 0.0134],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 20:58:55,931 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.58 vs. limit=2.0
+2023-03-27 20:59:03,244 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10743.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:59:06,544 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10744.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:59:50,228 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10765.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 20:59:52,099 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10766.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 21:00:09,391 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10774.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:00:12,519 INFO [train.py:892] (0/4) Epoch 6, batch 1500, loss[loss=0.2392, simple_loss=0.2941, pruned_loss=0.09208, over 19591.00 frames. ], tot_loss[loss=0.2784, simple_loss=0.3219, pruned_loss=0.1174, over 3947339.33 frames. ], batch size: 45, lr: 2.46e-02, grad_scale: 8.0
+2023-03-27 21:01:14,052 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10805.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:01:16,851 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.160e+02 5.698e+02 7.148e+02 8.817e+02 1.909e+03, threshold=1.430e+03, percent-clipped=1.0
+2023-03-27 21:01:25,510 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10810.0, num_to_drop=1, layers_to_drop={3}
+2023-03-27 21:01:57,109 INFO [train.py:892] (0/4) Epoch 6, batch 1550, loss[loss=0.3729, simple_loss=0.3959, pruned_loss=0.175, over 19642.00 frames. ], tot_loss[loss=0.2791, simple_loss=0.3225, pruned_loss=0.1179, over 3947940.87 frames. ], batch size: 351, lr: 2.45e-02, grad_scale: 8.0
+2023-03-27 21:02:15,156 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10835.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:02:41,086 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6103, 4.9475, 4.8757, 4.8755, 4.5148, 4.6990, 4.3886, 4.3747],
+ device='cuda:0'), covar=tensor([0.0384, 0.0347, 0.0601, 0.0448, 0.0699, 0.0757, 0.0583, 0.0969],
+ device='cuda:0'), in_proj_covar=tensor([0.0145, 0.0141, 0.0194, 0.0156, 0.0150, 0.0139, 0.0171, 0.0205],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:0')
+2023-03-27 21:03:41,428 INFO [train.py:892] (0/4) Epoch 6, batch 1600, loss[loss=0.3002, simple_loss=0.3361, pruned_loss=0.1322, over 19694.00 frames. ], tot_loss[loss=0.2781, simple_loss=0.3216, pruned_loss=0.1173, over 3948257.59 frames. ], batch size: 265, lr: 2.45e-02, grad_scale: 8.0
+2023-03-27 21:04:01,452 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10885.0, num_to_drop=1, layers_to_drop={1}
+2023-03-27 21:04:44,769 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.321e+02 5.550e+02 6.735e+02 8.191e+02 1.432e+03, threshold=1.347e+03, percent-clipped=1.0
+2023-03-27 21:05:25,967 INFO [train.py:892] (0/4) Epoch 6, batch 1650, loss[loss=0.2845, simple_loss=0.3498, pruned_loss=0.1096, over 19540.00 frames. ], tot_loss[loss=0.2785, simple_loss=0.3224, pruned_loss=0.1173, over 3947988.19 frames. ], batch size: 54, lr: 2.44e-02, grad_scale: 8.0
+2023-03-27 21:05:32,064 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0
+2023-03-27 21:06:07,972 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10946.0, num_to_drop=1, layers_to_drop={3}
+2023-03-27 21:06:43,993 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9169, 3.0898, 3.4990, 4.1209, 2.6660, 3.2231, 2.7073, 2.3520],
+ device='cuda:0'), covar=tensor([0.0370, 0.2725, 0.0775, 0.0138, 0.2171, 0.0491, 0.0964, 0.2108],
+ device='cuda:0'), in_proj_covar=tensor([0.0153, 0.0320, 0.0194, 0.0107, 0.0225, 0.0141, 0.0174, 0.0203],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 21:06:45,954 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7933, 2.5551, 3.1070, 2.2202, 2.9907, 2.2740, 2.5360, 3.1479],
+ device='cuda:0'), covar=tensor([0.0515, 0.0425, 0.0370, 0.0623, 0.0350, 0.0413, 0.0362, 0.0214],
+ device='cuda:0'), in_proj_covar=tensor([0.0046, 0.0045, 0.0049, 0.0068, 0.0046, 0.0041, 0.0042, 0.0037],
+ device='cuda:0'), out_proj_covar=tensor([1.1995e-04, 1.1979e-04, 1.2696e-04, 1.6337e-04, 1.1641e-04, 1.1091e-04,
+ 1.1339e-04, 9.7601e-05], device='cuda:0')
+2023-03-27 21:06:56,396 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8815, 2.7148, 1.6315, 3.4968, 3.0207, 3.3811, 3.4603, 2.7526],
+ device='cuda:0'), covar=tensor([0.0512, 0.0556, 0.1247, 0.0371, 0.0461, 0.0325, 0.0448, 0.0560],
+ device='cuda:0'), in_proj_covar=tensor([0.0101, 0.0092, 0.0117, 0.0098, 0.0084, 0.0075, 0.0089, 0.0101],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 21:07:11,138 INFO [train.py:892] (0/4) Epoch 6, batch 1700, loss[loss=0.2461, simple_loss=0.3113, pruned_loss=0.09049, over 19731.00 frames. ], tot_loss[loss=0.2795, simple_loss=0.3234, pruned_loss=0.1178, over 3947244.68 frames. ], batch size: 52, lr: 2.44e-02, grad_scale: 8.0
+2023-03-27 21:07:42,685 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.97 vs. limit=2.0
+2023-03-27 21:08:17,554 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.223e+02 5.843e+02 7.049e+02 9.611e+02 2.273e+03, threshold=1.410e+03, percent-clipped=6.0
+2023-03-27 21:08:51,973 INFO [train.py:892] (0/4) Epoch 6, batch 1750, loss[loss=0.233, simple_loss=0.2894, pruned_loss=0.08836, over 19483.00 frames. ], tot_loss[loss=0.2766, simple_loss=0.3205, pruned_loss=0.1163, over 3948094.69 frames. ], batch size: 43, lr: 2.43e-02, grad_scale: 8.0
+2023-03-27 21:09:23,959 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9249, 2.6707, 3.2296, 2.1814, 3.2268, 2.4433, 2.8020, 3.3739],
+ device='cuda:0'), covar=tensor([0.0422, 0.0310, 0.0512, 0.0596, 0.0292, 0.0334, 0.0292, 0.0143],
+ device='cuda:0'), in_proj_covar=tensor([0.0046, 0.0045, 0.0049, 0.0068, 0.0046, 0.0041, 0.0042, 0.0037],
+ device='cuda:0'), out_proj_covar=tensor([1.2101e-04, 1.1856e-04, 1.2583e-04, 1.6272e-04, 1.1667e-04, 1.1015e-04,
+ 1.1326e-04, 9.6706e-05], device='cuda:0')
+2023-03-27 21:10:02,999 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11065.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:10:19,209 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0
+2023-03-27 21:10:21,302 INFO [train.py:892] (0/4) Epoch 6, batch 1800, loss[loss=0.2383, simple_loss=0.2802, pruned_loss=0.09825, over 19828.00 frames. ], tot_loss[loss=0.2782, simple_loss=0.3221, pruned_loss=0.1172, over 3945274.81 frames. ], batch size: 128, lr: 2.43e-02, grad_scale: 8.0
+2023-03-27 21:10:23,985 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.68 vs. limit=2.0
+2023-03-27 21:10:41,986 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.76 vs. limit=2.0
+2023-03-27 21:11:02,720 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11100.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:11:13,682 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.857e+02 5.952e+02 7.096e+02 9.277e+02 1.982e+03, threshold=1.419e+03, percent-clipped=4.0
+2023-03-27 21:11:18,973 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11110.0, num_to_drop=1, layers_to_drop={2}
+2023-03-27 21:11:23,875 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11113.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:11:44,928 INFO [train.py:892] (0/4) Epoch 6, batch 1850, loss[loss=0.2998, simple_loss=0.3502, pruned_loss=0.1247, over 19819.00 frames. ], tot_loss[loss=0.2794, simple_loss=0.3241, pruned_loss=0.1173, over 3945981.14 frames. ], batch size: 57, lr: 2.42e-02, grad_scale: 8.0
+2023-03-27 21:11:52,388 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-6.pt
+2023-03-27 21:12:41,541 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11130.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:12:42,780 INFO [train.py:892] (0/4) Epoch 7, batch 0, loss[loss=0.3122, simple_loss=0.3432, pruned_loss=0.1406, over 19776.00 frames. ], tot_loss[loss=0.3122, simple_loss=0.3432, pruned_loss=0.1406, over 19776.00 frames. ], batch size: 233, lr: 2.27e-02, grad_scale: 8.0
+2023-03-27 21:12:42,781 INFO [train.py:917] (0/4) Computing validation loss
+2023-03-27 21:13:10,591 INFO [train.py:926] (0/4) Epoch 7, validation: loss=0.1961, simple_loss=0.2755, pruned_loss=0.05831, over 2883724.00 frames.
+2023-03-27 21:13:10,592 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB
+2023-03-27 21:14:09,092 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11158.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:14:09,388 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4389, 3.3721, 1.7024, 4.0207, 3.6384, 4.0433, 4.1868, 3.2412],
+ device='cuda:0'), covar=tensor([0.0482, 0.0461, 0.1544, 0.0543, 0.0469, 0.0363, 0.0737, 0.0641],
+ device='cuda:0'), in_proj_covar=tensor([0.0102, 0.0092, 0.0118, 0.0099, 0.0085, 0.0077, 0.0089, 0.0102],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 21:14:42,612 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11174.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:14:56,447 INFO [train.py:892] (0/4) Epoch 7, batch 50, loss[loss=0.2266, simple_loss=0.287, pruned_loss=0.08314, over 19762.00 frames. ], tot_loss[loss=0.2589, simple_loss=0.3058, pruned_loss=0.106, over 890175.91 frames. ], batch size: 88, lr: 2.27e-02, grad_scale: 8.0
+2023-03-27 21:15:51,178 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.005e+02 5.596e+02 6.713e+02 8.168e+02 2.476e+03, threshold=1.343e+03, percent-clipped=3.0
+2023-03-27 21:16:43,173 INFO [train.py:892] (0/4) Epoch 7, batch 100, loss[loss=0.2918, simple_loss=0.3237, pruned_loss=0.1299, over 19807.00 frames. ], tot_loss[loss=0.2639, simple_loss=0.3118, pruned_loss=0.108, over 1567765.86 frames. ], batch size: 288, lr: 2.26e-02, grad_scale: 8.0
+2023-03-27 21:16:52,123 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11235.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:17:05,832 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11241.0, num_to_drop=1, layers_to_drop={2}
+2023-03-27 21:18:29,335 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8055, 2.9176, 3.1266, 2.1052, 2.9583, 2.1428, 2.4279, 3.2472],
+ device='cuda:0'), covar=tensor([0.0545, 0.0338, 0.0515, 0.0678, 0.0320, 0.0436, 0.0521, 0.0181],
+ device='cuda:0'), in_proj_covar=tensor([0.0046, 0.0045, 0.0050, 0.0069, 0.0045, 0.0042, 0.0042, 0.0037],
+ device='cuda:0'), out_proj_covar=tensor([1.2294e-04, 1.1872e-04, 1.2991e-04, 1.6807e-04, 1.1692e-04, 1.1247e-04,
+ 1.1468e-04, 9.6963e-05], device='cuda:0')
+2023-03-27 21:18:30,180 INFO [train.py:892] (0/4) Epoch 7, batch 150, loss[loss=0.232, simple_loss=0.2888, pruned_loss=0.08763, over 19852.00 frames. ], tot_loss[loss=0.2657, simple_loss=0.3126, pruned_loss=0.1094, over 2095642.71 frames. ], batch size: 118, lr: 2.26e-02, grad_scale: 8.0
+2023-03-27 21:19:26,066 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.645e+02 5.370e+02 6.417e+02 7.584e+02 2.097e+03, threshold=1.283e+03, percent-clipped=1.0
+2023-03-27 21:20:18,355 INFO [train.py:892] (0/4) Epoch 7, batch 200, loss[loss=0.2548, simple_loss=0.2993, pruned_loss=0.1052, over 19775.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3144, pruned_loss=0.1106, over 2506222.55 frames. ], batch size: 163, lr: 2.26e-02, grad_scale: 8.0
+2023-03-27 21:21:03,332 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9760, 5.1647, 5.5416, 5.2754, 5.1751, 4.9254, 5.0961, 5.0565],
+ device='cuda:0'), covar=tensor([0.1446, 0.0940, 0.0872, 0.0841, 0.0690, 0.0834, 0.1929, 0.2179],
+ device='cuda:0'), in_proj_covar=tensor([0.0217, 0.0193, 0.0260, 0.0203, 0.0194, 0.0191, 0.0248, 0.0288],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-27 21:21:40,461 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0
+2023-03-27 21:22:03,987 INFO [train.py:892] (0/4) Epoch 7, batch 250, loss[loss=0.3992, simple_loss=0.4282, pruned_loss=0.1851, over 19250.00 frames. ], tot_loss[loss=0.269, simple_loss=0.3152, pruned_loss=0.1114, over 2826481.98 frames. ], batch size: 483, lr: 2.25e-02, grad_scale: 8.0
+2023-03-27 21:22:32,845 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0441, 2.8927, 1.6021, 3.5731, 3.3224, 3.5468, 3.7033, 2.8738],
+ device='cuda:0'), covar=tensor([0.0545, 0.0535, 0.1563, 0.0519, 0.0417, 0.0436, 0.0470, 0.0704],
+ device='cuda:0'), in_proj_covar=tensor([0.0104, 0.0092, 0.0116, 0.0099, 0.0084, 0.0078, 0.0090, 0.0102],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 21:22:44,463 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11400.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:22:52,159 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8596, 2.5111, 2.8409, 2.9450, 3.1990, 3.1567, 3.6701, 3.8349],
+ device='cuda:0'), covar=tensor([0.0433, 0.1431, 0.1139, 0.1332, 0.1215, 0.1069, 0.0288, 0.0315],
+ device='cuda:0'), in_proj_covar=tensor([0.0168, 0.0187, 0.0192, 0.0207, 0.0215, 0.0194, 0.0129, 0.0131],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 21:22:57,027 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.370e+02 5.501e+02 6.755e+02 8.647e+02 2.452e+03, threshold=1.351e+03, percent-clipped=4.0
+2023-03-27 21:23:13,032 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6941, 1.8482, 2.5950, 2.9984, 3.1600, 3.3298, 3.5606, 3.5448],
+ device='cuda:0'), covar=tensor([0.0737, 0.2145, 0.0727, 0.0468, 0.0400, 0.0219, 0.0195, 0.0232],
+ device='cuda:0'), in_proj_covar=tensor([0.0106, 0.0157, 0.0109, 0.0102, 0.0083, 0.0081, 0.0076, 0.0078],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-03-27 21:23:45,348 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11430.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:23:46,581 INFO [train.py:892] (0/4) Epoch 7, batch 300, loss[loss=0.256, simple_loss=0.3067, pruned_loss=0.1026, over 19823.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3145, pruned_loss=0.1106, over 3076736.73 frames. ], batch size: 76, lr: 2.25e-02, grad_scale: 8.0
+2023-03-27 21:24:04,371 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.70 vs. limit=2.0
+2023-03-27 21:24:25,265 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11448.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:25:27,190 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0
+2023-03-27 21:25:28,287 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11478.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:25:34,576 INFO [train.py:892] (0/4) Epoch 7, batch 350, loss[loss=0.2448, simple_loss=0.2969, pruned_loss=0.09639, over 19847.00 frames. ], tot_loss[loss=0.2698, simple_loss=0.316, pruned_loss=0.1118, over 3270175.59 frames. ], batch size: 85, lr: 2.24e-02, grad_scale: 8.0
+2023-03-27 21:25:43,397 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.19 vs. limit=5.0
+2023-03-27 21:26:29,992 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.848e+02 5.697e+02 6.987e+02 8.639e+02 1.327e+03, threshold=1.397e+03, percent-clipped=1.0
+2023-03-27 21:27:07,534 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.3329, 2.6758, 1.6616, 1.7174, 2.2817, 2.6940, 2.5759, 2.1704],
+ device='cuda:0'), covar=tensor([0.0220, 0.0393, 0.0292, 0.0711, 0.0351, 0.0247, 0.0216, 0.0426],
+ device='cuda:0'), in_proj_covar=tensor([0.0042, 0.0041, 0.0042, 0.0059, 0.0057, 0.0039, 0.0034, 0.0037],
+ device='cuda:0'), out_proj_covar=tensor([9.2354e-05, 9.2498e-05, 9.0682e-05, 1.3370e-04, 1.2753e-04, 8.9473e-05,
+ 7.7687e-05, 8.3237e-05], device='cuda:0')
+2023-03-27 21:27:09,573 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.2243, 2.4392, 1.8453, 1.6170, 2.2236, 2.6371, 2.3913, 2.1435],
+ device='cuda:0'), covar=tensor([0.0258, 0.0384, 0.0258, 0.0747, 0.0366, 0.0192, 0.0234, 0.0355],
+ device='cuda:0'), in_proj_covar=tensor([0.0042, 0.0041, 0.0042, 0.0059, 0.0057, 0.0039, 0.0034, 0.0037],
+ device='cuda:0'), out_proj_covar=tensor([9.2206e-05, 9.2334e-05, 9.0532e-05, 1.3352e-04, 1.2733e-04, 8.9357e-05,
+ 7.7576e-05, 8.3137e-05], device='cuda:0')
+2023-03-27 21:27:18,047 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11530.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:27:18,261 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4073, 2.3812, 1.3591, 3.0017, 2.6747, 2.8913, 2.9431, 2.4835],
+ device='cuda:0'), covar=tensor([0.0646, 0.0573, 0.1450, 0.0348, 0.0506, 0.0322, 0.0402, 0.0630],
+ device='cuda:0'), in_proj_covar=tensor([0.0103, 0.0091, 0.0115, 0.0097, 0.0083, 0.0076, 0.0090, 0.0101],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 21:27:19,346 INFO [train.py:892] (0/4) Epoch 7, batch 400, loss[loss=0.2668, simple_loss=0.3088, pruned_loss=0.1124, over 19743.00 frames. ], tot_loss[loss=0.2679, simple_loss=0.3144, pruned_loss=0.1107, over 3420493.68 frames. ], batch size: 219, lr: 2.24e-02, grad_scale: 8.0
+2023-03-27 21:27:22,483 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11532.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:27:28,266 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2002, 3.9389, 3.9818, 3.8459, 4.1409, 3.1525, 3.4672, 2.5119],
+ device='cuda:0'), covar=tensor([0.0168, 0.0169, 0.0135, 0.0164, 0.0124, 0.0671, 0.0753, 0.0979],
+ device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0087, 0.0082, 0.0092, 0.0083, 0.0104, 0.0117, 0.0102],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-27 21:27:40,982 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11541.0, num_to_drop=1, layers_to_drop={2}
+2023-03-27 21:29:06,952 INFO [train.py:892] (0/4) Epoch 7, batch 450, loss[loss=0.2342, simple_loss=0.2776, pruned_loss=0.09544, over 19767.00 frames. ], tot_loss[loss=0.268, simple_loss=0.3145, pruned_loss=0.1107, over 3538952.12 frames. ], batch size: 119, lr: 2.23e-02, grad_scale: 8.0
+2023-03-27 21:29:23,271 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11589.0, num_to_drop=1, layers_to_drop={0}
+2023-03-27 21:29:31,259 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11593.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:30:00,683 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.550e+02 5.488e+02 6.753e+02 8.459e+02 1.411e+03, threshold=1.351e+03, percent-clipped=1.0
+2023-03-27 21:30:52,138 INFO [train.py:892] (0/4) Epoch 7, batch 500, loss[loss=0.2493, simple_loss=0.3055, pruned_loss=0.09651, over 19860.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.3134, pruned_loss=0.11, over 3630653.62 frames. ], batch size: 118, lr: 2.23e-02, grad_scale: 8.0
+2023-03-27 21:30:59,257 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5737, 3.2515, 4.6745, 3.8030, 4.3721, 4.3623, 4.3865, 4.1962],
+ device='cuda:0'), covar=tensor([0.0133, 0.0523, 0.0083, 0.1147, 0.0090, 0.0180, 0.0094, 0.0104],
+ device='cuda:0'), in_proj_covar=tensor([0.0063, 0.0073, 0.0060, 0.0130, 0.0053, 0.0063, 0.0059, 0.0053],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0004, 0.0001, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 21:31:01,093 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11635.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:32:37,354 INFO [train.py:892] (0/4) Epoch 7, batch 550, loss[loss=0.2975, simple_loss=0.3493, pruned_loss=0.1229, over 19683.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.3132, pruned_loss=0.1096, over 3702158.43 frames. ], batch size: 56, lr: 2.23e-02, grad_scale: 8.0
+2023-03-27 21:33:09,633 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11696.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:33:15,220 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11699.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:33:31,005 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.036e+02 6.098e+02 7.200e+02 8.839e+02 1.494e+03, threshold=1.440e+03, percent-clipped=2.0
+2023-03-27 21:34:22,372 INFO [train.py:892] (0/4) Epoch 7, batch 600, loss[loss=0.2942, simple_loss=0.3341, pruned_loss=0.1272, over 19821.00 frames. ], tot_loss[loss=0.2668, simple_loss=0.3134, pruned_loss=0.1101, over 3758173.61 frames. ], batch size: 202, lr: 2.22e-02, grad_scale: 8.0
+2023-03-27 21:35:14,993 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.94 vs. limit=5.0
+2023-03-27 21:35:24,285 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11760.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:36:07,569 INFO [train.py:892] (0/4) Epoch 7, batch 650, loss[loss=0.301, simple_loss=0.3486, pruned_loss=0.1267, over 19695.00 frames. ], tot_loss[loss=0.2673, simple_loss=0.314, pruned_loss=0.1103, over 3798917.04 frames. ], batch size: 265, lr: 2.22e-02, grad_scale: 8.0
+2023-03-27 21:37:02,909 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.631e+02 5.681e+02 6.679e+02 8.355e+02 2.061e+03, threshold=1.336e+03, percent-clipped=4.0
+2023-03-27 21:37:22,625 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11816.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:37:52,464 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11830.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:37:53,589 INFO [train.py:892] (0/4) Epoch 7, batch 700, loss[loss=0.2603, simple_loss=0.3049, pruned_loss=0.1079, over 19787.00 frames. ], tot_loss[loss=0.2668, simple_loss=0.3138, pruned_loss=0.1099, over 3833324.11 frames. ], batch size: 154, lr: 2.21e-02, grad_scale: 8.0
+2023-03-27 21:38:25,419 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11846.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:39:32,230 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11877.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:39:34,146 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11878.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:39:39,323 INFO [train.py:892] (0/4) Epoch 7, batch 750, loss[loss=0.2679, simple_loss=0.3114, pruned_loss=0.1122, over 19763.00 frames. ], tot_loss[loss=0.2668, simple_loss=0.3141, pruned_loss=0.1097, over 3858973.11 frames. ], batch size: 217, lr: 2.21e-02, grad_scale: 8.0
+2023-03-27 21:39:53,457 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11888.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:40:34,148 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.808e+02 5.639e+02 6.770e+02 8.403e+02 1.981e+03, threshold=1.354e+03, percent-clipped=4.0
+2023-03-27 21:40:35,192 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11907.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:41:02,197 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11920.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:41:20,023 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0
+2023-03-27 21:41:22,935 INFO [train.py:892] (0/4) Epoch 7, batch 800, loss[loss=0.2381, simple_loss=0.2872, pruned_loss=0.09449, over 19806.00 frames. ], tot_loss[loss=0.2664, simple_loss=0.3138, pruned_loss=0.1094, over 3879231.69 frames. ], batch size: 47, lr: 2.21e-02, grad_scale: 8.0
+2023-03-27 21:41:59,484 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11948.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:43:01,749 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.55 vs. limit=5.0
+2023-03-27 21:43:08,370 INFO [train.py:892] (0/4) Epoch 7, batch 850, loss[loss=0.2879, simple_loss=0.3215, pruned_loss=0.1271, over 19819.00 frames. ], tot_loss[loss=0.2674, simple_loss=0.3146, pruned_loss=0.1101, over 3894833.92 frames. ], batch size: 72, lr: 2.20e-02, grad_scale: 8.0
+2023-03-27 21:43:09,299 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11981.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:43:14,902 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11984.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:43:29,715 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11991.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:43:46,797 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-12000.pt
+2023-03-27 21:44:07,070 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.301e+02 6.093e+02 7.463e+02 9.130e+02 1.696e+03, threshold=1.493e+03, percent-clipped=3.0
+2023-03-27 21:44:12,037 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12009.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:44:56,860 INFO [train.py:892] (0/4) Epoch 7, batch 900, loss[loss=0.2586, simple_loss=0.3234, pruned_loss=0.09694, over 19768.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.3144, pruned_loss=0.1095, over 3907251.64 frames. ], batch size: 88, lr: 2.20e-02, grad_scale: 8.0
+2023-03-27 21:45:27,335 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12045.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:45:48,840 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12055.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:46:32,730 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12076.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:46:36,736 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8388, 2.7181, 3.9846, 3.5467, 3.7385, 3.8100, 3.8948, 3.7577],
+ device='cuda:0'), covar=tensor([0.0218, 0.0681, 0.0109, 0.0847, 0.0122, 0.0230, 0.0160, 0.0157],
+ device='cuda:0'), in_proj_covar=tensor([0.0064, 0.0073, 0.0059, 0.0131, 0.0052, 0.0063, 0.0059, 0.0052],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0004, 0.0001, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 21:46:44,675 INFO [train.py:892] (0/4) Epoch 7, batch 950, loss[loss=0.2399, simple_loss=0.2908, pruned_loss=0.09455, over 19732.00 frames. ], tot_loss[loss=0.2675, simple_loss=0.315, pruned_loss=0.11, over 3916426.21 frames. ], batch size: 47, lr: 2.19e-02, grad_scale: 8.0
+2023-03-27 21:47:39,035 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.928e+02 5.727e+02 6.871e+02 8.158e+02 1.418e+03, threshold=1.374e+03, percent-clipped=0.0
+2023-03-27 21:48:29,672 INFO [train.py:892] (0/4) Epoch 7, batch 1000, loss[loss=0.2682, simple_loss=0.3067, pruned_loss=0.1149, over 19814.00 frames. ], tot_loss[loss=0.2675, simple_loss=0.3148, pruned_loss=0.1101, over 3924864.99 frames. ], batch size: 167, lr: 2.19e-02, grad_scale: 8.0
+2023-03-27 21:48:43,837 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12137.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:48:51,582 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.7038, 2.2217, 1.7624, 1.1482, 1.7279, 2.0820, 2.0050, 2.0636],
+ device='cuda:0'), covar=tensor([0.0246, 0.0215, 0.0184, 0.0586, 0.0450, 0.0192, 0.0136, 0.0145],
+ device='cuda:0'), in_proj_covar=tensor([0.0043, 0.0041, 0.0042, 0.0060, 0.0059, 0.0040, 0.0033, 0.0037],
+ device='cuda:0'), out_proj_covar=tensor([9.7517e-05, 9.4135e-05, 9.3001e-05, 1.3681e-04, 1.3288e-04, 9.2390e-05,
+ 7.7491e-05, 8.3437e-05], device='cuda:0')
+2023-03-27 21:49:55,655 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12172.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:50:13,712 INFO [train.py:892] (0/4) Epoch 7, batch 1050, loss[loss=0.2492, simple_loss=0.3161, pruned_loss=0.0911, over 19785.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3148, pruned_loss=0.1104, over 3931299.01 frames. ], batch size: 53, lr: 2.19e-02, grad_scale: 8.0
+2023-03-27 21:50:31,272 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9055, 3.4604, 3.6275, 3.8891, 3.7148, 3.7035, 3.9948, 4.1248],
+ device='cuda:0'), covar=tensor([0.0579, 0.0371, 0.0454, 0.0290, 0.0474, 0.0403, 0.0339, 0.0266],
+ device='cuda:0'), in_proj_covar=tensor([0.0112, 0.0124, 0.0146, 0.0125, 0.0120, 0.0100, 0.0118, 0.0135],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 21:50:31,298 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12188.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:50:59,942 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12202.0, num_to_drop=0, layers_to_drop=set()
+2023-03-27 21:51:09,330 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.917e+02 5.376e+02 7.198e+02 8.445e+02 1.495e+03, threshold=1.440e+03, percent-clipped=1.0
+2023-03-27 21:51:23,152 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2504, 3.2812, 3.7515, 4.5067, 2.9481, 3.2391, 2.7873, 2.7282],
+ device='cuda:0'), covar=tensor([0.0379, 0.2733, 0.0751, 0.0154, 0.2062, 0.0652, 0.1021, 0.1735],
+ device='cuda:0'), in_proj_covar=tensor([0.0164, 0.0325, 0.0206, 0.0118, 0.0226, 0.0148, 0.0178, 0.0205],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-27 21:52:00,172 INFO [train.py:892] (0/4) Epoch 7, batch 1100, loss[loss=0.2737, simple_loss=0.3245, pruned_loss=0.1114, over 19883.00 frames. ], tot_loss[loss=0.2682, simple_loss=0.3151, pruned_loss=0.1106, over 3934444.41 frames.
], batch size: 84, lr: 2.18e-02, grad_scale: 8.0 +2023-03-27 21:52:12,242 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12236.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:52:26,231 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5965, 3.0867, 4.6909, 3.7913, 4.2155, 4.4161, 4.4541, 4.2301], + device='cuda:0'), covar=tensor([0.0137, 0.0530, 0.0074, 0.1049, 0.0092, 0.0165, 0.0102, 0.0095], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0076, 0.0061, 0.0136, 0.0054, 0.0066, 0.0062, 0.0054], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 21:53:35,987 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12276.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:53:45,135 INFO [train.py:892] (0/4) Epoch 7, batch 1150, loss[loss=0.2635, simple_loss=0.3165, pruned_loss=0.1053, over 19795.00 frames. ], tot_loss[loss=0.267, simple_loss=0.3143, pruned_loss=0.1099, over 3937210.93 frames. ], batch size: 185, lr: 2.18e-02, grad_scale: 8.0 +2023-03-27 21:54:06,708 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12291.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:54:08,857 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.20 vs. limit=5.0 +2023-03-27 21:54:34,455 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12304.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:54:41,305 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.471e+02 5.763e+02 6.995e+02 9.054e+02 1.717e+03, threshold=1.399e+03, percent-clipped=2.0 +2023-03-27 21:55:31,483 INFO [train.py:892] (0/4) Epoch 7, batch 1200, loss[loss=0.281, simple_loss=0.3235, pruned_loss=0.1193, over 19787.00 frames. ], tot_loss[loss=0.2654, simple_loss=0.3129, pruned_loss=0.1089, over 3939471.76 frames. ], batch size: 236, lr: 2.18e-02, grad_scale: 8.0 +2023-03-27 21:55:48,730 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12339.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:55:50,619 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12340.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:56:21,256 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12355.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:56:33,066 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-27 21:56:38,310 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9071, 5.2285, 5.2683, 5.2599, 4.9289, 5.2085, 4.5854, 4.8151], + device='cuda:0'), covar=tensor([0.0405, 0.0376, 0.0614, 0.0401, 0.0628, 0.0570, 0.0587, 0.0926], + device='cuda:0'), in_proj_covar=tensor([0.0166, 0.0159, 0.0209, 0.0171, 0.0159, 0.0151, 0.0188, 0.0221], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-27 21:57:17,156 INFO [train.py:892] (0/4) Epoch 7, batch 1250, loss[loss=0.2188, simple_loss=0.2775, pruned_loss=0.08003, over 19812.00 frames. ], tot_loss[loss=0.2658, simple_loss=0.3131, pruned_loss=0.1092, over 3942684.62 frames. 
], batch size: 40, lr: 2.17e-02, grad_scale: 8.0 +2023-03-27 21:58:02,546 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12403.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:58:09,909 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.119e+02 6.346e+02 7.494e+02 8.891e+02 1.193e+03, threshold=1.499e+03, percent-clipped=0.0 +2023-03-27 21:58:31,060 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9383, 4.1066, 2.4101, 4.4069, 4.5085, 1.9497, 3.6138, 3.5897], + device='cuda:0'), covar=tensor([0.0583, 0.0657, 0.2777, 0.0416, 0.0198, 0.3290, 0.1193, 0.0530], + device='cuda:0'), in_proj_covar=tensor([0.0159, 0.0184, 0.0199, 0.0146, 0.0102, 0.0192, 0.0201, 0.0132], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-03-27 21:58:36,291 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0763, 3.3155, 1.8890, 4.0704, 3.5240, 4.0266, 4.1685, 3.0223], + device='cuda:0'), covar=tensor([0.0578, 0.0475, 0.1452, 0.0394, 0.0433, 0.0378, 0.0460, 0.0679], + device='cuda:0'), in_proj_covar=tensor([0.0106, 0.0096, 0.0119, 0.0102, 0.0084, 0.0080, 0.0095, 0.0104], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 21:59:01,224 INFO [train.py:892] (0/4) Epoch 7, batch 1300, loss[loss=0.2807, simple_loss=0.324, pruned_loss=0.1187, over 19764.00 frames. ], tot_loss[loss=0.2667, simple_loss=0.314, pruned_loss=0.1097, over 3944470.39 frames. ], batch size: 244, lr: 2.17e-02, grad_scale: 8.0 +2023-03-27 21:59:03,843 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12432.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 21:59:33,325 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12446.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:00:03,466 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.63 vs. limit=2.0 +2023-03-27 22:00:27,402 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12472.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:00:45,674 INFO [train.py:892] (0/4) Epoch 7, batch 1350, loss[loss=0.2431, simple_loss=0.3032, pruned_loss=0.09154, over 19902.00 frames. ], tot_loss[loss=0.2691, simple_loss=0.3156, pruned_loss=0.1113, over 3943924.66 frames. ], batch size: 50, lr: 2.16e-02, grad_scale: 16.0 +2023-03-27 22:01:21,889 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-27 22:01:28,972 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12502.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:01:39,461 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.642e+02 6.155e+02 7.029e+02 9.046e+02 1.832e+03, threshold=1.406e+03, percent-clipped=3.0 +2023-03-27 22:01:40,561 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12507.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:02:06,987 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12520.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:02:29,804 INFO [train.py:892] (0/4) Epoch 7, batch 1400, loss[loss=0.2661, simple_loss=0.308, pruned_loss=0.1121, over 19723.00 frames. ], tot_loss[loss=0.2681, simple_loss=0.3148, pruned_loss=0.1107, over 3945158.00 frames. 
], batch size: 104, lr: 2.16e-02, grad_scale: 16.0 +2023-03-27 22:03:08,822 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12550.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:03:44,804 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1543, 2.2693, 2.2550, 2.2249, 1.9375, 2.2460, 2.0641, 2.2934], + device='cuda:0'), covar=tensor([0.0265, 0.0279, 0.0246, 0.0308, 0.0445, 0.0354, 0.0391, 0.0320], + device='cuda:0'), in_proj_covar=tensor([0.0034, 0.0036, 0.0037, 0.0030, 0.0040, 0.0038, 0.0049, 0.0034], + device='cuda:0'), out_proj_covar=tensor([7.5119e-05, 7.8909e-05, 8.1827e-05, 6.8006e-05, 8.8275e-05, 8.3064e-05, + 1.0667e-04, 7.5534e-05], device='cuda:0') +2023-03-27 22:03:54,707 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.91 vs. limit=5.0 +2023-03-27 22:04:03,984 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12576.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:04:14,046 INFO [train.py:892] (0/4) Epoch 7, batch 1450, loss[loss=0.2674, simple_loss=0.3112, pruned_loss=0.1118, over 19836.00 frames. ], tot_loss[loss=0.2664, simple_loss=0.3141, pruned_loss=0.1094, over 3947318.98 frames. ], batch size: 171, lr: 2.16e-02, grad_scale: 16.0 +2023-03-27 22:05:02,555 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12604.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:05:06,614 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9226, 2.1456, 2.4815, 2.8672, 1.8496, 2.4848, 1.7280, 1.9060], + device='cuda:0'), covar=tensor([0.0613, 0.1946, 0.1084, 0.0278, 0.2798, 0.0650, 0.1581, 0.1922], + device='cuda:0'), in_proj_covar=tensor([0.0167, 0.0329, 0.0208, 0.0122, 0.0233, 0.0151, 0.0185, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0001, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 22:05:07,591 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.016e+02 5.644e+02 6.580e+02 8.199e+02 1.502e+03, threshold=1.316e+03, percent-clipped=1.0 +2023-03-27 22:05:43,350 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12624.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:05:57,456 INFO [train.py:892] (0/4) Epoch 7, batch 1500, loss[loss=0.3322, simple_loss=0.3518, pruned_loss=0.1563, over 19692.00 frames. ], tot_loss[loss=0.2675, simple_loss=0.3146, pruned_loss=0.1102, over 3948427.78 frames. ], batch size: 265, lr: 2.15e-02, grad_scale: 16.0 +2023-03-27 22:06:16,724 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12640.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:06:41,584 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12652.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:07:33,170 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.98 vs. limit=2.0 +2023-03-27 22:07:41,420 INFO [train.py:892] (0/4) Epoch 7, batch 1550, loss[loss=0.2629, simple_loss=0.3161, pruned_loss=0.1049, over 19946.00 frames. ], tot_loss[loss=0.2656, simple_loss=0.3133, pruned_loss=0.1089, over 3950414.39 frames. 
], batch size: 46, lr: 2.15e-02, grad_scale: 16.0 +2023-03-27 22:07:56,848 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12688.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:08:35,757 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.712e+02 5.812e+02 6.854e+02 8.280e+02 1.889e+03, threshold=1.371e+03, percent-clipped=7.0 +2023-03-27 22:09:26,215 INFO [train.py:892] (0/4) Epoch 7, batch 1600, loss[loss=0.2378, simple_loss=0.2927, pruned_loss=0.09151, over 19642.00 frames. ], tot_loss[loss=0.2631, simple_loss=0.3114, pruned_loss=0.1074, over 3951885.87 frames. ], batch size: 69, lr: 2.15e-02, grad_scale: 16.0 +2023-03-27 22:09:28,749 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12732.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:10:05,967 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9980, 3.2709, 4.7977, 3.9097, 4.3657, 4.6790, 4.6956, 4.5102], + device='cuda:0'), covar=tensor([0.0081, 0.0519, 0.0071, 0.1119, 0.0084, 0.0132, 0.0088, 0.0092], + device='cuda:0'), in_proj_covar=tensor([0.0066, 0.0077, 0.0060, 0.0135, 0.0054, 0.0067, 0.0062, 0.0054], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 22:10:12,104 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6211, 3.5474, 3.9883, 5.0336, 3.0078, 3.7254, 2.9157, 2.7612], + device='cuda:0'), covar=tensor([0.0338, 0.3473, 0.0752, 0.0127, 0.2580, 0.0585, 0.1297, 0.2172], + device='cuda:0'), in_proj_covar=tensor([0.0164, 0.0321, 0.0203, 0.0120, 0.0228, 0.0151, 0.0181, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0001, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 22:10:32,845 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12763.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:10:51,474 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8934, 4.8034, 5.3127, 5.1576, 5.0981, 4.6397, 4.9584, 4.9209], + device='cuda:0'), covar=tensor([0.1144, 0.1190, 0.0881, 0.0900, 0.0718, 0.0888, 0.1754, 0.1959], + device='cuda:0'), in_proj_covar=tensor([0.0222, 0.0202, 0.0268, 0.0210, 0.0198, 0.0198, 0.0255, 0.0294], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-27 22:11:06,776 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12780.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:11:08,029 INFO [train.py:892] (0/4) Epoch 7, batch 1650, loss[loss=0.3557, simple_loss=0.385, pruned_loss=0.1632, over 19597.00 frames. ], tot_loss[loss=0.2625, simple_loss=0.3112, pruned_loss=0.1069, over 3951523.32 frames. ], batch size: 376, lr: 2.14e-02, grad_scale: 16.0 +2023-03-27 22:11:54,320 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12802.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:12:04,853 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.717e+02 5.657e+02 6.905e+02 8.764e+02 1.774e+03, threshold=1.381e+03, percent-clipped=3.0 +2023-03-27 22:12:43,789 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12824.0, num_to_drop=1, layers_to_drop={3} +2023-03-27 22:12:56,384 INFO [train.py:892] (0/4) Epoch 7, batch 1700, loss[loss=0.269, simple_loss=0.3139, pruned_loss=0.112, over 19809.00 frames. 
], tot_loss[loss=0.2621, simple_loss=0.3113, pruned_loss=0.1065, over 3950386.17 frames. ], batch size: 72, lr: 2.14e-02, grad_scale: 16.0 +2023-03-27 22:14:36,903 INFO [train.py:892] (0/4) Epoch 7, batch 1750, loss[loss=0.2408, simple_loss=0.2897, pruned_loss=0.09594, over 19773.00 frames. ], tot_loss[loss=0.2614, simple_loss=0.3104, pruned_loss=0.1062, over 3951743.74 frames. ], batch size: 116, lr: 2.14e-02, grad_scale: 16.0 +2023-03-27 22:15:06,613 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9869, 2.3216, 3.1475, 3.0702, 3.2085, 3.1626, 3.8959, 4.0525], + device='cuda:0'), covar=tensor([0.0462, 0.1730, 0.1082, 0.1519, 0.1213, 0.1404, 0.0346, 0.0304], + device='cuda:0'), in_proj_covar=tensor([0.0177, 0.0197, 0.0202, 0.0215, 0.0228, 0.0204, 0.0142, 0.0142], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 22:15:23,459 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 5.556e+02 6.737e+02 8.077e+02 1.296e+03, threshold=1.347e+03, percent-clipped=0.0 +2023-03-27 22:16:05,766 INFO [train.py:892] (0/4) Epoch 7, batch 1800, loss[loss=0.263, simple_loss=0.3135, pruned_loss=0.1062, over 19623.00 frames. ], tot_loss[loss=0.2647, simple_loss=0.3133, pruned_loss=0.1081, over 3949290.52 frames. ], batch size: 65, lr: 2.13e-02, grad_scale: 16.0 +2023-03-27 22:16:44,358 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6403, 4.7278, 5.0490, 4.8849, 4.8687, 4.4086, 4.6470, 4.6355], + device='cuda:0'), covar=tensor([0.1312, 0.0923, 0.0973, 0.0952, 0.0863, 0.1022, 0.2037, 0.1915], + device='cuda:0'), in_proj_covar=tensor([0.0232, 0.0210, 0.0277, 0.0216, 0.0207, 0.0202, 0.0263, 0.0306], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-27 22:17:11,958 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5918, 4.2181, 2.7374, 4.8264, 5.0246, 2.3532, 4.1197, 3.8203], + device='cuda:0'), covar=tensor([0.0423, 0.0787, 0.2292, 0.0407, 0.0207, 0.2589, 0.0781, 0.0531], + device='cuda:0'), in_proj_covar=tensor([0.0163, 0.0191, 0.0203, 0.0152, 0.0108, 0.0194, 0.0207, 0.0137], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 22:17:29,653 INFO [train.py:892] (0/4) Epoch 7, batch 1850, loss[loss=0.2317, simple_loss=0.3022, pruned_loss=0.08063, over 19820.00 frames. ], tot_loss[loss=0.2637, simple_loss=0.3135, pruned_loss=0.107, over 3949056.95 frames. ], batch size: 57, lr: 2.13e-02, grad_scale: 16.0 +2023-03-27 22:17:36,948 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-7.pt +2023-03-27 22:18:27,978 INFO [train.py:892] (0/4) Epoch 8, batch 0, loss[loss=0.2162, simple_loss=0.2703, pruned_loss=0.08104, over 19902.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2703, pruned_loss=0.08104, over 19902.00 frames. ], batch size: 94, lr: 2.00e-02, grad_scale: 16.0 +2023-03-27 22:18:27,979 INFO [train.py:917] (0/4) Computing validation loss +2023-03-27 22:18:55,072 INFO [train.py:926] (0/4) Epoch 8, validation: loss=0.189, simple_loss=0.2688, pruned_loss=0.05453, over 2883724.00 frames. 
+2023-03-27 22:18:55,073 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-27 22:19:40,815 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.963e+02 5.840e+02 6.949e+02 8.045e+02 1.580e+03, threshold=1.390e+03, percent-clipped=1.0 +2023-03-27 22:20:44,100 INFO [train.py:892] (0/4) Epoch 8, batch 50, loss[loss=0.2547, simple_loss=0.2892, pruned_loss=0.1101, over 19763.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3001, pruned_loss=0.1008, over 892108.72 frames. ], batch size: 205, lr: 2.00e-02, grad_scale: 16.0 +2023-03-27 22:22:19,359 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-27 22:22:29,882 INFO [train.py:892] (0/4) Epoch 8, batch 100, loss[loss=0.2351, simple_loss=0.2843, pruned_loss=0.09295, over 19732.00 frames. ], tot_loss[loss=0.2555, simple_loss=0.3054, pruned_loss=0.1028, over 1570307.69 frames. ], batch size: 95, lr: 2.00e-02, grad_scale: 16.0 +2023-03-27 22:23:02,890 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=13102.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:23:13,340 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.875e+02 5.582e+02 6.747e+02 8.374e+02 1.388e+03, threshold=1.349e+03, percent-clipped=0.0 +2023-03-27 22:23:20,298 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7309, 2.2689, 2.7494, 2.4931, 2.4587, 3.0201, 1.7279, 1.9672], + device='cuda:0'), covar=tensor([0.0569, 0.1446, 0.0414, 0.0468, 0.1017, 0.0399, 0.1144, 0.1429], + device='cuda:0'), in_proj_covar=tensor([0.0263, 0.0307, 0.0211, 0.0183, 0.0288, 0.0196, 0.0228, 0.0226], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 22:23:39,228 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=13119.0, num_to_drop=1, layers_to_drop={3} +2023-03-27 22:23:55,712 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-03-27 22:24:13,106 INFO [train.py:892] (0/4) Epoch 8, batch 150, loss[loss=0.2611, simple_loss=0.3054, pruned_loss=0.1084, over 19831.00 frames. ], tot_loss[loss=0.2605, simple_loss=0.3091, pruned_loss=0.106, over 2097856.53 frames. ], batch size: 166, lr: 1.99e-02, grad_scale: 16.0 +2023-03-27 22:24:43,903 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=13150.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:25:58,990 INFO [train.py:892] (0/4) Epoch 8, batch 200, loss[loss=0.2487, simple_loss=0.299, pruned_loss=0.0992, over 19773.00 frames. ], tot_loss[loss=0.2628, simple_loss=0.3121, pruned_loss=0.1068, over 2505197.40 frames. 
], batch size: 236, lr: 1.99e-02, grad_scale: 16.0 +2023-03-27 22:26:43,151 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 4.035e+02 5.713e+02 7.016e+02 8.259e+02 1.478e+03, threshold=1.403e+03, percent-clipped=2.0 +2023-03-27 22:26:54,962 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3777, 5.4130, 5.4281, 5.5911, 5.3116, 5.5040, 5.0729, 4.6193], + device='cuda:0'), covar=tensor([0.0721, 0.0819, 0.1025, 0.0842, 0.0789, 0.0976, 0.1327, 0.2169], + device='cuda:0'), in_proj_covar=tensor([0.0166, 0.0159, 0.0211, 0.0169, 0.0160, 0.0155, 0.0185, 0.0222], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-27 22:27:43,922 INFO [train.py:892] (0/4) Epoch 8, batch 250, loss[loss=0.2635, simple_loss=0.3095, pruned_loss=0.1087, over 19826.00 frames. ], tot_loss[loss=0.2623, simple_loss=0.3119, pruned_loss=0.1063, over 2824945.42 frames. ], batch size: 204, lr: 1.99e-02, grad_scale: 16.0 +2023-03-27 22:27:57,867 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1685, 4.0389, 2.5101, 4.5271, 4.6513, 1.7955, 3.8670, 3.4663], + device='cuda:0'), covar=tensor([0.0527, 0.0647, 0.2228, 0.0472, 0.0195, 0.3011, 0.0823, 0.0617], + device='cuda:0'), in_proj_covar=tensor([0.0163, 0.0186, 0.0197, 0.0149, 0.0108, 0.0190, 0.0201, 0.0134], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 22:28:19,220 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-27 22:28:59,319 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3741, 3.2140, 1.9817, 4.3079, 3.6697, 4.2154, 4.3052, 3.1013], + device='cuda:0'), covar=tensor([0.0476, 0.0436, 0.1333, 0.0312, 0.0334, 0.0251, 0.0299, 0.0625], + device='cuda:0'), in_proj_covar=tensor([0.0109, 0.0099, 0.0121, 0.0105, 0.0089, 0.0084, 0.0096, 0.0108], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 22:29:31,206 INFO [train.py:892] (0/4) Epoch 8, batch 300, loss[loss=0.2562, simple_loss=0.3041, pruned_loss=0.1041, over 19703.00 frames. ], tot_loss[loss=0.2607, simple_loss=0.3105, pruned_loss=0.1055, over 3075209.16 frames. ], batch size: 85, lr: 1.98e-02, grad_scale: 16.0 +2023-03-27 22:30:13,179 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 5.556e+02 6.827e+02 8.454e+02 1.465e+03, threshold=1.365e+03, percent-clipped=2.0 +2023-03-27 22:31:14,169 INFO [train.py:892] (0/4) Epoch 8, batch 350, loss[loss=0.2691, simple_loss=0.3277, pruned_loss=0.1053, over 19779.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3096, pruned_loss=0.1046, over 3270868.69 frames. ], batch size: 53, lr: 1.98e-02, grad_scale: 16.0 +2023-03-27 22:32:34,854 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2086, 2.8241, 4.1935, 3.5964, 3.8985, 4.0835, 4.0801, 3.8343], + device='cuda:0'), covar=tensor([0.0114, 0.0561, 0.0093, 0.0881, 0.0100, 0.0169, 0.0124, 0.0111], + device='cuda:0'), in_proj_covar=tensor([0.0066, 0.0078, 0.0061, 0.0133, 0.0054, 0.0067, 0.0062, 0.0054], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 22:32:58,888 INFO [train.py:892] (0/4) Epoch 8, batch 400, loss[loss=0.2372, simple_loss=0.2996, pruned_loss=0.08738, over 19905.00 frames. 
], tot_loss[loss=0.2574, simple_loss=0.3085, pruned_loss=0.1032, over 3421433.54 frames. ], batch size: 94, lr: 1.98e-02, grad_scale: 16.0 +2023-03-27 22:33:41,531 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.399e+02 5.749e+02 7.183e+02 8.844e+02 1.624e+03, threshold=1.437e+03, percent-clipped=4.0 +2023-03-27 22:33:57,412 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9775, 3.2581, 5.0347, 4.1195, 4.6416, 4.8311, 4.9513, 4.5704], + device='cuda:0'), covar=tensor([0.0103, 0.0521, 0.0069, 0.0847, 0.0077, 0.0112, 0.0081, 0.0078], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0078, 0.0061, 0.0132, 0.0055, 0.0067, 0.0062, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 22:34:07,747 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=13419.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:34:44,919 INFO [train.py:892] (0/4) Epoch 8, batch 450, loss[loss=0.2399, simple_loss=0.2903, pruned_loss=0.09477, over 19692.00 frames. ], tot_loss[loss=0.258, simple_loss=0.3086, pruned_loss=0.1037, over 3538784.98 frames. ], batch size: 45, lr: 1.97e-02, grad_scale: 16.0 +2023-03-27 22:35:49,851 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=13467.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:35:54,317 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7066, 2.8667, 3.2237, 3.7995, 2.5212, 3.0498, 2.3666, 2.3131], + device='cuda:0'), covar=tensor([0.0432, 0.2670, 0.0825, 0.0193, 0.2214, 0.0563, 0.1206, 0.1921], + device='cuda:0'), in_proj_covar=tensor([0.0171, 0.0326, 0.0208, 0.0124, 0.0231, 0.0152, 0.0183, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0001, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 22:36:07,903 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8072, 4.2816, 4.3309, 4.8203, 4.3134, 4.9627, 4.8367, 5.0331], + device='cuda:0'), covar=tensor([0.0522, 0.0269, 0.0380, 0.0208, 0.0586, 0.0176, 0.0313, 0.0238], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0130, 0.0156, 0.0133, 0.0127, 0.0108, 0.0122, 0.0142], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 22:36:27,723 INFO [train.py:892] (0/4) Epoch 8, batch 500, loss[loss=0.256, simple_loss=0.31, pruned_loss=0.101, over 19814.00 frames. ], tot_loss[loss=0.2567, simple_loss=0.3076, pruned_loss=0.1028, over 3630430.66 frames. 
], batch size: 96, lr: 1.97e-02, grad_scale: 16.0 +2023-03-27 22:36:48,484 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5263, 1.7991, 2.3941, 2.9017, 3.2094, 3.4226, 3.4049, 3.3881], + device='cuda:0'), covar=tensor([0.0801, 0.2071, 0.1017, 0.0493, 0.0302, 0.0159, 0.0225, 0.0318], + device='cuda:0'), in_proj_covar=tensor([0.0122, 0.0165, 0.0135, 0.0116, 0.0095, 0.0091, 0.0086, 0.0088], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 22:37:10,238 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5532, 3.5787, 2.2407, 3.9789, 3.9348, 1.5731, 3.1787, 3.2194], + device='cuda:0'), covar=tensor([0.0593, 0.0725, 0.2322, 0.0437, 0.0266, 0.3066, 0.0913, 0.0533], + device='cuda:0'), in_proj_covar=tensor([0.0166, 0.0193, 0.0204, 0.0156, 0.0111, 0.0195, 0.0207, 0.0138], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 22:37:11,213 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.922e+02 5.157e+02 6.466e+02 8.211e+02 1.475e+03, threshold=1.293e+03, percent-clipped=2.0 +2023-03-27 22:37:33,543 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.8442, 1.7745, 1.9319, 1.7668, 1.4742, 1.7303, 1.6246, 1.8942], + device='cuda:0'), covar=tensor([0.0218, 0.0270, 0.0249, 0.0252, 0.0352, 0.0386, 0.0433, 0.0288], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0041, 0.0034, 0.0042, 0.0041, 0.0053, 0.0036], + device='cuda:0'), out_proj_covar=tensor([8.2296e-05, 8.4215e-05, 8.8648e-05, 7.6817e-05, 9.2581e-05, 9.0258e-05, + 1.1556e-04, 8.2501e-05], device='cuda:0') +2023-03-27 22:38:12,607 INFO [train.py:892] (0/4) Epoch 8, batch 550, loss[loss=0.2288, simple_loss=0.2832, pruned_loss=0.08719, over 19711.00 frames. ], tot_loss[loss=0.2559, simple_loss=0.307, pruned_loss=0.1024, over 3701880.84 frames. ], batch size: 85, lr: 1.97e-02, grad_scale: 16.0 +2023-03-27 22:39:58,920 INFO [train.py:892] (0/4) Epoch 8, batch 600, loss[loss=0.2374, simple_loss=0.2968, pruned_loss=0.08903, over 19888.00 frames. ], tot_loss[loss=0.2579, simple_loss=0.3083, pruned_loss=0.1037, over 3756649.98 frames. 
], batch size: 47, lr: 1.97e-02, grad_scale: 16.0 +2023-03-27 22:40:22,878 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4505, 4.3543, 4.7877, 4.6235, 4.7024, 4.0915, 4.4214, 4.3405], + device='cuda:0'), covar=tensor([0.1328, 0.1253, 0.0956, 0.1025, 0.0857, 0.0939, 0.2073, 0.2480], + device='cuda:0'), in_proj_covar=tensor([0.0229, 0.0210, 0.0269, 0.0210, 0.0205, 0.0195, 0.0259, 0.0293], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-27 22:40:23,006 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5433, 2.5586, 3.6990, 3.2495, 3.4516, 3.5639, 3.6006, 3.3612], + device='cuda:0'), covar=tensor([0.0194, 0.0602, 0.0105, 0.0624, 0.0105, 0.0223, 0.0147, 0.0129], + device='cuda:0'), in_proj_covar=tensor([0.0066, 0.0077, 0.0060, 0.0131, 0.0054, 0.0068, 0.0062, 0.0054], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 22:40:41,840 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.580e+02 5.730e+02 6.817e+02 8.110e+02 1.556e+03, threshold=1.363e+03, percent-clipped=3.0 +2023-03-27 22:41:42,659 INFO [train.py:892] (0/4) Epoch 8, batch 650, loss[loss=0.2534, simple_loss=0.3099, pruned_loss=0.09849, over 19730.00 frames. ], tot_loss[loss=0.2588, simple_loss=0.3086, pruned_loss=0.1044, over 3800399.83 frames. ], batch size: 63, lr: 1.96e-02, grad_scale: 16.0 +2023-03-27 22:43:26,131 INFO [train.py:892] (0/4) Epoch 8, batch 700, loss[loss=0.2222, simple_loss=0.2864, pruned_loss=0.07897, over 19607.00 frames. ], tot_loss[loss=0.2585, simple_loss=0.3083, pruned_loss=0.1044, over 3834365.50 frames. ], batch size: 48, lr: 1.96e-02, grad_scale: 16.0 +2023-03-27 22:44:11,779 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.892e+02 5.950e+02 6.978e+02 8.185e+02 2.283e+03, threshold=1.396e+03, percent-clipped=2.0 +2023-03-27 22:44:44,023 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8847, 4.2624, 4.2157, 4.1327, 3.9678, 4.1927, 3.7416, 3.7827], + device='cuda:0'), covar=tensor([0.0561, 0.0425, 0.0700, 0.0560, 0.0708, 0.0677, 0.0681, 0.1046], + device='cuda:0'), in_proj_covar=tensor([0.0166, 0.0160, 0.0210, 0.0171, 0.0163, 0.0158, 0.0185, 0.0223], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-27 22:45:13,755 INFO [train.py:892] (0/4) Epoch 8, batch 750, loss[loss=0.2291, simple_loss=0.284, pruned_loss=0.08707, over 19658.00 frames. ], tot_loss[loss=0.2559, simple_loss=0.3061, pruned_loss=0.1029, over 3859951.92 frames. ], batch size: 43, lr: 1.96e-02, grad_scale: 16.0 +2023-03-27 22:45:16,679 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=13737.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:46:57,364 INFO [train.py:892] (0/4) Epoch 8, batch 800, loss[loss=0.3093, simple_loss=0.3402, pruned_loss=0.1392, over 19705.00 frames. ], tot_loss[loss=0.2571, simple_loss=0.3074, pruned_loss=0.1034, over 3880966.67 frames. 
], batch size: 295, lr: 1.95e-02, grad_scale: 16.0 +2023-03-27 22:47:22,153 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=13798.0, num_to_drop=1, layers_to_drop={3} +2023-03-27 22:47:23,867 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8844, 4.4542, 4.7182, 4.3800, 4.7501, 3.3872, 3.9456, 2.8296], + device='cuda:0'), covar=tensor([0.0125, 0.0139, 0.0098, 0.0133, 0.0108, 0.0635, 0.0681, 0.0945], + device='cuda:0'), in_proj_covar=tensor([0.0076, 0.0098, 0.0086, 0.0098, 0.0089, 0.0109, 0.0121, 0.0104], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-27 22:47:40,986 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.356e+02 5.653e+02 6.887e+02 8.342e+02 1.511e+03, threshold=1.377e+03, percent-clipped=1.0 +2023-03-27 22:48:41,452 INFO [train.py:892] (0/4) Epoch 8, batch 850, loss[loss=0.206, simple_loss=0.2657, pruned_loss=0.07315, over 19789.00 frames. ], tot_loss[loss=0.2565, simple_loss=0.3069, pruned_loss=0.103, over 3897079.00 frames. ], batch size: 45, lr: 1.95e-02, grad_scale: 16.0 +2023-03-27 22:49:30,082 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9159, 3.7608, 3.8112, 3.5722, 3.9000, 2.9418, 3.1550, 2.0680], + device='cuda:0'), covar=tensor([0.0184, 0.0183, 0.0135, 0.0176, 0.0136, 0.0788, 0.0849, 0.1382], + device='cuda:0'), in_proj_covar=tensor([0.0076, 0.0098, 0.0086, 0.0099, 0.0089, 0.0109, 0.0122, 0.0105], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-27 22:49:54,905 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-27 22:50:25,539 INFO [train.py:892] (0/4) Epoch 8, batch 900, loss[loss=0.2657, simple_loss=0.3126, pruned_loss=0.1094, over 19790.00 frames. ], tot_loss[loss=0.2569, simple_loss=0.3073, pruned_loss=0.1032, over 3908904.83 frames. ], batch size: 193, lr: 1.95e-02, grad_scale: 16.0 +2023-03-27 22:51:07,627 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.950e+02 5.711e+02 6.914e+02 8.522e+02 2.139e+03, threshold=1.383e+03, percent-clipped=3.0 +2023-03-27 22:51:49,339 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.88 vs. limit=5.0 +2023-03-27 22:52:07,732 INFO [train.py:892] (0/4) Epoch 8, batch 950, loss[loss=0.3602, simple_loss=0.3981, pruned_loss=0.1611, over 19598.00 frames. ], tot_loss[loss=0.2584, simple_loss=0.3086, pruned_loss=0.104, over 3916717.97 frames. ], batch size: 367, lr: 1.94e-02, grad_scale: 16.0 +2023-03-27 22:53:29,732 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.58 vs. limit=2.0 +2023-03-27 22:53:53,085 INFO [train.py:892] (0/4) Epoch 8, batch 1000, loss[loss=0.2297, simple_loss=0.278, pruned_loss=0.0907, over 19813.00 frames. ], tot_loss[loss=0.2569, simple_loss=0.307, pruned_loss=0.1034, over 3925637.72 frames. ], batch size: 123, lr: 1.94e-02, grad_scale: 16.0 +2023-03-27 22:53:57,888 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. 
limit=2.0 +2023-03-27 22:54:23,363 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-14000.pt +2023-03-27 22:54:42,898 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.512e+02 5.428e+02 6.416e+02 7.650e+02 1.405e+03, threshold=1.283e+03, percent-clipped=1.0 +2023-03-27 22:55:44,636 INFO [train.py:892] (0/4) Epoch 8, batch 1050, loss[loss=0.254, simple_loss=0.2991, pruned_loss=0.1044, over 19818.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.3059, pruned_loss=0.1025, over 3931790.25 frames. ], batch size: 181, lr: 1.94e-02, grad_scale: 16.0 +2023-03-27 22:57:00,361 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14072.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:57:29,675 INFO [train.py:892] (0/4) Epoch 8, batch 1100, loss[loss=0.2624, simple_loss=0.3204, pruned_loss=0.1022, over 19810.00 frames. ], tot_loss[loss=0.2547, simple_loss=0.3058, pruned_loss=0.1018, over 3932867.52 frames. ], batch size: 67, lr: 1.93e-02, grad_scale: 16.0 +2023-03-27 22:57:43,871 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14093.0, num_to_drop=1, layers_to_drop={2} +2023-03-27 22:58:04,933 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14103.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:58:13,328 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.085e+02 5.555e+02 6.523e+02 7.495e+02 1.242e+03, threshold=1.305e+03, percent-clipped=0.0 +2023-03-27 22:58:40,397 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14120.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 22:59:06,788 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14133.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 22:59:11,548 INFO [train.py:892] (0/4) Epoch 8, batch 1150, loss[loss=0.2346, simple_loss=0.2945, pruned_loss=0.0873, over 19732.00 frames. ], tot_loss[loss=0.2561, simple_loss=0.3069, pruned_loss=0.1027, over 3936600.58 frames. ], batch size: 77, lr: 1.93e-02, grad_scale: 16.0 +2023-03-27 23:00:12,752 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14164.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:00:32,981 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.91 vs. limit=5.0 +2023-03-27 23:00:49,195 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14181.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 23:00:56,972 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3761, 5.6931, 5.6616, 5.5681, 5.4688, 5.6800, 5.0185, 4.9759], + device='cuda:0'), covar=tensor([0.0317, 0.0324, 0.0518, 0.0396, 0.0458, 0.0472, 0.0588, 0.0816], + device='cuda:0'), in_proj_covar=tensor([0.0173, 0.0164, 0.0210, 0.0171, 0.0164, 0.0161, 0.0188, 0.0225], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-27 23:00:58,061 INFO [train.py:892] (0/4) Epoch 8, batch 1200, loss[loss=0.256, simple_loss=0.3174, pruned_loss=0.09728, over 19691.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3074, pruned_loss=0.1026, over 3939753.00 frames. 
], batch size: 75, lr: 1.93e-02, grad_scale: 16.0 +2023-03-27 23:01:41,591 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.316e+02 5.724e+02 6.994e+02 8.657e+02 1.468e+03, threshold=1.399e+03, percent-clipped=2.0 +2023-03-27 23:02:43,473 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=5.10 vs. limit=5.0 +2023-03-27 23:02:44,258 INFO [train.py:892] (0/4) Epoch 8, batch 1250, loss[loss=0.2093, simple_loss=0.2651, pruned_loss=0.07672, over 19862.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.3052, pruned_loss=0.1013, over 3942262.00 frames. ], batch size: 122, lr: 1.92e-02, grad_scale: 16.0 +2023-03-27 23:04:27,623 INFO [train.py:892] (0/4) Epoch 8, batch 1300, loss[loss=0.2809, simple_loss=0.3471, pruned_loss=0.1073, over 19833.00 frames. ], tot_loss[loss=0.2533, simple_loss=0.3048, pruned_loss=0.1009, over 3944310.33 frames. ], batch size: 57, lr: 1.92e-02, grad_scale: 16.0 +2023-03-27 23:05:14,651 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.149e+02 5.379e+02 6.577e+02 8.584e+02 1.412e+03, threshold=1.315e+03, percent-clipped=1.0 +2023-03-27 23:06:13,949 INFO [train.py:892] (0/4) Epoch 8, batch 1350, loss[loss=0.2412, simple_loss=0.3001, pruned_loss=0.09119, over 19675.00 frames. ], tot_loss[loss=0.2551, simple_loss=0.3063, pruned_loss=0.1019, over 3945891.87 frames. ], batch size: 49, lr: 1.92e-02, grad_scale: 16.0 +2023-03-27 23:06:20,561 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8820, 3.1804, 3.4099, 3.9016, 2.7077, 3.2567, 2.6487, 2.3902], + device='cuda:0'), covar=tensor([0.0383, 0.2599, 0.0817, 0.0225, 0.2087, 0.0529, 0.1187, 0.1763], + device='cuda:0'), in_proj_covar=tensor([0.0174, 0.0330, 0.0213, 0.0130, 0.0232, 0.0157, 0.0188, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0001, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 23:07:58,323 INFO [train.py:892] (0/4) Epoch 8, batch 1400, loss[loss=0.22, simple_loss=0.2805, pruned_loss=0.07973, over 19800.00 frames. ], tot_loss[loss=0.2549, simple_loss=0.3056, pruned_loss=0.1021, over 3947033.73 frames. ], batch size: 150, lr: 1.92e-02, grad_scale: 16.0 +2023-03-27 23:08:13,124 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14393.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:08:40,808 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.031e+02 5.328e+02 6.639e+02 8.222e+02 1.941e+03, threshold=1.328e+03, percent-clipped=1.0 +2023-03-27 23:08:55,933 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14413.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:09:24,591 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14428.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:09:41,894 INFO [train.py:892] (0/4) Epoch 8, batch 1450, loss[loss=0.2352, simple_loss=0.2843, pruned_loss=0.09311, over 19822.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.3051, pruned_loss=0.1013, over 3947893.59 frames. 
], batch size: 187, lr: 1.91e-02, grad_scale: 16.0 +2023-03-27 23:09:52,297 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14441.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:10:16,196 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0321, 5.3303, 5.3654, 5.3539, 5.0366, 5.3176, 4.7012, 4.8806], + device='cuda:0'), covar=tensor([0.0337, 0.0348, 0.0549, 0.0368, 0.0497, 0.0516, 0.0657, 0.0875], + device='cuda:0'), in_proj_covar=tensor([0.0173, 0.0166, 0.0214, 0.0174, 0.0167, 0.0160, 0.0189, 0.0228], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-27 23:10:29,092 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14459.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:10:59,925 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14474.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:11:03,488 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14476.0, num_to_drop=1, layers_to_drop={3} +2023-03-27 23:11:24,884 INFO [train.py:892] (0/4) Epoch 8, batch 1500, loss[loss=0.2291, simple_loss=0.2915, pruned_loss=0.08336, over 19775.00 frames. ], tot_loss[loss=0.2534, simple_loss=0.3051, pruned_loss=0.1009, over 3948385.94 frames. ], batch size: 52, lr: 1.91e-02, grad_scale: 32.0 +2023-03-27 23:11:27,459 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14487.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:12:08,353 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.288e+02 5.447e+02 6.433e+02 7.944e+02 1.401e+03, threshold=1.287e+03, percent-clipped=2.0 +2023-03-27 23:13:06,140 INFO [train.py:892] (0/4) Epoch 8, batch 1550, loss[loss=0.2232, simple_loss=0.2798, pruned_loss=0.08334, over 19686.00 frames. ], tot_loss[loss=0.2535, simple_loss=0.3047, pruned_loss=0.1011, over 3948948.90 frames. ], batch size: 74, lr: 1.91e-02, grad_scale: 16.0 +2023-03-27 23:13:18,586 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6506, 2.0596, 2.7654, 3.2290, 3.8633, 4.0668, 4.0657, 4.1446], + device='cuda:0'), covar=tensor([0.0808, 0.1992, 0.1003, 0.0441, 0.0229, 0.0119, 0.0150, 0.0307], + device='cuda:0'), in_proj_covar=tensor([0.0124, 0.0163, 0.0138, 0.0114, 0.0098, 0.0090, 0.0085, 0.0089], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 23:13:33,838 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14548.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:14:11,759 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4419, 2.9189, 4.4255, 3.7573, 4.1000, 4.3795, 4.2083, 4.0357], + device='cuda:0'), covar=tensor([0.0102, 0.0560, 0.0072, 0.0880, 0.0085, 0.0142, 0.0104, 0.0079], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0081, 0.0063, 0.0137, 0.0058, 0.0071, 0.0067, 0.0057], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 23:14:54,498 INFO [train.py:892] (0/4) Epoch 8, batch 1600, loss[loss=0.2773, simple_loss=0.3287, pruned_loss=0.113, over 19748.00 frames. ], tot_loss[loss=0.2519, simple_loss=0.3036, pruned_loss=0.1001, over 3950401.71 frames. 
], batch size: 44, lr: 1.90e-02, grad_scale: 16.0 +2023-03-27 23:15:06,487 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.05 vs. limit=2.0 +2023-03-27 23:15:42,283 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.460e+02 5.480e+02 6.486e+02 8.210e+02 1.658e+03, threshold=1.297e+03, percent-clipped=3.0 +2023-03-27 23:16:47,976 INFO [train.py:892] (0/4) Epoch 8, batch 1650, loss[loss=0.1793, simple_loss=0.2347, pruned_loss=0.06193, over 19754.00 frames. ], tot_loss[loss=0.2517, simple_loss=0.3034, pruned_loss=0.1, over 3951227.22 frames. ], batch size: 102, lr: 1.90e-02, grad_scale: 16.0 +2023-03-27 23:18:38,583 INFO [train.py:892] (0/4) Epoch 8, batch 1700, loss[loss=0.2394, simple_loss=0.2985, pruned_loss=0.09013, over 19798.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.3027, pruned_loss=0.09922, over 3951742.29 frames. ], batch size: 51, lr: 1.90e-02, grad_scale: 16.0 +2023-03-27 23:19:29,027 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.414e+02 5.107e+02 6.150e+02 7.747e+02 1.488e+03, threshold=1.230e+03, percent-clipped=2.0 +2023-03-27 23:19:38,766 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-03-27 23:20:11,004 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14728.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:20:25,101 INFO [train.py:892] (0/4) Epoch 8, batch 1750, loss[loss=0.3748, simple_loss=0.4061, pruned_loss=0.1717, over 19430.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3015, pruned_loss=0.09791, over 3952767.31 frames. ], batch size: 412, lr: 1.90e-02, grad_scale: 16.0 +2023-03-27 23:20:32,918 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14740.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:21:11,335 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14759.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:21:29,649 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14769.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:21:41,281 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14776.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:21:41,444 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14776.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 23:21:57,269 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0669, 5.0834, 5.5403, 5.3198, 5.3288, 4.7483, 5.1670, 5.0612], + device='cuda:0'), covar=tensor([0.1251, 0.1101, 0.0855, 0.0952, 0.0662, 0.0974, 0.1888, 0.1834], + device='cuda:0'), in_proj_covar=tensor([0.0232, 0.0216, 0.0275, 0.0215, 0.0210, 0.0206, 0.0268, 0.0298], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-27 23:22:00,369 INFO [train.py:892] (0/4) Epoch 8, batch 1800, loss[loss=0.2261, simple_loss=0.2873, pruned_loss=0.08243, over 19890.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.3012, pruned_loss=0.09757, over 3952609.71 frames. 
], batch size: 47, lr: 1.89e-02, grad_scale: 16.0 +2023-03-27 23:22:07,942 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3029, 4.0875, 4.1781, 3.8554, 4.2836, 3.2301, 3.6270, 2.3201], + device='cuda:0'), covar=tensor([0.0194, 0.0190, 0.0136, 0.0184, 0.0135, 0.0666, 0.0613, 0.1157], + device='cuda:0'), in_proj_covar=tensor([0.0078, 0.0102, 0.0089, 0.0103, 0.0091, 0.0114, 0.0123, 0.0106], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-27 23:22:28,296 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14801.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:22:39,625 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14807.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:22:41,053 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.298e+02 5.336e+02 6.514e+02 7.931e+02 1.246e+03, threshold=1.303e+03, percent-clipped=1.0 +2023-03-27 23:23:10,490 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14824.0, num_to_drop=1, layers_to_drop={0} +2023-03-27 23:23:30,838 INFO [train.py:892] (0/4) Epoch 8, batch 1850, loss[loss=0.2498, simple_loss=0.3077, pruned_loss=0.09593, over 19672.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.3054, pruned_loss=0.09914, over 3950515.33 frames. ], batch size: 55, lr: 1.89e-02, grad_scale: 16.0 +2023-03-27 23:23:38,411 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-8.pt +2023-03-27 23:24:36,972 INFO [train.py:892] (0/4) Epoch 9, batch 0, loss[loss=0.232, simple_loss=0.2825, pruned_loss=0.0908, over 19866.00 frames. ], tot_loss[loss=0.232, simple_loss=0.2825, pruned_loss=0.0908, over 19866.00 frames. ], batch size: 129, lr: 1.79e-02, grad_scale: 16.0 +2023-03-27 23:24:36,974 INFO [train.py:917] (0/4) Computing validation loss +2023-03-27 23:25:11,157 INFO [train.py:926] (0/4) Epoch 9, validation: loss=0.1843, simple_loss=0.2646, pruned_loss=0.05198, over 2883724.00 frames. +2023-03-27 23:25:11,159 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-27 23:25:16,053 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14843.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:26:58,560 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.8160, 6.1681, 6.1184, 6.0133, 5.9012, 6.0329, 5.3700, 5.5438], + device='cuda:0'), covar=tensor([0.0317, 0.0344, 0.0520, 0.0330, 0.0531, 0.0613, 0.0555, 0.0747], + device='cuda:0'), in_proj_covar=tensor([0.0180, 0.0175, 0.0218, 0.0178, 0.0170, 0.0161, 0.0195, 0.0232], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-27 23:27:06,400 INFO [train.py:892] (0/4) Epoch 9, batch 50, loss[loss=0.2356, simple_loss=0.2942, pruned_loss=0.08844, over 19836.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.2997, pruned_loss=0.1003, over 891320.57 frames. ], batch size: 90, lr: 1.79e-02, grad_scale: 16.0 +2023-03-27 23:27:44,393 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.418e+02 5.041e+02 5.916e+02 7.576e+02 1.365e+03, threshold=1.183e+03, percent-clipped=1.0 +2023-03-27 23:29:00,082 INFO [train.py:892] (0/4) Epoch 9, batch 100, loss[loss=0.2352, simple_loss=0.2942, pruned_loss=0.08804, over 19883.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.2945, pruned_loss=0.0947, over 1570727.98 frames. 
], batch size: 88, lr: 1.78e-02, grad_scale: 16.0 +2023-03-27 23:30:00,641 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0902, 3.7592, 5.0170, 4.0801, 4.6398, 4.9181, 4.8483, 4.6933], + device='cuda:0'), covar=tensor([0.0110, 0.0420, 0.0075, 0.1026, 0.0090, 0.0136, 0.0092, 0.0075], + device='cuda:0'), in_proj_covar=tensor([0.0071, 0.0079, 0.0064, 0.0136, 0.0058, 0.0072, 0.0068, 0.0057], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 23:30:48,429 INFO [train.py:892] (0/4) Epoch 9, batch 150, loss[loss=0.243, simple_loss=0.297, pruned_loss=0.09449, over 19740.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.2958, pruned_loss=0.09583, over 2098748.80 frames. ], batch size: 77, lr: 1.78e-02, grad_scale: 16.0 +2023-03-27 23:31:05,036 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4230, 2.3299, 1.1819, 2.7699, 2.5635, 2.6478, 2.8369, 2.2112], + device='cuda:0'), covar=tensor([0.0563, 0.0606, 0.1720, 0.0504, 0.0538, 0.0369, 0.0401, 0.0766], + device='cuda:0'), in_proj_covar=tensor([0.0112, 0.0106, 0.0125, 0.0110, 0.0096, 0.0088, 0.0101, 0.0114], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-27 23:31:28,932 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.328e+02 4.885e+02 5.781e+02 7.549e+02 1.487e+03, threshold=1.156e+03, percent-clipped=3.0 +2023-03-27 23:32:44,991 INFO [train.py:892] (0/4) Epoch 9, batch 200, loss[loss=0.2034, simple_loss=0.2672, pruned_loss=0.0698, over 19648.00 frames. ], tot_loss[loss=0.249, simple_loss=0.3013, pruned_loss=0.09833, over 2507572.34 frames. ], batch size: 47, lr: 1.78e-02, grad_scale: 16.0 +2023-03-27 23:33:17,223 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5073, 2.4436, 1.4423, 2.8674, 2.6963, 2.7930, 2.9267, 2.2803], + device='cuda:0'), covar=tensor([0.0567, 0.0599, 0.1497, 0.0452, 0.0450, 0.0426, 0.0412, 0.0770], + device='cuda:0'), in_proj_covar=tensor([0.0113, 0.0109, 0.0127, 0.0114, 0.0097, 0.0090, 0.0103, 0.0117], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-27 23:33:25,029 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8276, 4.3455, 4.2999, 4.7658, 4.3687, 4.9422, 4.9074, 4.9884], + device='cuda:0'), covar=tensor([0.0543, 0.0320, 0.0442, 0.0240, 0.0546, 0.0216, 0.0358, 0.0252], + device='cuda:0'), in_proj_covar=tensor([0.0120, 0.0135, 0.0161, 0.0135, 0.0133, 0.0113, 0.0129, 0.0152], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 23:33:43,082 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-27 23:33:47,740 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15069.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:34:38,150 INFO [train.py:892] (0/4) Epoch 9, batch 250, loss[loss=0.2428, simple_loss=0.2926, pruned_loss=0.09648, over 19817.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.2995, pruned_loss=0.09695, over 2827603.13 frames. 
], batch size: 202, lr: 1.78e-02, grad_scale: 16.0 +2023-03-27 23:34:48,851 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15096.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:35:06,337 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3881, 4.7626, 5.0472, 4.8621, 5.2991, 3.4630, 4.2187, 2.7284], + device='cuda:0'), covar=tensor([0.0161, 0.0191, 0.0130, 0.0155, 0.0148, 0.0679, 0.0800, 0.1242], + device='cuda:0'), in_proj_covar=tensor([0.0079, 0.0103, 0.0088, 0.0102, 0.0091, 0.0114, 0.0124, 0.0108], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-27 23:35:13,647 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.451e+02 5.417e+02 6.236e+02 8.058e+02 1.427e+03, threshold=1.247e+03, percent-clipped=5.0 +2023-03-27 23:35:35,270 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15117.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:36:26,564 INFO [train.py:892] (0/4) Epoch 9, batch 300, loss[loss=0.2276, simple_loss=0.2992, pruned_loss=0.07803, over 19675.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3033, pruned_loss=0.09827, over 3073012.94 frames. ], batch size: 49, lr: 1.77e-02, grad_scale: 16.0 +2023-03-27 23:36:33,497 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15143.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:38:09,545 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6915, 4.2767, 4.2740, 4.6793, 4.4114, 4.8788, 4.7983, 4.9508], + device='cuda:0'), covar=tensor([0.0678, 0.0293, 0.0428, 0.0241, 0.0468, 0.0202, 0.0310, 0.0261], + device='cuda:0'), in_proj_covar=tensor([0.0120, 0.0136, 0.0162, 0.0135, 0.0131, 0.0111, 0.0129, 0.0152], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 23:38:17,841 INFO [train.py:892] (0/4) Epoch 9, batch 350, loss[loss=0.2236, simple_loss=0.2839, pruned_loss=0.08169, over 19810.00 frames. ], tot_loss[loss=0.2489, simple_loss=0.302, pruned_loss=0.09783, over 3268916.54 frames. ], batch size: 72, lr: 1.77e-02, grad_scale: 16.0 +2023-03-27 23:38:18,646 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15191.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:38:27,456 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15195.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:38:52,906 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.594e+02 5.622e+02 6.774e+02 8.741e+02 1.605e+03, threshold=1.355e+03, percent-clipped=2.0 +2023-03-27 23:40:06,025 INFO [train.py:892] (0/4) Epoch 9, batch 400, loss[loss=0.2139, simple_loss=0.2679, pruned_loss=0.07991, over 19776.00 frames. ], tot_loss[loss=0.248, simple_loss=0.3017, pruned_loss=0.09719, over 3418425.18 frames. ], batch size: 154, lr: 1.77e-02, grad_scale: 16.0 +2023-03-27 23:40:40,949 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15256.0, num_to_drop=1, layers_to_drop={3} +2023-03-27 23:40:50,839 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-27 23:41:58,926 INFO [train.py:892] (0/4) Epoch 9, batch 450, loss[loss=0.2648, simple_loss=0.3184, pruned_loss=0.1056, over 19832.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3016, pruned_loss=0.09712, over 3536764.30 frames. 
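
The zipformer.py:625 records track stochastic layer dropping: each encoder stack has its own warmup window (warmup_begin/warmup_end, in batches), and on some batches one or two layers are skipped (num_to_drop, layers_to_drop). By this point in training batch_count is far past every window, so drops are rare. A sketch of a schedule with that qualitative behaviour; the probabilities and decay shape below are illustrative, not taken from zipformer.py:

import random

def choose_layers_to_drop(batch_count: float,
                          warmup_begin: float,
                          warmup_end: float,
                          num_layers: int,
                          max_drop_prob: float = 0.05) -> set:
    # Drop probability is highest before warmup_begin, decays across the
    # warmup window, and keeps a small residual afterwards -- consistent
    # with the occasional num_to_drop=1 lines seen this late in training.
    if batch_count >= warmup_end:
        drop_prob = 0.1 * max_drop_prob
    elif batch_count <= warmup_begin:
        drop_prob = max_drop_prob
    else:
        frac = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
        drop_prob = max_drop_prob * (1.0 - 0.9 * frac)
    return {i for i in range(num_layers) if random.random() < drop_prob}
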
], batch size: 52, lr: 1.76e-02, grad_scale: 16.0 +2023-03-27 23:42:14,131 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4469, 3.9982, 4.0177, 4.4716, 4.2704, 4.5075, 4.5532, 4.7150], + device='cuda:0'), covar=tensor([0.0580, 0.0327, 0.0473, 0.0239, 0.0459, 0.0276, 0.0328, 0.0262], + device='cuda:0'), in_proj_covar=tensor([0.0120, 0.0137, 0.0162, 0.0136, 0.0133, 0.0114, 0.0129, 0.0151], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 23:42:28,687 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.52 vs. limit=5.0 +2023-03-27 23:42:37,303 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.510e+02 5.167e+02 6.128e+02 7.422e+02 1.689e+03, threshold=1.226e+03, percent-clipped=1.0 +2023-03-27 23:43:54,444 INFO [train.py:892] (0/4) Epoch 9, batch 500, loss[loss=0.2026, simple_loss=0.2683, pruned_loss=0.06846, over 19798.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3005, pruned_loss=0.09676, over 3628509.67 frames. ], batch size: 86, lr: 1.76e-02, grad_scale: 16.0 +2023-03-27 23:45:44,800 INFO [train.py:892] (0/4) Epoch 9, batch 550, loss[loss=0.2338, simple_loss=0.2853, pruned_loss=0.0911, over 19781.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.2998, pruned_loss=0.09596, over 3699694.26 frames. ], batch size: 178, lr: 1.76e-02, grad_scale: 16.0 +2023-03-27 23:45:54,949 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15396.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:46:11,228 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1411, 2.4603, 1.9933, 1.5447, 2.2364, 2.4884, 2.2257, 2.5470], + device='cuda:0'), covar=tensor([0.0228, 0.0208, 0.0200, 0.0522, 0.0304, 0.0167, 0.0161, 0.0128], + device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0049, 0.0055, 0.0069, 0.0069, 0.0047, 0.0042, 0.0045], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-03-27 23:46:26,491 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.786e+02 5.578e+02 6.357e+02 7.832e+02 1.667e+03, threshold=1.271e+03, percent-clipped=4.0 +2023-03-27 23:47:38,382 INFO [train.py:892] (0/4) Epoch 9, batch 600, loss[loss=0.2583, simple_loss=0.3167, pruned_loss=0.09995, over 19695.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.3012, pruned_loss=0.09648, over 3753264.50 frames. ], batch size: 74, lr: 1.76e-02, grad_scale: 16.0 +2023-03-27 23:47:47,229 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15444.0, num_to_drop=0, layers_to_drop=set() +2023-03-27 23:49:30,486 INFO [train.py:892] (0/4) Epoch 9, batch 650, loss[loss=0.2391, simple_loss=0.3002, pruned_loss=0.08897, over 19852.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.3016, pruned_loss=0.09685, over 3797288.84 frames. 
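
Each optim.py:368 line summarizes recent gradient norms as five quantiles (presumably min/25%/median/75%/max), the clipping threshold currently in force, and how often clipping fired. The threshold rule can be read off the printed numbers themselves: it is exactly Clipping_scale times the median (e.g. 2 x 6.514e+02 = 1.303e+03 above). The sketch below reproduces that bookkeeping; the history-window size and the percent-clipped accounting are assumptions:

import torch

class GradNormClipper:
    """Sketch of the statistics behind the optim.py:368 lines."""
    def __init__(self, clipping_scale: float = 2.0, history: int = 1000):
        self.scale = clipping_scale
        self.history = history
        self.norms = []
        self.seen = 0
        self.clipped = 0

    def __call__(self, parameters) -> None:
        grads = [p.grad for p in parameters if p.grad is not None]
        # Global grad norm = sqrt of the sum of squared per-tensor norms.
        norm = torch.norm(torch.stack([g.detach().norm() for g in grads])).item()
        self.norms = (self.norms + [norm])[-self.history:]
        q = torch.quantile(torch.tensor(self.norms),
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.scale * q[2].item()   # clipping_scale * median
        self.seen += 1
        if norm > threshold:
            self.clipped += 1
            for g in grads:
                g.mul_(threshold / norm)
        print(f"Clipping_scale={self.scale}, grad-norm quartiles "
              f"{' '.join(f'{v:.3e}' for v in q.tolist())}, "
              f"threshold={threshold:.3e}, "
              f"percent-clipped={100.0 * self.clipped / self.seen:.1f}")
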
], batch size: 49, lr: 1.75e-02, grad_scale: 16.0 +2023-03-27 23:49:47,728 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4336, 2.4779, 3.6038, 2.7975, 3.2776, 3.5954, 2.0589, 2.1439], + device='cuda:0'), covar=tensor([0.0649, 0.2497, 0.0416, 0.0618, 0.1034, 0.0507, 0.1386, 0.1987], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0314, 0.0234, 0.0197, 0.0301, 0.0218, 0.0249, 0.0239], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 23:49:56,427 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0722, 2.5010, 3.1633, 3.4621, 3.8165, 4.4580, 4.2625, 4.4249], + device='cuda:0'), covar=tensor([0.0703, 0.1760, 0.0895, 0.0422, 0.0252, 0.0144, 0.0160, 0.0296], + device='cuda:0'), in_proj_covar=tensor([0.0128, 0.0168, 0.0140, 0.0119, 0.0100, 0.0094, 0.0090, 0.0092], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 23:50:08,031 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.542e+02 5.095e+02 6.402e+02 7.692e+02 1.403e+03, threshold=1.280e+03, percent-clipped=2.0 +2023-03-27 23:51:23,093 INFO [train.py:892] (0/4) Epoch 9, batch 700, loss[loss=0.2356, simple_loss=0.298, pruned_loss=0.08659, over 19712.00 frames. ], tot_loss[loss=0.2473, simple_loss=0.3012, pruned_loss=0.0967, over 3831717.51 frames. ], batch size: 62, lr: 1.75e-02, grad_scale: 16.0 +2023-03-27 23:51:45,123 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15551.0, num_to_drop=1, layers_to_drop={1} +2023-03-27 23:51:45,342 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8311, 2.4590, 2.9786, 2.9481, 3.0764, 3.1036, 3.7461, 3.9955], + device='cuda:0'), covar=tensor([0.0520, 0.1553, 0.1422, 0.1515, 0.1642, 0.1220, 0.0362, 0.0335], + device='cuda:0'), in_proj_covar=tensor([0.0187, 0.0207, 0.0220, 0.0225, 0.0241, 0.0216, 0.0153, 0.0160], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 23:53:00,089 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2663, 3.3490, 3.5454, 4.2621, 2.8673, 3.4805, 2.8392, 2.5087], + device='cuda:0'), covar=tensor([0.0371, 0.2594, 0.0787, 0.0180, 0.2033, 0.0554, 0.1101, 0.1833], + device='cuda:0'), in_proj_covar=tensor([0.0182, 0.0327, 0.0214, 0.0137, 0.0231, 0.0161, 0.0193, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 23:53:15,935 INFO [train.py:892] (0/4) Epoch 9, batch 750, loss[loss=0.275, simple_loss=0.3216, pruned_loss=0.1142, over 19875.00 frames. ], tot_loss[loss=0.2459, simple_loss=0.3002, pruned_loss=0.09579, over 3857456.45 frames. ], batch size: 159, lr: 1.75e-02, grad_scale: 16.0 +2023-03-27 23:53:40,712 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.82 vs. limit=5.0 +2023-03-27 23:53:54,114 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.826e+02 5.119e+02 6.405e+02 7.720e+02 1.158e+03, threshold=1.281e+03, percent-clipped=0.0 +2023-03-27 23:53:59,763 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.90 vs. 
limit=2.0 +2023-03-27 23:54:04,163 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0914, 5.0014, 5.4765, 5.2413, 5.2039, 4.9087, 5.0995, 4.9730], + device='cuda:0'), covar=tensor([0.1233, 0.1352, 0.0860, 0.0970, 0.0680, 0.0868, 0.1718, 0.2059], + device='cuda:0'), in_proj_covar=tensor([0.0232, 0.0221, 0.0280, 0.0215, 0.0211, 0.0205, 0.0276, 0.0303], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-27 23:55:10,141 INFO [train.py:892] (0/4) Epoch 9, batch 800, loss[loss=0.2477, simple_loss=0.3062, pruned_loss=0.09454, over 19640.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.2999, pruned_loss=0.09573, over 3878102.82 frames. ], batch size: 79, lr: 1.75e-02, grad_scale: 16.0 +2023-03-27 23:57:05,116 INFO [train.py:892] (0/4) Epoch 9, batch 850, loss[loss=0.2242, simple_loss=0.282, pruned_loss=0.08325, over 19630.00 frames. ], tot_loss[loss=0.2451, simple_loss=0.2996, pruned_loss=0.09533, over 3894162.63 frames. ], batch size: 68, lr: 1.74e-02, grad_scale: 16.0 +2023-03-27 23:57:18,777 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1861, 4.4390, 4.4964, 4.6104, 4.2025, 4.4543, 4.1558, 3.6215], + device='cuda:0'), covar=tensor([0.0988, 0.1023, 0.1197, 0.0835, 0.1113, 0.1059, 0.1087, 0.2597], + device='cuda:0'), in_proj_covar=tensor([0.0180, 0.0175, 0.0224, 0.0179, 0.0176, 0.0164, 0.0194, 0.0234], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-27 23:57:34,994 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. limit=2.0 +2023-03-27 23:57:44,965 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.938e+02 5.306e+02 6.456e+02 8.498e+02 2.233e+03, threshold=1.291e+03, percent-clipped=2.0 +2023-03-27 23:57:56,131 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8416, 2.4798, 2.9648, 3.4858, 3.7352, 4.1266, 4.0217, 4.2824], + device='cuda:0'), covar=tensor([0.0771, 0.1795, 0.0957, 0.0379, 0.0327, 0.0154, 0.0244, 0.0346], + device='cuda:0'), in_proj_covar=tensor([0.0125, 0.0167, 0.0138, 0.0117, 0.0100, 0.0093, 0.0089, 0.0090], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-27 23:58:58,983 INFO [train.py:892] (0/4) Epoch 9, batch 900, loss[loss=0.2125, simple_loss=0.262, pruned_loss=0.0815, over 19736.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.2985, pruned_loss=0.09488, over 3908185.25 frames. ], batch size: 118, lr: 1.74e-02, grad_scale: 16.0 +2023-03-28 00:00:51,422 INFO [train.py:892] (0/4) Epoch 9, batch 950, loss[loss=0.2369, simple_loss=0.288, pruned_loss=0.0929, over 19756.00 frames. ], tot_loss[loss=0.2466, simple_loss=0.3007, pruned_loss=0.09625, over 3917332.28 frames. ], batch size: 188, lr: 1.74e-02, grad_scale: 16.0 +2023-03-28 00:01:31,054 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.223e+02 4.952e+02 6.049e+02 8.217e+02 1.611e+03, threshold=1.210e+03, percent-clipped=3.0 +2023-03-28 00:02:47,452 INFO [train.py:892] (0/4) Epoch 9, batch 1000, loss[loss=0.264, simple_loss=0.309, pruned_loss=0.1095, over 19741.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.2994, pruned_loss=0.09556, over 3925138.65 frames. 
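
The scaling.py:679 lines compare a measured whitening metric against a fixed limit: 2.0 for the grouped 96- and 192-channel checks, 5.0 for the single-group 384-channel check. Values near 1.0 mean the channel covariance of the activations is close to a scaled identity. One standard way to get a metric with exactly this behaviour is the eigenvalue-spread ratio below; the normalization used by scaling.py itself is an assumption:

import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    """x: (num_frames, num_channels).  Per channel group, measure how
    uneven the covariance spectrum is: trace(C @ C) * dim / trace(C)**2
    equals 1.0 for perfectly white features and grows as variance
    concentrates in a few directions."""
    n, c = x.shape
    gsize = c // num_groups
    x = x.reshape(n, num_groups, gsize).transpose(0, 1)   # (groups, n, g)
    x = x - x.mean(dim=1, keepdim=True)
    cov = torch.matmul(x.transpose(1, 2), x) / n          # (groups, g, g)
    tr_c2 = (cov * cov.transpose(1, 2)).sum(dim=(1, 2))   # trace(C @ C)
    tr_c = cov.diagonal(dim1=1, dim2=2).sum(dim=1)        # trace(C)
    return (tr_c2 * gsize / tr_c.clamp(min=1e-20) ** 2).mean()
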
], batch size: 259, lr: 1.74e-02, grad_scale: 16.0 +2023-03-28 00:03:11,007 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15851.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:03:29,260 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.0443, 2.0902, 2.4805, 1.9890, 1.7870, 1.8810, 1.8402, 2.2279], + device='cuda:0'), covar=tensor([0.0201, 0.0247, 0.0181, 0.0295, 0.0378, 0.0290, 0.0452, 0.0172], + device='cuda:0'), in_proj_covar=tensor([0.0042, 0.0042, 0.0044, 0.0037, 0.0046, 0.0044, 0.0059, 0.0040], + device='cuda:0'), out_proj_covar=tensor([9.2580e-05, 9.3763e-05, 9.7266e-05, 8.3650e-05, 1.0376e-04, 9.8190e-05, + 1.2850e-04, 9.2399e-05], device='cuda:0') +2023-03-28 00:04:24,294 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15883.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:04:41,516 INFO [train.py:892] (0/4) Epoch 9, batch 1050, loss[loss=0.258, simple_loss=0.3161, pruned_loss=0.09994, over 19677.00 frames. ], tot_loss[loss=0.245, simple_loss=0.2992, pruned_loss=0.09537, over 3931185.99 frames. ], batch size: 64, lr: 1.73e-02, grad_scale: 16.0 +2023-03-28 00:04:58,487 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15899.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:05:01,383 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-28 00:05:18,403 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.416e+02 5.707e+02 6.582e+02 7.521e+02 1.130e+03, threshold=1.316e+03, percent-clipped=0.0 +2023-03-28 00:06:34,645 INFO [train.py:892] (0/4) Epoch 9, batch 1100, loss[loss=0.2028, simple_loss=0.2684, pruned_loss=0.06854, over 19706.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.3002, pruned_loss=0.09571, over 3935038.91 frames. ], batch size: 101, lr: 1.73e-02, grad_scale: 16.0 +2023-03-28 00:06:42,205 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15944.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:08:26,895 INFO [train.py:892] (0/4) Epoch 9, batch 1150, loss[loss=0.2264, simple_loss=0.2958, pruned_loss=0.07854, over 19799.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3009, pruned_loss=0.09579, over 3936543.05 frames. 
], batch size: 51, lr: 1.73e-02, grad_scale: 16.0 +2023-03-28 00:08:40,259 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3645, 3.3939, 1.8474, 4.3210, 3.6810, 4.1698, 4.2196, 3.0432], + device='cuda:0'), covar=tensor([0.0475, 0.0434, 0.1570, 0.0328, 0.0449, 0.0329, 0.0482, 0.0666], + device='cuda:0'), in_proj_covar=tensor([0.0114, 0.0108, 0.0126, 0.0112, 0.0097, 0.0091, 0.0105, 0.0114], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 00:08:45,816 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-16000.pt +2023-03-28 00:08:54,344 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16001.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:09:09,128 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.185e+02 5.244e+02 6.208e+02 7.678e+02 1.349e+03, threshold=1.242e+03, percent-clipped=1.0 +2023-03-28 00:09:30,393 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5813, 2.0794, 2.7824, 3.0701, 3.4176, 3.5759, 3.6682, 3.7497], + device='cuda:0'), covar=tensor([0.0789, 0.1960, 0.0954, 0.0500, 0.0430, 0.0227, 0.0279, 0.0220], + device='cuda:0'), in_proj_covar=tensor([0.0129, 0.0172, 0.0145, 0.0122, 0.0104, 0.0098, 0.0090, 0.0094], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 00:10:22,658 INFO [train.py:892] (0/4) Epoch 9, batch 1200, loss[loss=0.2435, simple_loss=0.2949, pruned_loss=0.09598, over 19712.00 frames. ], tot_loss[loss=0.2487, simple_loss=0.3033, pruned_loss=0.09704, over 3936776.17 frames. ], batch size: 60, lr: 1.73e-02, grad_scale: 16.0 +2023-03-28 00:11:10,971 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16062.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:11:54,127 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6268, 4.5824, 5.0431, 4.7647, 4.8916, 4.2757, 4.6470, 4.5205], + device='cuda:0'), covar=tensor([0.1143, 0.1259, 0.0864, 0.1034, 0.0699, 0.1074, 0.1994, 0.2138], + device='cuda:0'), in_proj_covar=tensor([0.0231, 0.0223, 0.0279, 0.0217, 0.0212, 0.0209, 0.0275, 0.0306], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 00:12:03,695 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16085.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:12:15,721 INFO [train.py:892] (0/4) Epoch 9, batch 1250, loss[loss=0.2305, simple_loss=0.2854, pruned_loss=0.08777, over 19751.00 frames. ], tot_loss[loss=0.2473, simple_loss=0.3024, pruned_loss=0.09609, over 3937851.99 frames. ], batch size: 71, lr: 1.72e-02, grad_scale: 16.0 +2023-03-28 00:12:22,834 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-03-28 00:12:26,705 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. 
limit=2.0 +2023-03-28 00:12:49,934 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.594e+02 5.905e+02 6.787e+02 8.144e+02 1.550e+03, threshold=1.357e+03, percent-clipped=6.0 +2023-03-28 00:12:53,324 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1764, 5.6318, 5.9066, 5.7717, 5.6486, 5.2003, 5.4605, 5.5537], + device='cuda:0'), covar=tensor([0.1214, 0.0961, 0.0901, 0.0766, 0.0646, 0.0832, 0.1988, 0.1997], + device='cuda:0'), in_proj_covar=tensor([0.0230, 0.0222, 0.0277, 0.0213, 0.0212, 0.0207, 0.0271, 0.0305], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 00:14:05,680 INFO [train.py:892] (0/4) Epoch 9, batch 1300, loss[loss=0.2245, simple_loss=0.2838, pruned_loss=0.0826, over 19741.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.3015, pruned_loss=0.09571, over 3940506.88 frames. ], batch size: 71, lr: 1.72e-02, grad_scale: 16.0 +2023-03-28 00:14:17,824 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16146.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:14:25,304 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16149.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:14:47,935 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9749, 4.4025, 4.5553, 5.0737, 4.6963, 5.2300, 5.0827, 5.2753], + device='cuda:0'), covar=tensor([0.0612, 0.0305, 0.0390, 0.0185, 0.0480, 0.0209, 0.0322, 0.0287], + device='cuda:0'), in_proj_covar=tensor([0.0119, 0.0138, 0.0162, 0.0136, 0.0134, 0.0117, 0.0125, 0.0153], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 00:15:57,801 INFO [train.py:892] (0/4) Epoch 9, batch 1350, loss[loss=0.2474, simple_loss=0.2957, pruned_loss=0.09958, over 19754.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.299, pruned_loss=0.09417, over 3943443.65 frames. ], batch size: 205, lr: 1.72e-02, grad_scale: 16.0 +2023-03-28 00:16:20,913 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.84 vs. limit=5.0 +2023-03-28 00:16:35,999 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.606e+02 5.207e+02 6.312e+02 7.771e+02 1.214e+03, threshold=1.262e+03, percent-clipped=0.0 +2023-03-28 00:16:41,099 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16210.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:17:50,308 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16239.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:17:53,609 INFO [train.py:892] (0/4) Epoch 9, batch 1400, loss[loss=0.2633, simple_loss=0.3316, pruned_loss=0.09757, over 19696.00 frames. ], tot_loss[loss=0.2469, simple_loss=0.3017, pruned_loss=0.09612, over 3942395.17 frames. ], batch size: 48, lr: 1.72e-02, grad_scale: 16.0 +2023-03-28 00:19:46,043 INFO [train.py:892] (0/4) Epoch 9, batch 1450, loss[loss=0.2831, simple_loss=0.3263, pruned_loss=0.12, over 19785.00 frames. ], tot_loss[loss=0.2436, simple_loss=0.299, pruned_loss=0.09411, over 3945161.87 frames. 
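
Two kinds of snapshots show up in the checkpoint.py:75 lines: per-epoch files (epoch-8.pt, epoch-9.pt) written when an epoch finishes, and batch-indexed files written on a fixed cadence (checkpoint-16000.pt above, checkpoint-18000.pt further down, i.e. every 2000 training batches). A minimal sketch of what such a resumable snapshot typically contains; the exact keys saved by this repo's checkpoint.py are an assumption:

import torch

def save_checkpoint(filename, model, optimizer=None, scheduler=None,
                    sampler=None, batch_idx_train=0):
    """Write a training snapshot that is enough to resume from."""
    ckpt = {
        "model": model.state_dict(),
        "batch_idx_train": batch_idx_train,
    }
    if optimizer is not None:
        ckpt["optimizer"] = optimizer.state_dict()
    if scheduler is not None:
        ckpt["scheduler"] = scheduler.state_dict()
    if sampler is not None and hasattr(sampler, "state_dict"):
        # Saving the sampler lets a resumed run skip already-seen cuts.
        ckpt["sampler"] = sampler.state_dict()
    torch.save(ckpt, filename)
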
], batch size: 236, lr: 1.71e-02, grad_scale: 16.0 +2023-03-28 00:19:47,168 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0544, 3.0223, 1.6444, 3.8151, 3.4699, 3.7372, 3.8779, 2.8103], + device='cuda:0'), covar=tensor([0.0487, 0.0495, 0.1574, 0.0368, 0.0350, 0.0294, 0.0333, 0.0642], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0111, 0.0129, 0.0115, 0.0100, 0.0094, 0.0108, 0.0117], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 00:20:26,642 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.895e+02 4.802e+02 5.731e+02 6.907e+02 1.143e+03, threshold=1.146e+03, percent-clipped=0.0 +2023-03-28 00:21:18,117 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3314, 4.2012, 4.3046, 4.0322, 4.4025, 3.1359, 3.4864, 2.3117], + device='cuda:0'), covar=tensor([0.0330, 0.0208, 0.0186, 0.0202, 0.0214, 0.0876, 0.0973, 0.1481], + device='cuda:0'), in_proj_covar=tensor([0.0080, 0.0107, 0.0093, 0.0107, 0.0094, 0.0116, 0.0128, 0.0111], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 00:21:40,554 INFO [train.py:892] (0/4) Epoch 9, batch 1500, loss[loss=0.2349, simple_loss=0.2927, pruned_loss=0.08857, over 19687.00 frames. ], tot_loss[loss=0.2436, simple_loss=0.2985, pruned_loss=0.09433, over 3947267.77 frames. ], batch size: 75, lr: 1.71e-02, grad_scale: 16.0 +2023-03-28 00:22:16,981 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16357.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:22:45,786 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16371.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:23:27,936 INFO [train.py:892] (0/4) Epoch 9, batch 1550, loss[loss=0.2155, simple_loss=0.2777, pruned_loss=0.07663, over 19753.00 frames. ], tot_loss[loss=0.2457, simple_loss=0.3001, pruned_loss=0.09563, over 3946641.43 frames. ], batch size: 97, lr: 1.71e-02, grad_scale: 16.0 +2023-03-28 00:24:05,933 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.814e+02 5.273e+02 6.072e+02 7.301e+02 1.702e+03, threshold=1.214e+03, percent-clipped=5.0 +2023-03-28 00:25:00,791 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16432.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:25:19,716 INFO [train.py:892] (0/4) Epoch 9, batch 1600, loss[loss=0.2194, simple_loss=0.2837, pruned_loss=0.07749, over 19816.00 frames. ], tot_loss[loss=0.2433, simple_loss=0.2984, pruned_loss=0.09411, over 3947151.52 frames. ], batch size: 98, lr: 1.71e-02, grad_scale: 16.0 +2023-03-28 00:25:20,604 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16441.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:27:15,090 INFO [train.py:892] (0/4) Epoch 9, batch 1650, loss[loss=0.2423, simple_loss=0.2965, pruned_loss=0.09406, over 19756.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.2964, pruned_loss=0.093, over 3949682.12 frames. 
], batch size: 209, lr: 1.71e-02, grad_scale: 16.0 +2023-03-28 00:27:28,900 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1021, 2.3945, 3.2700, 2.8083, 2.9740, 3.3099, 1.9399, 2.0920], + device='cuda:0'), covar=tensor([0.0730, 0.2163, 0.0446, 0.0547, 0.1135, 0.0593, 0.1488, 0.1857], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0319, 0.0244, 0.0198, 0.0309, 0.0229, 0.0256, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 00:27:46,364 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16505.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:27:52,669 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.351e+02 5.128e+02 6.235e+02 7.643e+02 1.695e+03, threshold=1.247e+03, percent-clipped=3.0 +2023-03-28 00:29:04,135 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16539.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:29:07,568 INFO [train.py:892] (0/4) Epoch 9, batch 1700, loss[loss=0.2717, simple_loss=0.3167, pruned_loss=0.1133, over 19809.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.2969, pruned_loss=0.09347, over 3949818.32 frames. ], batch size: 229, lr: 1.70e-02, grad_scale: 32.0 +2023-03-28 00:30:46,918 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16587.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:30:53,442 INFO [train.py:892] (0/4) Epoch 9, batch 1750, loss[loss=0.2448, simple_loss=0.2896, pruned_loss=0.1001, over 19855.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.2967, pruned_loss=0.09382, over 3950433.40 frames. ], batch size: 142, lr: 1.70e-02, grad_scale: 32.0 +2023-03-28 00:31:17,366 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0276, 4.9656, 5.5024, 5.2695, 5.2844, 4.7118, 5.1248, 4.9574], + device='cuda:0'), covar=tensor([0.1177, 0.1138, 0.0794, 0.0746, 0.0639, 0.1002, 0.1913, 0.2019], + device='cuda:0'), in_proj_covar=tensor([0.0235, 0.0225, 0.0281, 0.0215, 0.0216, 0.0209, 0.0273, 0.0309], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 00:31:28,583 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.990e+02 5.355e+02 6.431e+02 7.521e+02 1.438e+03, threshold=1.286e+03, percent-clipped=3.0 +2023-03-28 00:31:46,747 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3675, 4.3381, 2.6492, 4.7933, 5.1040, 1.9790, 4.1526, 3.6694], + device='cuda:0'), covar=tensor([0.0581, 0.0846, 0.2491, 0.0392, 0.0177, 0.3073, 0.0901, 0.0664], + device='cuda:0'), in_proj_covar=tensor([0.0181, 0.0205, 0.0205, 0.0178, 0.0133, 0.0196, 0.0212, 0.0148], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 00:32:00,804 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.87 vs. limit=2.0 +2023-03-28 00:32:29,086 INFO [train.py:892] (0/4) Epoch 9, batch 1800, loss[loss=0.2213, simple_loss=0.2872, pruned_loss=0.07775, over 19733.00 frames. ], tot_loss[loss=0.2404, simple_loss=0.2954, pruned_loss=0.09268, over 3951481.97 frames. 
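
From Epoch 9, batch 1700 onward the logged grad_scale doubles from 16.0 to 32.0. That is the signature of dynamic fp16 loss scaling: the scaler multiplies the loss before backward, skips the optimizer step and halves the scale on overflow, and grows the scale after a long run of overflow-free steps (2000 by PyTorch's default growth_interval). A self-contained sketch using the standard torch.cuda.amp API; the tiny model and data are placeholders:

import torch

model = torch.nn.Linear(10, 1).cuda()
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
scaler = torch.cuda.amp.GradScaler(init_scale=16.0)

for _ in range(100):
    x = torch.randn(8, 10, device="cuda")
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = model(x).pow(2).mean()
    scaler.scale(loss).backward()   # gradients carry the current scale
    scaler.step(optimizer)          # unscales; skips the step on inf/nan
    scaler.update()                 # grows/shrinks the scale dynamically
    # scaler.get_scale() is the number that appears as grad_scale above.
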
], batch size: 118, lr: 1.70e-02, grad_scale: 32.0 +2023-03-28 00:32:57,470 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16657.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:33:04,003 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-03-28 00:33:30,319 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7909, 4.3021, 4.2681, 4.7769, 4.3362, 4.9883, 4.8285, 5.0518], + device='cuda:0'), covar=tensor([0.0598, 0.0372, 0.0492, 0.0273, 0.0616, 0.0238, 0.0340, 0.0276], + device='cuda:0'), in_proj_covar=tensor([0.0120, 0.0139, 0.0166, 0.0137, 0.0137, 0.0117, 0.0128, 0.0155], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 00:33:58,386 INFO [train.py:892] (0/4) Epoch 9, batch 1850, loss[loss=0.2404, simple_loss=0.31, pruned_loss=0.0854, over 19840.00 frames. ], tot_loss[loss=0.2396, simple_loss=0.296, pruned_loss=0.09164, over 3951774.11 frames. ], batch size: 57, lr: 1.70e-02, grad_scale: 32.0 +2023-03-28 00:34:06,215 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-9.pt +2023-03-28 00:35:08,035 INFO [train.py:892] (0/4) Epoch 10, batch 0, loss[loss=0.2228, simple_loss=0.2937, pruned_loss=0.07599, over 19532.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2937, pruned_loss=0.07599, over 19532.00 frames. ], batch size: 54, lr: 1.61e-02, grad_scale: 32.0 +2023-03-28 00:35:08,036 INFO [train.py:917] (0/4) Computing validation loss +2023-03-28 00:35:42,738 INFO [train.py:926] (0/4) Epoch 10, validation: loss=0.1801, simple_loss=0.2601, pruned_loss=0.05003, over 2883724.00 frames. +2023-03-28 00:35:42,740 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-28 00:36:03,898 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16705.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:36:11,166 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.977e+02 5.093e+02 5.971e+02 7.550e+02 1.362e+03, threshold=1.194e+03, percent-clipped=1.0 +2023-03-28 00:36:55,602 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16727.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:37:29,756 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16741.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:37:43,749 INFO [train.py:892] (0/4) Epoch 10, batch 50, loss[loss=0.4029, simple_loss=0.4288, pruned_loss=0.1885, over 19416.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.2951, pruned_loss=0.09333, over 890976.91 frames. ], batch size: 431, lr: 1.61e-02, grad_scale: 32.0 +2023-03-28 00:39:23,194 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16789.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:39:38,219 INFO [train.py:892] (0/4) Epoch 10, batch 100, loss[loss=0.2237, simple_loss=0.2727, pruned_loss=0.08738, over 19794.00 frames. ], tot_loss[loss=0.2419, simple_loss=0.2964, pruned_loss=0.09376, over 1569784.81 frames. 
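
At every epoch boundary the same pattern repeats: "Computing validation loss", a frame-weighted validation summary over the fixed 2883724-frame dev set, and the peak CUDA memory so far (steady at 22793MB here). A hedged sketch of that loop; the model's batch interface below is an assumption, since the real loop lives in train.py:

import torch

def compute_validation_loss(model, valid_loader, device="cuda:0"):
    """Frame-weighted validation loss, mirroring the 'Epoch N,
    validation: ...' lines."""
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = model(batch)   # assumed interface
            tot_loss += loss.item() * num_frames
            tot_frames += num_frames
    model.train()
    print(f"validation: loss={tot_loss / tot_frames:.4g}, "
          f"over {tot_frames:.2f} frames.")
    print("Maximum memory allocated so far is "
          f"{torch.cuda.max_memory_allocated(device) // 2**20}MB")
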
], batch size: 126, lr: 1.61e-02, grad_scale: 32.0 +2023-03-28 00:39:59,423 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16805.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:40:04,291 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.771e+02 5.348e+02 6.393e+02 7.632e+02 1.172e+03, threshold=1.279e+03, percent-clipped=0.0 +2023-03-28 00:41:10,556 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.55 vs. limit=5.0 +2023-03-28 00:41:31,410 INFO [train.py:892] (0/4) Epoch 10, batch 150, loss[loss=0.2386, simple_loss=0.296, pruned_loss=0.09059, over 19647.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.2926, pruned_loss=0.09219, over 2097422.55 frames. ], batch size: 66, lr: 1.61e-02, grad_scale: 32.0 +2023-03-28 00:41:49,415 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16853.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:42:23,027 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16868.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 00:43:24,900 INFO [train.py:892] (0/4) Epoch 10, batch 200, loss[loss=0.2294, simple_loss=0.2774, pruned_loss=0.09076, over 19836.00 frames. ], tot_loss[loss=0.2392, simple_loss=0.2935, pruned_loss=0.09244, over 2508777.93 frames. ], batch size: 171, lr: 1.60e-02, grad_scale: 16.0 +2023-03-28 00:43:47,702 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16906.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:43:53,288 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.558e+02 5.040e+02 6.126e+02 7.288e+02 1.628e+03, threshold=1.225e+03, percent-clipped=1.0 +2023-03-28 00:44:41,758 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16929.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 00:44:55,438 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8503, 2.8536, 3.3939, 2.5751, 3.3380, 2.7078, 2.9205, 3.5116], + device='cuda:0'), covar=tensor([0.0460, 0.0428, 0.0303, 0.0731, 0.0301, 0.0310, 0.0416, 0.0200], + device='cuda:0'), in_proj_covar=tensor([0.0054, 0.0055, 0.0058, 0.0087, 0.0055, 0.0051, 0.0050, 0.0045], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-03-28 00:45:17,958 INFO [train.py:892] (0/4) Epoch 10, batch 250, loss[loss=0.2147, simple_loss=0.268, pruned_loss=0.0807, over 19789.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.2905, pruned_loss=0.09016, over 2828558.66 frames. ], batch size: 94, lr: 1.60e-02, grad_scale: 16.0 +2023-03-28 00:46:06,379 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16967.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 00:46:35,618 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9122, 4.0245, 2.3726, 4.2315, 4.4410, 1.8156, 3.6512, 3.4886], + device='cuda:0'), covar=tensor([0.0574, 0.0711, 0.2495, 0.0494, 0.0279, 0.2855, 0.0901, 0.0529], + device='cuda:0'), in_proj_covar=tensor([0.0180, 0.0204, 0.0204, 0.0178, 0.0133, 0.0192, 0.0210, 0.0147], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 00:47:10,485 INFO [train.py:892] (0/4) Epoch 10, batch 300, loss[loss=0.2265, simple_loss=0.2878, pruned_loss=0.08257, over 19804.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.2929, pruned_loss=0.09141, over 3077508.15 frames. 
], batch size: 86, lr: 1.60e-02, grad_scale: 16.0 +2023-03-28 00:47:30,470 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-28 00:47:41,232 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.429e+02 5.102e+02 6.617e+02 8.376e+02 1.348e+03, threshold=1.323e+03, percent-clipped=5.0 +2023-03-28 00:48:21,791 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17027.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:49:09,353 INFO [train.py:892] (0/4) Epoch 10, batch 350, loss[loss=0.2475, simple_loss=0.3012, pruned_loss=0.09689, over 19780.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.2934, pruned_loss=0.09086, over 3271134.85 frames. ], batch size: 213, lr: 1.60e-02, grad_scale: 16.0 +2023-03-28 00:50:14,049 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17075.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:50:40,603 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3210, 4.7727, 4.9479, 4.7637, 5.2210, 3.5282, 4.0671, 2.9878], + device='cuda:0'), covar=tensor([0.0190, 0.0164, 0.0130, 0.0158, 0.0141, 0.0676, 0.0974, 0.1123], + device='cuda:0'), in_proj_covar=tensor([0.0080, 0.0107, 0.0093, 0.0106, 0.0095, 0.0115, 0.0128, 0.0110], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 00:50:59,658 INFO [train.py:892] (0/4) Epoch 10, batch 400, loss[loss=0.241, simple_loss=0.3076, pruned_loss=0.08715, over 19820.00 frames. ], tot_loss[loss=0.2377, simple_loss=0.2937, pruned_loss=0.09084, over 3421614.77 frames. ], batch size: 57, lr: 1.59e-02, grad_scale: 16.0 +2023-03-28 00:51:28,619 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.044e+02 4.864e+02 6.028e+02 7.083e+02 1.226e+03, threshold=1.206e+03, percent-clipped=0.0 +2023-03-28 00:52:06,532 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.82 vs. limit=2.0 +2023-03-28 00:52:21,963 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.2841, 2.4537, 2.6503, 2.2951, 2.1828, 2.0997, 2.2834, 2.4490], + device='cuda:0'), covar=tensor([0.0242, 0.0386, 0.0273, 0.0359, 0.0497, 0.0427, 0.0332, 0.0413], + device='cuda:0'), in_proj_covar=tensor([0.0042, 0.0043, 0.0045, 0.0038, 0.0049, 0.0047, 0.0059, 0.0042], + device='cuda:0'), out_proj_covar=tensor([9.3597e-05, 9.6986e-05, 1.0029e-04, 8.6420e-05, 1.0930e-04, 1.0504e-04, + 1.2992e-04, 9.7271e-05], device='cuda:0') +2023-03-28 00:52:51,303 INFO [train.py:892] (0/4) Epoch 10, batch 450, loss[loss=0.2492, simple_loss=0.3232, pruned_loss=0.0876, over 19670.00 frames. ], tot_loss[loss=0.2377, simple_loss=0.2936, pruned_loss=0.09084, over 3539542.42 frames. ], batch size: 49, lr: 1.59e-02, grad_scale: 16.0 +2023-03-28 00:54:30,980 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17189.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:54:47,706 INFO [train.py:892] (0/4) Epoch 10, batch 500, loss[loss=0.1988, simple_loss=0.2631, pruned_loss=0.06723, over 19790.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.294, pruned_loss=0.09109, over 3630003.15 frames. 
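
Within each epoch the frame count under tot_loss grows from roughly 0.89M at batch 50 toward about 3.9M and then flattens, which is what a decaying frame-weighted window produces rather than a plain cumulative average. A sketch of such a tracker; the decay constant is an assumption chosen so the plateau matches the observed size:

class FrameWeightedLoss:
    """Running frame-weighted loss behind the 'tot_loss[...] over N
    frames' numbers."""
    def __init__(self, decay: float = 0.995):   # assumed window constant
        self.decay = decay
        self.weighted_sum = 0.0
        self.frames = 0.0

    def update(self, loss: float, num_frames: float) -> None:
        # Old batches fade out geometrically, so the effective frame
        # count saturates at num_frames / (1 - decay).
        self.weighted_sum = self.decay * self.weighted_sum + loss * num_frames
        self.frames = self.decay * self.frames + num_frames

    @property
    def value(self) -> float:
        return self.weighted_sum / max(self.frames, 1.0)
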
], batch size: 83, lr: 1.59e-02, grad_scale: 16.0 +2023-03-28 00:55:07,263 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4607, 2.4414, 2.7815, 2.6433, 2.8762, 2.9211, 3.3452, 3.5085], + device='cuda:0'), covar=tensor([0.0509, 0.1308, 0.1265, 0.1502, 0.1345, 0.1159, 0.0381, 0.0419], + device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0208, 0.0222, 0.0226, 0.0248, 0.0216, 0.0164, 0.0164], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 00:55:18,932 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.795e+02 5.158e+02 6.137e+02 7.953e+02 1.330e+03, threshold=1.227e+03, percent-clipped=1.0 +2023-03-28 00:55:50,531 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17224.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 00:56:39,661 INFO [train.py:892] (0/4) Epoch 10, batch 550, loss[loss=0.2158, simple_loss=0.2767, pruned_loss=0.07747, over 19863.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.2955, pruned_loss=0.09194, over 3701701.86 frames. ], batch size: 99, lr: 1.59e-02, grad_scale: 16.0 +2023-03-28 00:56:49,485 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17250.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 00:57:16,222 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17262.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 00:58:29,315 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9200, 2.2258, 2.9545, 2.7501, 3.1233, 3.0138, 3.8591, 4.0209], + device='cuda:0'), covar=tensor([0.0493, 0.1915, 0.1486, 0.1801, 0.1609, 0.1336, 0.0417, 0.0437], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0210, 0.0224, 0.0228, 0.0248, 0.0219, 0.0165, 0.0165], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 00:58:30,103 INFO [train.py:892] (0/4) Epoch 10, batch 600, loss[loss=0.2508, simple_loss=0.2926, pruned_loss=0.1045, over 19881.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.2941, pruned_loss=0.09105, over 3757498.03 frames. ], batch size: 134, lr: 1.59e-02, grad_scale: 16.0 +2023-03-28 00:59:00,058 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.688e+02 5.075e+02 6.633e+02 7.997e+02 1.370e+03, threshold=1.327e+03, percent-clipped=3.0 +2023-03-28 01:00:24,290 INFO [train.py:892] (0/4) Epoch 10, batch 650, loss[loss=0.231, simple_loss=0.289, pruned_loss=0.08654, over 19658.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.2928, pruned_loss=0.09013, over 3800591.93 frames. ], batch size: 67, lr: 1.58e-02, grad_scale: 16.0 +2023-03-28 01:01:01,096 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17362.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:01:48,743 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17383.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 01:01:49,290 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-03-28 01:02:19,162 INFO [train.py:892] (0/4) Epoch 10, batch 700, loss[loss=0.2442, simple_loss=0.2984, pruned_loss=0.09499, over 19707.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.2928, pruned_loss=0.09024, over 3834702.19 frames. 
], batch size: 60, lr: 1.58e-02, grad_scale: 16.0 +2023-03-28 01:02:48,229 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.516e+02 5.181e+02 6.486e+02 7.594e+02 1.327e+03, threshold=1.297e+03, percent-clipped=1.0 +2023-03-28 01:03:20,263 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17423.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:03:26,789 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.9863, 2.2803, 1.8623, 1.4140, 1.9149, 2.0031, 2.1105, 2.2074], + device='cuda:0'), covar=tensor([0.0230, 0.0198, 0.0237, 0.0533, 0.0464, 0.0227, 0.0190, 0.0171], + device='cuda:0'), in_proj_covar=tensor([0.0057, 0.0052, 0.0059, 0.0073, 0.0072, 0.0050, 0.0044, 0.0047], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-03-28 01:04:06,752 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17444.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 01:04:10,172 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4297, 3.2979, 4.9764, 3.6741, 4.3727, 4.4372, 2.3613, 2.7953], + device='cuda:0'), covar=tensor([0.0624, 0.2780, 0.0276, 0.0642, 0.1040, 0.0570, 0.1701, 0.1999], + device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0326, 0.0251, 0.0205, 0.0317, 0.0239, 0.0264, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 01:04:11,118 INFO [train.py:892] (0/4) Epoch 10, batch 750, loss[loss=0.1962, simple_loss=0.2609, pruned_loss=0.06579, over 19908.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.2945, pruned_loss=0.09133, over 3859806.21 frames. ], batch size: 45, lr: 1.58e-02, grad_scale: 16.0 +2023-03-28 01:06:02,439 INFO [train.py:892] (0/4) Epoch 10, batch 800, loss[loss=0.2112, simple_loss=0.2773, pruned_loss=0.07253, over 19893.00 frames. ], tot_loss[loss=0.237, simple_loss=0.2932, pruned_loss=0.09037, over 3880211.93 frames. ], batch size: 113, lr: 1.58e-02, grad_scale: 16.0 +2023-03-28 01:06:32,363 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.436e+02 5.370e+02 6.448e+02 7.965e+02 2.022e+03, threshold=1.290e+03, percent-clipped=3.0 +2023-03-28 01:07:07,114 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17524.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 01:07:53,811 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17545.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:07:54,966 INFO [train.py:892] (0/4) Epoch 10, batch 850, loss[loss=0.2116, simple_loss=0.2833, pruned_loss=0.06991, over 19846.00 frames. ], tot_loss[loss=0.238, simple_loss=0.2939, pruned_loss=0.09106, over 3895451.14 frames. ], batch size: 59, lr: 1.58e-02, grad_scale: 16.0 +2023-03-28 01:08:16,963 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.08 vs. 
limit=2.0 +2023-03-28 01:08:34,016 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17562.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 01:08:53,799 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17572.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 01:09:21,419 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8839, 2.9996, 4.2383, 3.0630, 3.5388, 3.9023, 2.3620, 2.6301], + device='cuda:0'), covar=tensor([0.0678, 0.2425, 0.0362, 0.0628, 0.1180, 0.0574, 0.1445, 0.1746], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0322, 0.0247, 0.0203, 0.0313, 0.0236, 0.0264, 0.0249], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 01:09:47,744 INFO [train.py:892] (0/4) Epoch 10, batch 900, loss[loss=0.2261, simple_loss=0.2823, pruned_loss=0.08501, over 19867.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.2937, pruned_loss=0.09106, over 3908401.27 frames. ], batch size: 136, lr: 1.57e-02, grad_scale: 16.0 +2023-03-28 01:10:02,643 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.6500, 1.8776, 1.6459, 1.0577, 1.6036, 1.7438, 1.6146, 1.7488], + device='cuda:0'), covar=tensor([0.0228, 0.0198, 0.0221, 0.0530, 0.0395, 0.0171, 0.0190, 0.0170], + device='cuda:0'), in_proj_covar=tensor([0.0059, 0.0054, 0.0061, 0.0076, 0.0075, 0.0053, 0.0046, 0.0049], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-03-28 01:10:17,487 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.166e+02 5.484e+02 6.655e+02 8.099e+02 1.732e+03, threshold=1.331e+03, percent-clipped=3.0 +2023-03-28 01:10:20,721 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17610.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:11:39,744 INFO [train.py:892] (0/4) Epoch 10, batch 950, loss[loss=0.2153, simple_loss=0.2688, pruned_loss=0.08094, over 19839.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.2946, pruned_loss=0.09129, over 3917510.51 frames. 
], batch size: 93, lr: 1.57e-02, grad_scale: 16.0 +2023-03-28 01:12:00,858 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5326, 3.5469, 2.1951, 3.8362, 3.9262, 1.5749, 3.0662, 2.9025], + device='cuda:0'), covar=tensor([0.0622, 0.0794, 0.2619, 0.0645, 0.0338, 0.3346, 0.1195, 0.0742], + device='cuda:0'), in_proj_covar=tensor([0.0186, 0.0208, 0.0211, 0.0186, 0.0144, 0.0200, 0.0216, 0.0153], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 01:12:35,701 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.5289, 4.9781, 5.0996, 4.9810, 5.4160, 3.6382, 4.1767, 2.8900], + device='cuda:0'), covar=tensor([0.0151, 0.0139, 0.0126, 0.0122, 0.0121, 0.0625, 0.0911, 0.1148], + device='cuda:0'), in_proj_covar=tensor([0.0081, 0.0108, 0.0093, 0.0106, 0.0096, 0.0116, 0.0128, 0.0110], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 01:12:53,556 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6307, 4.6867, 5.1819, 4.7098, 4.2853, 4.9433, 4.8456, 5.3262], + device='cuda:0'), covar=tensor([0.1108, 0.0315, 0.0375, 0.0333, 0.0636, 0.0369, 0.0314, 0.0269], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0188, 0.0181, 0.0187, 0.0182, 0.0190, 0.0178, 0.0169], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 01:13:32,345 INFO [train.py:892] (0/4) Epoch 10, batch 1000, loss[loss=0.2262, simple_loss=0.2972, pruned_loss=0.07762, over 19917.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.2927, pruned_loss=0.08996, over 3926440.40 frames. ], batch size: 53, lr: 1.57e-02, grad_scale: 16.0 +2023-03-28 01:13:41,691 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17700.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:14:02,554 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.971e+02 5.177e+02 6.007e+02 7.029e+02 1.681e+03, threshold=1.201e+03, percent-clipped=2.0 +2023-03-28 01:14:20,977 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17718.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:14:47,972 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17730.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:15:09,091 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17739.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 01:15:26,012 INFO [train.py:892] (0/4) Epoch 10, batch 1050, loss[loss=0.1958, simple_loss=0.2579, pruned_loss=0.06686, over 19752.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.2934, pruned_loss=0.09064, over 3931608.26 frames. ], batch size: 110, lr: 1.57e-02, grad_scale: 16.0 +2023-03-28 01:15:57,165 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17761.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:17:06,020 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17791.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:17:16,098 INFO [train.py:892] (0/4) Epoch 10, batch 1100, loss[loss=0.2478, simple_loss=0.2975, pruned_loss=0.09906, over 19754.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.2922, pruned_loss=0.08926, over 3936722.01 frames. 
], batch size: 256, lr: 1.57e-02, grad_scale: 16.0 +2023-03-28 01:17:38,638 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5729, 3.6487, 4.0092, 3.5730, 3.5652, 3.8974, 3.6348, 4.0668], + device='cuda:0'), covar=tensor([0.0990, 0.0337, 0.0327, 0.0382, 0.0938, 0.0433, 0.0444, 0.0317], + device='cuda:0'), in_proj_covar=tensor([0.0240, 0.0183, 0.0177, 0.0185, 0.0178, 0.0184, 0.0175, 0.0167], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 01:17:45,687 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.952e+02 5.448e+02 6.504e+02 7.690e+02 1.433e+03, threshold=1.301e+03, percent-clipped=1.0 +2023-03-28 01:17:47,052 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-28 01:19:10,799 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17845.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:19:11,948 INFO [train.py:892] (0/4) Epoch 10, batch 1150, loss[loss=0.2044, simple_loss=0.2786, pruned_loss=0.0651, over 19930.00 frames. ], tot_loss[loss=0.236, simple_loss=0.2924, pruned_loss=0.08981, over 3938516.67 frames. ], batch size: 51, lr: 1.56e-02, grad_scale: 16.0 +2023-03-28 01:19:19,175 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0046, 2.9174, 1.7064, 3.6365, 3.3970, 3.6187, 3.6957, 2.8920], + device='cuda:0'), covar=tensor([0.0589, 0.0581, 0.1622, 0.0462, 0.0397, 0.0308, 0.0416, 0.0737], + device='cuda:0'), in_proj_covar=tensor([0.0120, 0.0112, 0.0129, 0.0117, 0.0102, 0.0095, 0.0111, 0.0122], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 01:20:58,766 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17893.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:21:06,372 INFO [train.py:892] (0/4) Epoch 10, batch 1200, loss[loss=0.2023, simple_loss=0.2657, pruned_loss=0.06946, over 19710.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.2923, pruned_loss=0.08962, over 3940940.20 frames. ], batch size: 78, lr: 1.56e-02, grad_scale: 16.0 +2023-03-28 01:21:34,285 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.688e+02 5.021e+02 6.012e+02 7.162e+02 1.373e+03, threshold=1.202e+03, percent-clipped=1.0 +2023-03-28 01:22:03,835 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2227, 5.5542, 5.6096, 5.5619, 5.2145, 5.5185, 4.9438, 5.0921], + device='cuda:0'), covar=tensor([0.0334, 0.0372, 0.0493, 0.0400, 0.0579, 0.0589, 0.0583, 0.0878], + device='cuda:0'), in_proj_covar=tensor([0.0184, 0.0183, 0.0225, 0.0188, 0.0178, 0.0168, 0.0201, 0.0237], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 01:22:56,640 INFO [train.py:892] (0/4) Epoch 10, batch 1250, loss[loss=0.2067, simple_loss=0.2666, pruned_loss=0.07343, over 19878.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.2914, pruned_loss=0.08914, over 3943146.03 frames. ], batch size: 95, lr: 1.56e-02, grad_scale: 16.0 +2023-03-28 01:23:30,625 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17960.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:24:49,120 INFO [train.py:892] (0/4) Epoch 10, batch 1300, loss[loss=0.2517, simple_loss=0.3093, pruned_loss=0.0971, over 19915.00 frames. 
], tot_loss[loss=0.2362, simple_loss=0.2928, pruned_loss=0.08987, over 3944607.35 frames. ], batch size: 53, lr: 1.56e-02, grad_scale: 16.0 +2023-03-28 01:25:00,218 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-18000.pt +2023-03-28 01:25:24,408 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.596e+02 5.072e+02 5.807e+02 7.353e+02 1.557e+03, threshold=1.161e+03, percent-clipped=4.0 +2023-03-28 01:25:44,796 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18018.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:25:50,495 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18021.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:26:33,907 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18039.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 01:26:47,395 INFO [train.py:892] (0/4) Epoch 10, batch 1350, loss[loss=0.2528, simple_loss=0.3124, pruned_loss=0.09661, over 19810.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.292, pruned_loss=0.08944, over 3946117.14 frames. ], batch size: 67, lr: 1.56e-02, grad_scale: 16.0 +2023-03-28 01:27:11,861 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18056.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:27:30,011 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.61 vs. limit=2.0 +2023-03-28 01:27:33,006 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18066.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:28:19,529 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18086.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:28:21,749 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18087.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 01:28:41,070 INFO [train.py:892] (0/4) Epoch 10, batch 1400, loss[loss=0.2391, simple_loss=0.2925, pruned_loss=0.09279, over 19791.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.2918, pruned_loss=0.08932, over 3946187.06 frames. ], batch size: 211, lr: 1.55e-02, grad_scale: 16.0 +2023-03-28 01:29:09,602 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.393e+02 4.780e+02 5.656e+02 7.029e+02 1.296e+03, threshold=1.131e+03, percent-clipped=2.0 +2023-03-28 01:30:37,308 INFO [train.py:892] (0/4) Epoch 10, batch 1450, loss[loss=0.2487, simple_loss=0.2984, pruned_loss=0.09948, over 19837.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.2921, pruned_loss=0.08939, over 3946727.15 frames. ], batch size: 177, lr: 1.55e-02, grad_scale: 16.0 +2023-03-28 01:31:44,868 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18174.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:31:47,355 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18175.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:32:31,638 INFO [train.py:892] (0/4) Epoch 10, batch 1500, loss[loss=0.2243, simple_loss=0.2901, pruned_loss=0.07926, over 19803.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.2922, pruned_loss=0.08918, over 3947779.60 frames. 
], batch size: 67, lr: 1.55e-02, grad_scale: 16.0 +2023-03-28 01:33:02,750 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.786e+02 5.080e+02 6.023e+02 7.472e+02 1.411e+03, threshold=1.205e+03, percent-clipped=3.0 +2023-03-28 01:34:02,597 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18235.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:34:04,819 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18236.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:34:27,162 INFO [train.py:892] (0/4) Epoch 10, batch 1550, loss[loss=0.2223, simple_loss=0.2969, pruned_loss=0.07389, over 19844.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.2909, pruned_loss=0.08832, over 3948640.85 frames. ], batch size: 56, lr: 1.55e-02, grad_scale: 16.0 +2023-03-28 01:34:32,121 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-28 01:36:21,490 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6157, 3.1280, 3.4556, 3.4221, 3.8489, 4.0033, 4.3865, 4.9727], + device='cuda:0'), covar=tensor([0.0419, 0.1400, 0.1222, 0.1646, 0.1465, 0.0924, 0.0434, 0.0350], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0209, 0.0224, 0.0225, 0.0249, 0.0221, 0.0169, 0.0170], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 01:36:22,688 INFO [train.py:892] (0/4) Epoch 10, batch 1600, loss[loss=0.2325, simple_loss=0.2966, pruned_loss=0.08424, over 19875.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.2925, pruned_loss=0.08923, over 3948155.61 frames. ], batch size: 53, lr: 1.55e-02, grad_scale: 16.0 +2023-03-28 01:36:49,423 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.277e+02 5.238e+02 6.120e+02 7.230e+02 1.238e+03, threshold=1.224e+03, percent-clipped=2.0 +2023-03-28 01:36:58,544 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7074, 3.7600, 2.1876, 4.1097, 4.2064, 1.8133, 3.3123, 3.2242], + device='cuda:0'), covar=tensor([0.0629, 0.0722, 0.2575, 0.0526, 0.0271, 0.2798, 0.1027, 0.0660], + device='cuda:0'), in_proj_covar=tensor([0.0187, 0.0210, 0.0206, 0.0188, 0.0146, 0.0196, 0.0215, 0.0152], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 01:37:03,896 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18316.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:38:12,208 INFO [train.py:892] (0/4) Epoch 10, batch 1650, loss[loss=0.2022, simple_loss=0.2645, pruned_loss=0.06996, over 19892.00 frames. ], tot_loss[loss=0.235, simple_loss=0.2923, pruned_loss=0.08889, over 3949340.83 frames. 
], batch size: 88, lr: 1.54e-02, grad_scale: 16.0 +2023-03-28 01:38:36,318 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18356.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:39:04,886 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8013, 4.0035, 3.9875, 4.0572, 3.7536, 3.9726, 3.6561, 3.1976], + device='cuda:0'), covar=tensor([0.0943, 0.1052, 0.1191, 0.0883, 0.1168, 0.1080, 0.1364, 0.2732], + device='cuda:0'), in_proj_covar=tensor([0.0185, 0.0187, 0.0223, 0.0189, 0.0179, 0.0171, 0.0200, 0.0238], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 01:39:13,521 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7659, 2.0106, 2.6333, 2.9708, 3.2894, 3.5495, 3.4985, 3.6570], + device='cuda:0'), covar=tensor([0.0781, 0.1837, 0.1040, 0.0541, 0.0426, 0.0216, 0.0237, 0.0280], + device='cuda:0'), in_proj_covar=tensor([0.0135, 0.0172, 0.0154, 0.0125, 0.0107, 0.0101, 0.0092, 0.0097], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 01:39:43,585 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18386.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:40:06,513 INFO [train.py:892] (0/4) Epoch 10, batch 1700, loss[loss=0.2344, simple_loss=0.3017, pruned_loss=0.08357, over 19694.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.2935, pruned_loss=0.08974, over 3948362.31 frames. ], batch size: 48, lr: 1.54e-02, grad_scale: 16.0 +2023-03-28 01:40:07,597 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8197, 2.6815, 3.1227, 2.9495, 3.0061, 3.1609, 3.5310, 3.9573], + device='cuda:0'), covar=tensor([0.0512, 0.1517, 0.1175, 0.1611, 0.1649, 0.1165, 0.0443, 0.0420], + device='cuda:0'), in_proj_covar=tensor([0.0199, 0.0212, 0.0227, 0.0228, 0.0251, 0.0223, 0.0170, 0.0171], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 01:40:24,838 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18404.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:40:35,305 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.958e+02 5.263e+02 6.960e+02 8.795e+02 1.250e+03, threshold=1.392e+03, percent-clipped=4.0 +2023-03-28 01:40:36,676 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-28 01:40:51,832 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3354, 5.8581, 6.0110, 5.6372, 5.5839, 5.4511, 5.5663, 5.4809], + device='cuda:0'), covar=tensor([0.1271, 0.0830, 0.0788, 0.0944, 0.0588, 0.0609, 0.1740, 0.1872], + device='cuda:0'), in_proj_covar=tensor([0.0241, 0.0230, 0.0297, 0.0224, 0.0218, 0.0213, 0.0282, 0.0312], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 01:41:20,915 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-03-28 01:41:29,818 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18434.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:41:54,333 INFO [train.py:892] (0/4) Epoch 10, batch 1750, loss[loss=0.2305, simple_loss=0.3016, pruned_loss=0.07973, over 19818.00 frames. 
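The zipformer.py:1454 dumps are periodic attention diagnostics: attn_weights_entropy carries one entropy value per attention head, and the covar/in_proj_covar/out_proj_covar tensors track the spread of those statistics. A plausible reconstruction of the entropy computation; the shapes are assumptions:

```python
import torch

def attn_weights_entropy(attn_weights: torch.Tensor, eps: float = 1.0e-20) -> torch.Tensor:
    """attn_weights: (num_heads, seq_len, seq_len); each row sums to 1 over the last dim."""
    p = attn_weights.clamp(min=eps)
    return -(p * p.log()).sum(dim=-1).mean(dim=-1)  # one entropy value per head

# peaked attention -> entropy near 0; uniform attention over 100 frames -> ln(100) ~= 4.6
print(attn_weights_entropy(torch.full((8, 100, 100), 0.01)))
```

Low values (as in the first tensor above, mostly near 0.0x via the covar rows) indicate heads that attend sharply; values near 5.0 indicate nearly uniform attention.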
], tot_loss[loss=0.235, simple_loss=0.2925, pruned_loss=0.08878, over 3948030.58 frames. ], batch size: 57, lr: 1.54e-02, grad_scale: 16.0 +2023-03-28 01:42:20,301 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2673, 4.7690, 4.9514, 4.7135, 5.2036, 3.2962, 4.0931, 2.7685], + device='cuda:0'), covar=tensor([0.0148, 0.0155, 0.0126, 0.0146, 0.0111, 0.0812, 0.0898, 0.1322], + device='cuda:0'), in_proj_covar=tensor([0.0081, 0.0109, 0.0096, 0.0108, 0.0096, 0.0118, 0.0130, 0.0112], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 01:43:20,386 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18490.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:43:30,627 INFO [train.py:892] (0/4) Epoch 10, batch 1800, loss[loss=0.201, simple_loss=0.2728, pruned_loss=0.06461, over 19588.00 frames. ], tot_loss[loss=0.234, simple_loss=0.2915, pruned_loss=0.08821, over 3947638.15 frames. ], batch size: 53, lr: 1.54e-02, grad_scale: 16.0 +2023-03-28 01:43:48,683 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.33 vs. limit=5.0 +2023-03-28 01:43:51,883 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18508.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 01:43:52,990 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.190e+02 4.885e+02 5.842e+02 7.130e+02 1.277e+03, threshold=1.168e+03, percent-clipped=0.0 +2023-03-28 01:44:05,397 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.64 vs. limit=5.0 +2023-03-28 01:44:30,985 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18530.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:44:32,733 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18531.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:44:58,712 INFO [train.py:892] (0/4) Epoch 10, batch 1850, loss[loss=0.2669, simple_loss=0.3239, pruned_loss=0.105, over 19824.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.2936, pruned_loss=0.088, over 3946756.15 frames. ], batch size: 57, lr: 1.54e-02, grad_scale: 16.0 +2023-03-28 01:45:06,048 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-10.pt +2023-03-28 01:46:02,459 INFO [train.py:892] (0/4) Epoch 11, batch 0, loss[loss=0.3131, simple_loss=0.3443, pruned_loss=0.141, over 19676.00 frames. ], tot_loss[loss=0.3131, simple_loss=0.3443, pruned_loss=0.141, over 19676.00 frames. ], batch size: 325, lr: 1.47e-02, grad_scale: 16.0 +2023-03-28 01:46:02,460 INFO [train.py:917] (0/4) Computing validation loss +2023-03-28 01:46:36,865 INFO [train.py:926] (0/4) Epoch 11, validation: loss=0.1783, simple_loss=0.2585, pruned_loss=0.04909, over 2883724.00 frames. +2023-03-28 01:46:36,866 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-28 01:46:37,839 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18551.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:47:18,519 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18569.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 01:48:31,659 INFO [train.py:892] (0/4) Epoch 11, batch 50, loss[loss=0.1996, simple_loss=0.2555, pruned_loss=0.0719, over 19863.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.2818, pruned_loss=0.0835, over 890643.60 frames. 
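The lr field drifts down within an epoch (1.56e-02 to 1.54e-02 across epoch 10) and steps down at each epoch boundary (1.54e-02 at Epoch 10, batch 1850, then 1.47e-02 at Epoch 11, batch 0). Both movements are consistent with the Eden schedule, reconstructed below with base_lr = 0.05, lr_batches = 5000, lr_epochs = 3.5, and the epoch index advancing at each boundary; this closed form is a hedged reconstruction, checked against the logged values:

```python
def eden_lr(batch: int, epoch: int, base_lr: float = 0.05,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    # lr = base_lr * ((b^2 + B^2) / B^2)^-0.25 * ((e^2 + E^2) / E^2)^-0.25,
    # with b = global batch index and e = number of completed epochs
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

print(f"{eden_lr(18000, 9):.2e}")   # 1.56e-02, as logged late in epoch 10
print(f"{eden_lr(18551, 10):.2e}")  # 1.47e-02, as logged at Epoch 11, batch 0
```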
], batch size: 64, lr: 1.46e-02, grad_scale: 16.0 +2023-03-28 01:48:47,839 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.428e+02 5.161e+02 6.219e+02 7.148e+02 1.502e+03, threshold=1.244e+03, percent-clipped=1.0 +2023-03-28 01:49:05,493 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18616.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:49:14,171 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.52 vs. limit=5.0 +2023-03-28 01:49:43,788 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.8780, 2.0309, 1.8648, 1.2419, 1.9002, 2.0593, 1.9465, 2.0000], + device='cuda:0'), covar=tensor([0.0293, 0.0185, 0.0222, 0.0509, 0.0368, 0.0200, 0.0186, 0.0201], + device='cuda:0'), in_proj_covar=tensor([0.0061, 0.0055, 0.0062, 0.0075, 0.0075, 0.0053, 0.0046, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-03-28 01:50:21,240 INFO [train.py:892] (0/4) Epoch 11, batch 100, loss[loss=0.2312, simple_loss=0.2939, pruned_loss=0.08426, over 19886.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.2871, pruned_loss=0.08639, over 1569865.26 frames. ], batch size: 88, lr: 1.46e-02, grad_scale: 16.0 +2023-03-28 01:50:31,432 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. limit=2.0 +2023-03-28 01:50:49,679 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18664.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:51:15,938 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18674.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:52:13,641 INFO [train.py:892] (0/4) Epoch 11, batch 150, loss[loss=0.2029, simple_loss=0.2654, pruned_loss=0.0702, over 19778.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.2905, pruned_loss=0.08818, over 2097223.22 frames. ], batch size: 94, lr: 1.46e-02, grad_scale: 16.0 +2023-03-28 01:52:35,336 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.696e+02 5.324e+02 6.674e+02 8.055e+02 1.622e+03, threshold=1.335e+03, percent-clipped=4.0 +2023-03-28 01:52:43,719 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4000, 2.4638, 1.4532, 2.9749, 2.8015, 2.8466, 3.0039, 2.2564], + device='cuda:0'), covar=tensor([0.0664, 0.0610, 0.1566, 0.0460, 0.0539, 0.0489, 0.0449, 0.0835], + device='cuda:0'), in_proj_covar=tensor([0.0122, 0.0115, 0.0131, 0.0118, 0.0104, 0.0099, 0.0113, 0.0122], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 01:53:31,431 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18735.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:54:09,408 INFO [train.py:892] (0/4) Epoch 11, batch 200, loss[loss=0.268, simple_loss=0.3111, pruned_loss=0.1124, over 19865.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.2936, pruned_loss=0.08869, over 2506505.03 frames. ], batch size: 157, lr: 1.46e-02, grad_scale: 16.0 +2023-03-28 01:56:02,236 INFO [train.py:892] (0/4) Epoch 11, batch 250, loss[loss=0.1951, simple_loss=0.2649, pruned_loss=0.06264, over 19887.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.2929, pruned_loss=0.08833, over 2825398.86 frames. 
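The zipformer.py:625 lines track stochastic layer dropout: each encoder stack logs its warmup window, the current batch count, and which of its layers (if any) were randomly dropped for this batch; by this stage of training num_to_drop is almost always 0. The decay law in this sketch is an assumption for illustration, only the logged fields are real:

```python
import random

def pick_layers_to_drop(num_layers: int, batch_count: float,
                        warmup_begin: float, warmup_end: float) -> set:
    # Assumed schedule: drop layers aggressively before the warmup window,
    # ramp down across it, and only rarely drop afterwards.
    if batch_count < warmup_begin:
        drop_prob = 0.5
    elif batch_count < warmup_end:
        frac = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
        drop_prob = 0.5 * (1.0 - frac) + 0.05 * frac  # ramp 0.5 -> 0.05
    else:
        drop_prob = 0.05
    return {i for i in range(num_layers) if random.random() < drop_prob}

# e.g. "warmup_begin=2000.0, warmup_end=2666.7, batch_count=18616.0, num_to_drop=0,
# layers_to_drop=set()" corresponds to the low post-warmup probability
print(pick_layers_to_drop(4, 18616.0, 2000.0, 2666.7))
```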
], batch size: 92, lr: 1.46e-02, grad_scale: 16.0 +2023-03-28 01:56:11,729 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5941, 2.8960, 4.7166, 3.9229, 4.6194, 4.7148, 4.7375, 4.5775], + device='cuda:0'), covar=tensor([0.0196, 0.0696, 0.0084, 0.1112, 0.0082, 0.0163, 0.0094, 0.0085], + device='cuda:0'), in_proj_covar=tensor([0.0076, 0.0088, 0.0070, 0.0143, 0.0062, 0.0076, 0.0073, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 01:56:18,951 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.168e+02 4.641e+02 5.456e+02 6.612e+02 1.459e+03, threshold=1.091e+03, percent-clipped=1.0 +2023-03-28 01:57:08,046 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18830.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:57:08,212 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6409, 2.5669, 1.5914, 3.2596, 2.9601, 3.1571, 3.2324, 2.4618], + device='cuda:0'), covar=tensor([0.0653, 0.0658, 0.1654, 0.0428, 0.0553, 0.0421, 0.0483, 0.0777], + device='cuda:0'), in_proj_covar=tensor([0.0121, 0.0114, 0.0129, 0.0117, 0.0104, 0.0098, 0.0114, 0.0121], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 01:57:10,079 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18831.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:57:44,288 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18846.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:57:56,661 INFO [train.py:892] (0/4) Epoch 11, batch 300, loss[loss=0.2425, simple_loss=0.3004, pruned_loss=0.09235, over 19750.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.2916, pruned_loss=0.08726, over 3075051.95 frames. ], batch size: 256, lr: 1.46e-02, grad_scale: 16.0 +2023-03-28 01:58:25,286 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18864.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 01:58:48,408 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4422, 2.6645, 2.9717, 3.4234, 2.3079, 2.8919, 2.3001, 2.0979], + device='cuda:0'), covar=tensor([0.0496, 0.2232, 0.0929, 0.0283, 0.2247, 0.0726, 0.1192, 0.1886], + device='cuda:0'), in_proj_covar=tensor([0.0191, 0.0334, 0.0224, 0.0142, 0.0238, 0.0171, 0.0195, 0.0210], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 01:58:56,455 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18878.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:58:58,536 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18879.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 01:59:48,633 INFO [train.py:892] (0/4) Epoch 11, batch 350, loss[loss=0.2244, simple_loss=0.2728, pruned_loss=0.08798, over 19776.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.2917, pruned_loss=0.08762, over 3268328.71 frames. 
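The grad_scale field is fp16 dynamic loss scaling: in the surrounding records it steps from 16.0 up to 32.0 and back down to 16.0 around Epoch 11, batch 750, the usual grow-until-overflow pattern. A sketch using the stock PyTorch scaler; the init_scale and growth_interval here are assumptions, the log only shows the resulting scale values:

```python
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=2.0, growth_factor=2.0,
                                   backoff_factor=0.5, growth_interval=1000)

def fp16_step(model, optimizer, loss_fn, batch):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = loss_fn(model, batch)
    scaler.scale(loss).backward()
    scaler.step(optimizer)     # internally skipped if any gradient overflowed
    scaler.update()            # doubles the scale after a clean run, halves it on overflow
    return scaler.get_scale()  # the value that shows up as grad_scale in the records
```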
], batch size: 152, lr: 1.45e-02, grad_scale: 32.0 +2023-03-28 02:00:05,180 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.253e+02 5.242e+02 6.180e+02 7.064e+02 1.171e+03, threshold=1.236e+03, percent-clipped=1.0 +2023-03-28 02:00:54,797 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18930.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:01:03,560 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-03-28 02:01:38,287 INFO [train.py:892] (0/4) Epoch 11, batch 400, loss[loss=0.2582, simple_loss=0.3097, pruned_loss=0.1034, over 19732.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.2908, pruned_loss=0.08718, over 3419825.50 frames. ], batch size: 209, lr: 1.45e-02, grad_scale: 32.0 +2023-03-28 02:02:08,510 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5795, 4.6853, 5.0990, 4.6627, 4.1786, 4.8624, 4.7806, 5.2699], + device='cuda:0'), covar=tensor([0.1067, 0.0335, 0.0383, 0.0356, 0.0705, 0.0404, 0.0298, 0.0261], + device='cuda:0'), in_proj_covar=tensor([0.0249, 0.0188, 0.0181, 0.0191, 0.0181, 0.0187, 0.0180, 0.0171], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 02:03:08,240 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18991.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:03:30,248 INFO [train.py:892] (0/4) Epoch 11, batch 450, loss[loss=0.2078, simple_loss=0.2693, pruned_loss=0.07317, over 19474.00 frames. ], tot_loss[loss=0.231, simple_loss=0.2897, pruned_loss=0.08615, over 3537740.68 frames. ], batch size: 43, lr: 1.45e-02, grad_scale: 32.0 +2023-03-28 02:03:47,205 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.137e+02 4.905e+02 5.923e+02 7.329e+02 1.154e+03, threshold=1.185e+03, percent-clipped=0.0 +2023-03-28 02:04:18,114 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-03-28 02:04:34,866 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19030.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:05:18,626 INFO [train.py:892] (0/4) Epoch 11, batch 500, loss[loss=0.2417, simple_loss=0.291, pruned_loss=0.09616, over 19733.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.2905, pruned_loss=0.08692, over 3628452.12 frames. ], batch size: 219, lr: 1.45e-02, grad_scale: 32.0 +2023-03-28 02:06:47,299 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-28 02:07:11,293 INFO [train.py:892] (0/4) Epoch 11, batch 550, loss[loss=0.2243, simple_loss=0.2786, pruned_loss=0.08506, over 19837.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.2911, pruned_loss=0.08781, over 3700274.63 frames. ], batch size: 143, lr: 1.45e-02, grad_scale: 32.0 +2023-03-28 02:07:27,681 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.995e+02 4.889e+02 6.159e+02 7.664e+02 1.397e+03, threshold=1.232e+03, percent-clipped=1.0 +2023-03-28 02:08:16,034 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19129.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 02:08:51,662 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19146.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:08:52,270 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. 
limit=2.0 +2023-03-28 02:09:03,313 INFO [train.py:892] (0/4) Epoch 11, batch 600, loss[loss=0.2742, simple_loss=0.3429, pruned_loss=0.1028, over 19717.00 frames. ], tot_loss[loss=0.231, simple_loss=0.2892, pruned_loss=0.08644, over 3755975.97 frames. ], batch size: 54, lr: 1.44e-02, grad_scale: 32.0 +2023-03-28 02:09:34,359 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19164.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 02:10:33,419 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19190.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 02:10:41,226 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19194.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:10:55,570 INFO [train.py:892] (0/4) Epoch 11, batch 650, loss[loss=0.2001, simple_loss=0.2498, pruned_loss=0.07522, over 19846.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2879, pruned_loss=0.08617, over 3799793.85 frames. ], batch size: 124, lr: 1.44e-02, grad_scale: 32.0 +2023-03-28 02:11:13,800 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.235e+02 5.164e+02 6.078e+02 7.650e+02 1.184e+03, threshold=1.216e+03, percent-clipped=0.0 +2023-03-28 02:11:21,714 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19212.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 02:12:45,826 INFO [train.py:892] (0/4) Epoch 11, batch 700, loss[loss=0.1924, simple_loss=0.2514, pruned_loss=0.06673, over 19752.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2883, pruned_loss=0.08592, over 3829977.34 frames. ], batch size: 44, lr: 1.44e-02, grad_scale: 32.0 +2023-03-28 02:13:15,672 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8258, 2.2454, 2.6424, 3.2099, 3.7405, 4.1837, 4.1531, 4.3526], + device='cuda:0'), covar=tensor([0.0798, 0.1950, 0.1274, 0.0476, 0.0303, 0.0181, 0.0204, 0.0266], + device='cuda:0'), in_proj_covar=tensor([0.0133, 0.0166, 0.0151, 0.0123, 0.0105, 0.0100, 0.0094, 0.0092], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 02:14:07,662 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19286.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:14:41,778 INFO [train.py:892] (0/4) Epoch 11, batch 750, loss[loss=0.2535, simple_loss=0.3157, pruned_loss=0.0957, over 19708.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.2882, pruned_loss=0.08577, over 3853188.05 frames. ], batch size: 109, lr: 1.44e-02, grad_scale: 16.0 +2023-03-28 02:15:01,323 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.572e+02 4.867e+02 5.905e+02 6.947e+02 1.334e+03, threshold=1.181e+03, percent-clipped=1.0 +2023-03-28 02:15:27,307 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.79 vs. limit=5.0 +2023-03-28 02:15:47,102 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19330.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:16:33,248 INFO [train.py:892] (0/4) Epoch 11, batch 800, loss[loss=0.1989, simple_loss=0.2652, pruned_loss=0.0663, over 19881.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.2881, pruned_loss=0.08577, over 3874685.41 frames. ], batch size: 47, lr: 1.44e-02, grad_scale: 16.0 +2023-03-28 02:16:42,240 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. 
limit=2.0 +2023-03-28 02:16:59,794 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.4592, 5.8387, 5.8844, 5.7669, 5.5935, 5.8592, 5.1776, 5.2448], + device='cuda:0'), covar=tensor([0.0336, 0.0374, 0.0514, 0.0407, 0.0473, 0.0566, 0.0680, 0.0994], + device='cuda:0'), in_proj_covar=tensor([0.0191, 0.0195, 0.0234, 0.0198, 0.0184, 0.0182, 0.0211, 0.0246], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 02:17:22,615 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6005, 4.1553, 4.1604, 4.5613, 4.1904, 4.6715, 4.6838, 4.8493], + device='cuda:0'), covar=tensor([0.0574, 0.0359, 0.0474, 0.0272, 0.0638, 0.0354, 0.0393, 0.0321], + device='cuda:0'), in_proj_covar=tensor([0.0125, 0.0143, 0.0166, 0.0137, 0.0142, 0.0122, 0.0128, 0.0160], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 02:17:37,537 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19378.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:18:12,933 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19394.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:18:27,070 INFO [train.py:892] (0/4) Epoch 11, batch 850, loss[loss=0.2099, simple_loss=0.2743, pruned_loss=0.07273, over 19770.00 frames. ], tot_loss[loss=0.231, simple_loss=0.2894, pruned_loss=0.0863, over 3890811.80 frames. ], batch size: 70, lr: 1.44e-02, grad_scale: 16.0 +2023-03-28 02:18:49,368 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.769e+02 4.779e+02 5.853e+02 6.766e+02 1.621e+03, threshold=1.171e+03, percent-clipped=3.0 +2023-03-28 02:20:13,348 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-03-28 02:20:21,647 INFO [train.py:892] (0/4) Epoch 11, batch 900, loss[loss=0.1984, simple_loss=0.2607, pruned_loss=0.06809, over 19646.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.2889, pruned_loss=0.08626, over 3904935.43 frames. ], batch size: 43, lr: 1.43e-02, grad_scale: 16.0 +2023-03-28 02:20:30,516 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19455.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:20:35,993 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7528, 4.7303, 5.1464, 4.9531, 4.8676, 4.3487, 4.7569, 4.7064], + device='cuda:0'), covar=tensor([0.1545, 0.1252, 0.0992, 0.1114, 0.1032, 0.1144, 0.2451, 0.2360], + device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0240, 0.0300, 0.0230, 0.0225, 0.0215, 0.0285, 0.0319], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 02:20:38,556 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. 
limit=2.0 +2023-03-28 02:21:02,467 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9883, 2.6150, 3.1617, 2.9953, 3.2675, 3.1927, 3.7048, 4.1556], + device='cuda:0'), covar=tensor([0.0544, 0.1820, 0.1270, 0.1669, 0.1538, 0.1415, 0.0421, 0.0476], + device='cuda:0'), in_proj_covar=tensor([0.0199, 0.0214, 0.0232, 0.0230, 0.0256, 0.0223, 0.0171, 0.0176], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 02:21:39,395 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19485.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 02:22:13,039 INFO [train.py:892] (0/4) Epoch 11, batch 950, loss[loss=0.211, simple_loss=0.2821, pruned_loss=0.06997, over 19616.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.2888, pruned_loss=0.0862, over 3914823.07 frames. ], batch size: 51, lr: 1.43e-02, grad_scale: 16.0 +2023-03-28 02:22:30,339 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.330e+02 4.753e+02 5.819e+02 7.309e+02 1.447e+03, threshold=1.164e+03, percent-clipped=3.0 +2023-03-28 02:23:07,395 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-28 02:23:14,951 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0847, 4.3442, 4.6868, 4.3315, 4.0349, 4.5753, 4.4169, 4.8059], + device='cuda:0'), covar=tensor([0.1304, 0.0349, 0.0486, 0.0382, 0.0932, 0.0447, 0.0396, 0.0403], + device='cuda:0'), in_proj_covar=tensor([0.0248, 0.0186, 0.0182, 0.0191, 0.0184, 0.0187, 0.0183, 0.0172], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 02:23:42,092 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-28 02:24:02,916 INFO [train.py:892] (0/4) Epoch 11, batch 1000, loss[loss=0.2094, simple_loss=0.2697, pruned_loss=0.07449, over 19613.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.2888, pruned_loss=0.0864, over 3921436.76 frames. ], batch size: 46, lr: 1.43e-02, grad_scale: 16.0 +2023-03-28 02:24:30,990 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2734, 2.8538, 4.3666, 3.7016, 3.9842, 4.2579, 4.2430, 4.0997], + device='cuda:0'), covar=tensor([0.0168, 0.0650, 0.0087, 0.0798, 0.0117, 0.0168, 0.0113, 0.0098], + device='cuda:0'), in_proj_covar=tensor([0.0076, 0.0088, 0.0069, 0.0140, 0.0062, 0.0076, 0.0072, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 02:25:24,834 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19586.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:25:55,158 INFO [train.py:892] (0/4) Epoch 11, batch 1050, loss[loss=0.2204, simple_loss=0.2893, pruned_loss=0.0758, over 19562.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2886, pruned_loss=0.0858, over 3926601.64 frames. ], batch size: 60, lr: 1.43e-02, grad_scale: 16.0 +2023-03-28 02:26:01,339 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. 
limit=2.0 +2023-03-28 02:26:15,115 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.901e+02 5.121e+02 6.164e+02 7.352e+02 1.446e+03, threshold=1.233e+03, percent-clipped=2.0 +2023-03-28 02:26:35,195 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19618.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:27:13,310 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19634.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:27:49,146 INFO [train.py:892] (0/4) Epoch 11, batch 1100, loss[loss=0.2096, simple_loss=0.2757, pruned_loss=0.07177, over 19693.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.2876, pruned_loss=0.08505, over 3933127.98 frames. ], batch size: 45, lr: 1.43e-02, grad_scale: 16.0 +2023-03-28 02:28:53,885 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19679.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:29:42,848 INFO [train.py:892] (0/4) Epoch 11, batch 1150, loss[loss=0.1942, simple_loss=0.2598, pruned_loss=0.06427, over 19869.00 frames. ], tot_loss[loss=0.2302, simple_loss=0.2891, pruned_loss=0.08569, over 3933961.33 frames. ], batch size: 122, lr: 1.43e-02, grad_scale: 16.0 +2023-03-28 02:30:05,100 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.048e+02 4.779e+02 5.602e+02 6.982e+02 1.239e+03, threshold=1.120e+03, percent-clipped=1.0 +2023-03-28 02:30:22,757 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.3277, 2.2384, 2.6351, 2.5028, 2.1380, 2.3374, 2.0545, 2.6017], + device='cuda:0'), covar=tensor([0.0217, 0.0302, 0.0220, 0.0218, 0.0295, 0.0257, 0.0433, 0.0276], + device='cuda:0'), in_proj_covar=tensor([0.0045, 0.0045, 0.0047, 0.0040, 0.0051, 0.0048, 0.0062, 0.0043], + device='cuda:0'), out_proj_covar=tensor([1.0183e-04, 1.0067e-04, 1.0516e-04, 9.0955e-05, 1.1385e-04, 1.0778e-04, + 1.3782e-04, 9.8491e-05], device='cuda:0') +2023-03-28 02:31:35,995 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19750.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:31:37,383 INFO [train.py:892] (0/4) Epoch 11, batch 1200, loss[loss=0.2197, simple_loss=0.2804, pruned_loss=0.07949, over 19754.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.2901, pruned_loss=0.08619, over 3937511.31 frames. ], batch size: 88, lr: 1.42e-02, grad_scale: 16.0 +2023-03-28 02:32:55,553 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19785.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 02:33:30,052 INFO [train.py:892] (0/4) Epoch 11, batch 1250, loss[loss=0.211, simple_loss=0.2656, pruned_loss=0.07814, over 19688.00 frames. ], tot_loss[loss=0.23, simple_loss=0.2886, pruned_loss=0.08566, over 3940647.46 frames. 
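The scaling.py:679 Whitening lines (e.g. metric=1.41 vs. limit=2.0 just above) compare a whiteness statistic of a module's activations against a limit; when the metric exceeds the limit, the module penalizes the activations toward a whiter covariance. A hedged reconstruction of such a metric, which equals 1.0 exactly when each per-group covariance is a multiple of the identity:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    """x: (num_frames, num_channels); channels are split into num_groups groups."""
    n, c = x.shape
    k = c // num_groups
    xg = x.reshape(n, num_groups, k).transpose(0, 1)   # (groups, frames, k)
    cov = xg.transpose(1, 2) @ xg / n                  # per-group covariance, (groups, k, k)
    mean_eig_sq = (cov * cov).sum(dim=(1, 2)) / k      # mean squared eigenvalue via ||cov||_F^2
    sq_mean_eig = (torch.diagonal(cov, dim1=1, dim2=2).sum(dim=1) / k) ** 2
    return (mean_eig_sq / sq_mean_eig).mean()          # >= 1.0; == 1.0 iff cov is white

# toy check: nearly-white features give a metric close to 1.0, well under limit=2.0
print(whitening_metric(torch.randn(1000, 96), num_groups=8))
```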
], batch size: 74, lr: 1.42e-02, grad_scale: 16.0 +2023-03-28 02:33:49,267 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.235e+02 4.789e+02 5.822e+02 7.202e+02 1.423e+03, threshold=1.164e+03, percent-clipped=5.0 +2023-03-28 02:34:43,954 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19833.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 02:34:44,073 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4831, 4.7604, 5.1311, 4.7782, 4.3481, 5.0511, 4.8158, 5.3197], + device='cuda:0'), covar=tensor([0.1245, 0.0325, 0.0445, 0.0314, 0.0603, 0.0393, 0.0346, 0.0343], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0186, 0.0182, 0.0192, 0.0180, 0.0189, 0.0183, 0.0172], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 02:35:24,439 INFO [train.py:892] (0/4) Epoch 11, batch 1300, loss[loss=0.2081, simple_loss=0.2685, pruned_loss=0.07384, over 19813.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.2896, pruned_loss=0.08668, over 3941380.11 frames. ], batch size: 103, lr: 1.42e-02, grad_scale: 16.0 +2023-03-28 02:36:44,108 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9583, 4.0898, 2.4170, 4.4956, 4.5833, 1.8924, 3.8402, 3.3668], + device='cuda:0'), covar=tensor([0.0654, 0.0801, 0.2512, 0.0495, 0.0321, 0.2859, 0.0855, 0.0644], + device='cuda:0'), in_proj_covar=tensor([0.0187, 0.0211, 0.0208, 0.0196, 0.0153, 0.0193, 0.0218, 0.0153], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 02:37:16,651 INFO [train.py:892] (0/4) Epoch 11, batch 1350, loss[loss=0.2329, simple_loss=0.2848, pruned_loss=0.09052, over 19798.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.2899, pruned_loss=0.08672, over 3941743.60 frames. ], batch size: 191, lr: 1.42e-02, grad_scale: 16.0 +2023-03-28 02:37:38,402 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.602e+02 5.196e+02 5.941e+02 7.329e+02 1.053e+03, threshold=1.188e+03, percent-clipped=0.0 +2023-03-28 02:38:06,809 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4521, 3.6239, 3.7611, 4.7397, 2.7126, 3.3278, 2.8272, 2.5717], + device='cuda:0'), covar=tensor([0.0464, 0.2397, 0.0965, 0.0208, 0.2363, 0.0837, 0.1284, 0.1898], + device='cuda:0'), in_proj_covar=tensor([0.0193, 0.0326, 0.0226, 0.0146, 0.0235, 0.0171, 0.0193, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 02:39:10,160 INFO [train.py:892] (0/4) Epoch 11, batch 1400, loss[loss=0.2008, simple_loss=0.2718, pruned_loss=0.06492, over 19677.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.2886, pruned_loss=0.08543, over 3944448.25 frames. ], batch size: 55, lr: 1.42e-02, grad_scale: 16.0 +2023-03-28 02:40:05,549 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19974.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:40:32,910 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-03-28 02:41:02,860 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-20000.pt +2023-03-28 02:41:09,344 INFO [train.py:892] (0/4) Epoch 11, batch 1450, loss[loss=0.2281, simple_loss=0.2874, pruned_loss=0.08441, over 19780.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.2872, pruned_loss=0.08433, over 3946876.39 frames. 
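Checkpoints in this log arrive on two triggers: every save_every_n = 2000 training batches (checkpoint-18000.pt earlier, checkpoint-20000.pt here) and at each epoch boundary (epoch-10.pt above, epoch-11.pt further down). A sketch of that logic with a stand-in save helper:

```python
from pathlib import Path

EXP_DIR = Path("pruned_transducer_stateless7_bbpe/exp")
SAVE_EVERY_N = 2000  # matches the 18000 -> 20000 spacing of the batch checkpoints

def save_checkpoint(path: Path) -> None:
    # stand-in for the real routine, which serializes model/optimizer/sampler state
    print(f"Saving checkpoint to {path}")

def maybe_save(batch_idx_train: int, end_of_epoch: bool, epoch: int) -> None:
    if batch_idx_train > 0 and batch_idx_train % SAVE_EVERY_N == 0:
        save_checkpoint(EXP_DIR / f"checkpoint-{batch_idx_train}.pt")
    if end_of_epoch:
        save_checkpoint(EXP_DIR / f"epoch-{epoch}.pt")
```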
], batch size: 215, lr: 1.42e-02, grad_scale: 16.0 +2023-03-28 02:41:30,118 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.362e+02 4.773e+02 5.638e+02 7.039e+02 1.448e+03, threshold=1.128e+03, percent-clipped=1.0 +2023-03-28 02:41:54,707 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20020.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:43:00,408 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20050.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:43:01,872 INFO [train.py:892] (0/4) Epoch 11, batch 1500, loss[loss=0.2169, simple_loss=0.2844, pruned_loss=0.07468, over 19559.00 frames. ], tot_loss[loss=0.2276, simple_loss=0.2871, pruned_loss=0.08406, over 3948424.76 frames. ], batch size: 60, lr: 1.41e-02, grad_scale: 16.0 +2023-03-28 02:44:11,215 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20081.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:44:30,078 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20089.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:44:50,407 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20098.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:44:55,659 INFO [train.py:892] (0/4) Epoch 11, batch 1550, loss[loss=0.2094, simple_loss=0.2741, pruned_loss=0.07237, over 19948.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.2859, pruned_loss=0.08278, over 3950305.57 frames. ], batch size: 52, lr: 1.41e-02, grad_scale: 16.0 +2023-03-28 02:45:16,070 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.591e+02 5.026e+02 5.881e+02 6.796e+02 1.345e+03, threshold=1.176e+03, percent-clipped=3.0 +2023-03-28 02:45:21,258 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20112.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:46:00,234 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0902, 4.8425, 4.8406, 5.2553, 4.9351, 5.4924, 5.1742, 5.3309], + device='cuda:0'), covar=tensor([0.0771, 0.0337, 0.0465, 0.0306, 0.0567, 0.0278, 0.0692, 0.0506], + device='cuda:0'), in_proj_covar=tensor([0.0128, 0.0147, 0.0173, 0.0143, 0.0143, 0.0124, 0.0132, 0.0162], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 02:46:41,112 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20150.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:46:42,452 INFO [train.py:892] (0/4) Epoch 11, batch 1600, loss[loss=0.2232, simple_loss=0.2786, pruned_loss=0.08391, over 19822.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.2871, pruned_loss=0.08306, over 3950093.59 frames. 
], batch size: 187, lr: 1.41e-02, grad_scale: 16.0 +2023-03-28 02:47:33,345 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20173.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:47:39,806 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.6877, 1.8521, 1.5686, 1.0442, 1.5836, 1.7645, 1.7208, 1.6942], + device='cuda:0'), covar=tensor([0.0204, 0.0198, 0.0220, 0.0475, 0.0353, 0.0216, 0.0160, 0.0155], + device='cuda:0'), in_proj_covar=tensor([0.0061, 0.0056, 0.0065, 0.0075, 0.0076, 0.0051, 0.0047, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-03-28 02:48:38,437 INFO [train.py:892] (0/4) Epoch 11, batch 1650, loss[loss=0.196, simple_loss=0.2664, pruned_loss=0.06285, over 19889.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.2844, pruned_loss=0.08176, over 3950960.13 frames. ], batch size: 63, lr: 1.41e-02, grad_scale: 16.0 +2023-03-28 02:48:45,166 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20204.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:48:56,034 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.919e+02 4.977e+02 5.709e+02 7.210e+02 1.480e+03, threshold=1.142e+03, percent-clipped=1.0 +2023-03-28 02:49:46,085 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.83 vs. limit=5.0 +2023-03-28 02:50:27,710 INFO [train.py:892] (0/4) Epoch 11, batch 1700, loss[loss=0.2011, simple_loss=0.2556, pruned_loss=0.07331, over 19814.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2842, pruned_loss=0.08153, over 3949968.91 frames. ], batch size: 123, lr: 1.41e-02, grad_scale: 16.0 +2023-03-28 02:50:58,624 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20265.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:51:07,204 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-03-28 02:51:19,700 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20274.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:52:12,816 INFO [train.py:892] (0/4) Epoch 11, batch 1750, loss[loss=0.2049, simple_loss=0.2547, pruned_loss=0.07756, over 19769.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.2853, pruned_loss=0.08267, over 3949987.63 frames. ], batch size: 130, lr: 1.41e-02, grad_scale: 16.0 +2023-03-28 02:52:29,858 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.218e+02 5.025e+02 5.710e+02 7.275e+02 1.664e+03, threshold=1.142e+03, percent-clipped=3.0 +2023-03-28 02:52:53,382 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20322.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:53:48,063 INFO [train.py:892] (0/4) Epoch 11, batch 1800, loss[loss=0.2602, simple_loss=0.3225, pruned_loss=0.09889, over 19657.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.285, pruned_loss=0.08287, over 3949570.10 frames. ], batch size: 79, lr: 1.40e-02, grad_scale: 16.0 +2023-03-28 02:54:35,640 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20376.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:54:48,408 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. 
limit=2.0 +2023-03-28 02:54:57,969 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20388.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:55:18,949 INFO [train.py:892] (0/4) Epoch 11, batch 1850, loss[loss=0.214, simple_loss=0.2881, pruned_loss=0.06992, over 19570.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.2882, pruned_loss=0.08361, over 3946627.41 frames. ], batch size: 53, lr: 1.40e-02, grad_scale: 16.0 +2023-03-28 02:55:26,690 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-11.pt +2023-03-28 02:56:26,116 INFO [train.py:892] (0/4) Epoch 12, batch 0, loss[loss=0.2052, simple_loss=0.2797, pruned_loss=0.0654, over 19839.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2797, pruned_loss=0.0654, over 19839.00 frames. ], batch size: 58, lr: 1.34e-02, grad_scale: 16.0 +2023-03-28 02:56:26,117 INFO [train.py:917] (0/4) Computing validation loss +2023-03-28 02:56:56,309 INFO [train.py:926] (0/4) Epoch 12, validation: loss=0.1761, simple_loss=0.2565, pruned_loss=0.0478, over 2883724.00 frames. +2023-03-28 02:56:56,310 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-28 02:57:06,915 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.354e+02 5.231e+02 6.194e+02 7.302e+02 1.843e+03, threshold=1.239e+03, percent-clipped=4.0 +2023-03-28 02:57:28,040 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20418.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:58:29,318 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20445.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:58:39,755 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20449.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:58:54,029 INFO [train.py:892] (0/4) Epoch 12, batch 50, loss[loss=0.2755, simple_loss=0.3277, pruned_loss=0.1117, over 19806.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2818, pruned_loss=0.08205, over 891109.32 frames. ], batch size: 229, lr: 1.34e-02, grad_scale: 16.0 +2023-03-28 02:59:15,849 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0963, 4.1359, 4.5576, 4.1644, 3.8693, 4.4454, 4.2085, 4.6027], + device='cuda:0'), covar=tensor([0.0943, 0.0304, 0.0296, 0.0303, 0.0796, 0.0359, 0.0370, 0.0290], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0185, 0.0179, 0.0188, 0.0178, 0.0189, 0.0182, 0.0171], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 02:59:19,988 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20468.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 02:59:47,620 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20479.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:00:46,531 INFO [train.py:892] (0/4) Epoch 12, batch 100, loss[loss=0.215, simple_loss=0.2699, pruned_loss=0.08007, over 19807.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2827, pruned_loss=0.0811, over 1571111.63 frames. ], batch size: 202, lr: 1.34e-02, grad_scale: 16.0 +2023-03-28 03:00:55,044 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.431e+02 4.892e+02 6.215e+02 7.181e+02 1.654e+03, threshold=1.243e+03, percent-clipped=3.0 +2023-03-28 03:01:59,772 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.71 vs. 
limit=2.0 +2023-03-28 03:02:39,167 INFO [train.py:892] (0/4) Epoch 12, batch 150, loss[loss=0.2122, simple_loss=0.2656, pruned_loss=0.07937, over 19822.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.281, pruned_loss=0.0791, over 2099269.11 frames. ], batch size: 187, lr: 1.34e-02, grad_scale: 16.0 +2023-03-28 03:02:51,576 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20560.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:03:55,671 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20588.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:04:38,603 INFO [train.py:892] (0/4) Epoch 12, batch 200, loss[loss=0.2536, simple_loss=0.308, pruned_loss=0.09966, over 19707.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2829, pruned_loss=0.08072, over 2509243.39 frames. ], batch size: 295, lr: 1.34e-02, grad_scale: 16.0 +2023-03-28 03:04:47,254 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.285e+02 4.571e+02 5.228e+02 6.546e+02 1.569e+03, threshold=1.046e+03, percent-clipped=1.0 +2023-03-28 03:06:25,620 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20649.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:06:39,385 INFO [train.py:892] (0/4) Epoch 12, batch 250, loss[loss=0.2347, simple_loss=0.3011, pruned_loss=0.08413, over 19827.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2815, pruned_loss=0.07981, over 2829004.77 frames. ], batch size: 90, lr: 1.34e-02, grad_scale: 16.0 +2023-03-28 03:07:24,994 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20676.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:08:16,209 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2264, 3.3697, 3.6150, 4.1912, 2.6805, 3.1725, 2.7901, 2.4788], + device='cuda:0'), covar=tensor([0.0377, 0.2324, 0.0836, 0.0232, 0.2101, 0.0673, 0.1109, 0.1950], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0329, 0.0222, 0.0150, 0.0237, 0.0173, 0.0193, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 03:08:28,295 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6836, 2.0623, 2.7196, 3.1897, 3.5315, 3.6453, 3.6138, 3.7750], + device='cuda:0'), covar=tensor([0.0807, 0.1753, 0.1047, 0.0461, 0.0367, 0.0218, 0.0267, 0.0262], + device='cuda:0'), in_proj_covar=tensor([0.0137, 0.0171, 0.0158, 0.0127, 0.0111, 0.0104, 0.0098, 0.0095], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 03:08:28,359 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.3030, 2.5480, 2.1351, 1.6237, 2.4639, 2.5324, 2.4997, 2.4943], + device='cuda:0'), covar=tensor([0.0231, 0.0220, 0.0220, 0.0546, 0.0279, 0.0176, 0.0163, 0.0201], + device='cuda:0'), in_proj_covar=tensor([0.0062, 0.0058, 0.0066, 0.0076, 0.0077, 0.0052, 0.0048, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-03-28 03:08:30,761 INFO [train.py:892] (0/4) Epoch 12, batch 300, loss[loss=0.2038, simple_loss=0.2599, pruned_loss=0.07383, over 19763.00 frames. ], tot_loss[loss=0.2227, simple_loss=0.2837, pruned_loss=0.08087, over 3077130.82 frames. 
], batch size: 125, lr: 1.33e-02, grad_scale: 16.0 +2023-03-28 03:08:41,180 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.455e+02 4.985e+02 6.229e+02 8.000e+02 1.241e+03, threshold=1.246e+03, percent-clipped=1.0 +2023-03-28 03:09:12,842 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20724.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:09:57,908 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20744.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:10:01,806 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20745.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:10:24,868 INFO [train.py:892] (0/4) Epoch 12, batch 350, loss[loss=0.2224, simple_loss=0.2932, pruned_loss=0.07578, over 19661.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2825, pruned_loss=0.07974, over 3270806.89 frames. ], batch size: 55, lr: 1.33e-02, grad_scale: 16.0 +2023-03-28 03:10:52,050 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20768.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:11:06,367 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20774.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:11:12,746 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.27 vs. limit=5.0 +2023-03-28 03:11:52,566 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20793.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:12:20,229 INFO [train.py:892] (0/4) Epoch 12, batch 400, loss[loss=0.2824, simple_loss=0.3346, pruned_loss=0.1151, over 19698.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.283, pruned_loss=0.08057, over 3422389.67 frames. ], batch size: 337, lr: 1.33e-02, grad_scale: 16.0 +2023-03-28 03:12:28,473 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.077e+02 4.871e+02 5.546e+02 6.425e+02 9.713e+02, threshold=1.109e+03, percent-clipped=0.0 +2023-03-28 03:12:42,752 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20816.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:13:25,254 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.8602, 2.0820, 1.7457, 1.1736, 1.8542, 1.9885, 1.9293, 1.9819], + device='cuda:0'), covar=tensor([0.0230, 0.0167, 0.0244, 0.0553, 0.0345, 0.0200, 0.0158, 0.0171], + device='cuda:0'), in_proj_covar=tensor([0.0062, 0.0058, 0.0066, 0.0076, 0.0077, 0.0051, 0.0048, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-03-28 03:13:58,635 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2226, 4.0339, 4.0451, 3.8272, 4.2182, 3.0331, 3.5394, 2.1825], + device='cuda:0'), covar=tensor([0.0191, 0.0172, 0.0132, 0.0166, 0.0127, 0.0798, 0.0624, 0.1317], + device='cuda:0'), in_proj_covar=tensor([0.0083, 0.0114, 0.0095, 0.0110, 0.0099, 0.0118, 0.0127, 0.0112], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 03:14:10,008 INFO [train.py:892] (0/4) Epoch 12, batch 450, loss[loss=0.2525, simple_loss=0.3027, pruned_loss=0.1011, over 19748.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2835, pruned_loss=0.08107, over 3539941.33 frames. 
], batch size: 276, lr: 1.33e-02, grad_scale: 16.0 +2023-03-28 03:14:21,470 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20860.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:15:08,567 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.20 vs. limit=5.0 +2023-03-28 03:16:03,419 INFO [train.py:892] (0/4) Epoch 12, batch 500, loss[loss=0.2137, simple_loss=0.2819, pruned_loss=0.07273, over 19577.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.2845, pruned_loss=0.08213, over 3631187.75 frames. ], batch size: 53, lr: 1.33e-02, grad_scale: 16.0 +2023-03-28 03:16:08,878 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20908.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:16:11,894 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.335e+02 4.795e+02 5.889e+02 7.056e+02 1.371e+03, threshold=1.178e+03, percent-clipped=3.0 +2023-03-28 03:16:24,457 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4372, 4.3351, 4.3463, 4.1413, 4.4946, 3.1985, 3.6983, 2.1541], + device='cuda:0'), covar=tensor([0.0257, 0.0189, 0.0154, 0.0180, 0.0153, 0.0799, 0.0758, 0.1611], + device='cuda:0'), in_proj_covar=tensor([0.0083, 0.0115, 0.0096, 0.0110, 0.0099, 0.0118, 0.0126, 0.0113], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 03:16:55,126 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0074, 3.2671, 3.4424, 2.6954, 3.6114, 2.8308, 2.9084, 3.4505], + device='cuda:0'), covar=tensor([0.0661, 0.0290, 0.0364, 0.0597, 0.0201, 0.0331, 0.0377, 0.0248], + device='cuda:0'), in_proj_covar=tensor([0.0058, 0.0059, 0.0063, 0.0090, 0.0057, 0.0056, 0.0053, 0.0049], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-03-28 03:17:29,218 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20944.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:17:57,146 INFO [train.py:892] (0/4) Epoch 12, batch 550, loss[loss=0.2075, simple_loss=0.279, pruned_loss=0.06797, over 19793.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.283, pruned_loss=0.08101, over 3702829.80 frames. ], batch size: 45, lr: 1.33e-02, grad_scale: 16.0 +2023-03-28 03:18:38,804 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-28 03:19:54,088 INFO [train.py:892] (0/4) Epoch 12, batch 600, loss[loss=0.1956, simple_loss=0.2624, pruned_loss=0.06444, over 19765.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.284, pruned_loss=0.08124, over 3757618.66 frames. 
], batch size: 113, lr: 1.32e-02, grad_scale: 16.0 +2023-03-28 03:20:01,790 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.833e+02 4.713e+02 5.422e+02 6.814e+02 1.308e+03, threshold=1.084e+03, percent-clipped=1.0 +2023-03-28 03:20:33,770 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4274, 4.4757, 4.9143, 4.4207, 4.1784, 4.7561, 4.5436, 5.0339], + device='cuda:0'), covar=tensor([0.0983, 0.0350, 0.0337, 0.0336, 0.0690, 0.0374, 0.0388, 0.0296], + device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0188, 0.0187, 0.0193, 0.0182, 0.0193, 0.0186, 0.0176], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 03:20:50,177 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-03-28 03:21:20,873 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21044.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:21:37,346 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21052.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:21:45,740 INFO [train.py:892] (0/4) Epoch 12, batch 650, loss[loss=0.2633, simple_loss=0.3113, pruned_loss=0.1076, over 19764.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2835, pruned_loss=0.08158, over 3800496.26 frames. ], batch size: 244, lr: 1.32e-02, grad_scale: 16.0 +2023-03-28 03:21:52,748 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.0653, 2.1112, 2.1924, 2.0430, 2.1443, 2.1103, 2.1146, 2.3254], + device='cuda:0'), covar=tensor([0.0170, 0.0246, 0.0241, 0.0231, 0.0251, 0.0236, 0.0286, 0.0221], + device='cuda:0'), in_proj_covar=tensor([0.0049, 0.0048, 0.0050, 0.0044, 0.0054, 0.0050, 0.0064, 0.0045], + device='cuda:0'), out_proj_covar=tensor([1.0887e-04, 1.0779e-04, 1.1093e-04, 9.9478e-05, 1.2147e-04, 1.1348e-04, + 1.4259e-04, 1.0268e-04], device='cuda:0') +2023-03-28 03:22:27,441 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21074.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:22:38,884 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9835, 3.9251, 4.3492, 4.1538, 4.2860, 3.7632, 4.0413, 3.9450], + device='cuda:0'), covar=tensor([0.1362, 0.1468, 0.0975, 0.1080, 0.0969, 0.1094, 0.2234, 0.2276], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0246, 0.0302, 0.0234, 0.0220, 0.0217, 0.0291, 0.0326], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 03:23:04,228 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0586, 4.3721, 4.3915, 4.2496, 4.0746, 4.3224, 3.8089, 3.9085], + device='cuda:0'), covar=tensor([0.0468, 0.0443, 0.0527, 0.0481, 0.0647, 0.0588, 0.0693, 0.1038], + device='cuda:0'), in_proj_covar=tensor([0.0201, 0.0203, 0.0240, 0.0204, 0.0193, 0.0188, 0.0215, 0.0257], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 03:23:09,810 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21092.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:23:39,578 INFO [train.py:892] (0/4) Epoch 12, batch 700, loss[loss=0.2405, simple_loss=0.3003, pruned_loss=0.09033, over 19767.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2834, pruned_loss=0.08148, over 3834157.55 frames. 
], batch size: 113, lr: 1.32e-02, grad_scale: 16.0 +2023-03-28 03:23:49,566 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.058e+02 5.405e+02 6.727e+02 8.130e+02 1.465e+03, threshold=1.345e+03, percent-clipped=5.0 +2023-03-28 03:23:56,807 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21113.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:24:00,777 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7806, 4.9637, 5.2694, 5.0807, 5.0566, 4.7081, 4.9087, 4.7875], + device='cuda:0'), covar=tensor([0.1530, 0.1065, 0.0928, 0.1025, 0.0729, 0.0796, 0.2186, 0.1970], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0246, 0.0303, 0.0234, 0.0221, 0.0216, 0.0291, 0.0326], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 03:24:07,580 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21118.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:24:17,982 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21122.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:24:24,706 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.9576, 2.0279, 2.2050, 1.9834, 1.9520, 2.0572, 1.9805, 2.3329], + device='cuda:0'), covar=tensor([0.0227, 0.0254, 0.0196, 0.0240, 0.0290, 0.0264, 0.0351, 0.0207], + device='cuda:0'), in_proj_covar=tensor([0.0049, 0.0048, 0.0049, 0.0044, 0.0053, 0.0050, 0.0064, 0.0045], + device='cuda:0'), out_proj_covar=tensor([1.0881e-04, 1.0702e-04, 1.0945e-04, 9.9045e-05, 1.2057e-04, 1.1292e-04, + 1.4173e-04, 1.0227e-04], device='cuda:0') +2023-03-28 03:25:33,728 INFO [train.py:892] (0/4) Epoch 12, batch 750, loss[loss=0.226, simple_loss=0.2914, pruned_loss=0.08031, over 19831.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.281, pruned_loss=0.07995, over 3861752.37 frames. ], batch size: 76, lr: 1.32e-02, grad_scale: 16.0 +2023-03-28 03:26:27,383 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21179.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:27:15,240 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4439, 3.5749, 2.0571, 3.7524, 3.7659, 1.6705, 3.1052, 2.8332], + device='cuda:0'), covar=tensor([0.0675, 0.0757, 0.2646, 0.0592, 0.0370, 0.2975, 0.1129, 0.0691], + device='cuda:0'), in_proj_covar=tensor([0.0192, 0.0215, 0.0209, 0.0201, 0.0159, 0.0195, 0.0220, 0.0157], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 03:27:28,537 INFO [train.py:892] (0/4) Epoch 12, batch 800, loss[loss=0.2014, simple_loss=0.266, pruned_loss=0.06842, over 19866.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2821, pruned_loss=0.0809, over 3881707.65 frames. ], batch size: 104, lr: 1.32e-02, grad_scale: 16.0 +2023-03-28 03:27:37,182 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.248e+02 4.727e+02 5.705e+02 6.809e+02 1.528e+03, threshold=1.141e+03, percent-clipped=1.0 +2023-03-28 03:28:30,069 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-03-28 03:28:55,957 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21244.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:29:23,826 INFO [train.py:892] (0/4) Epoch 12, batch 850, loss[loss=0.2536, simple_loss=0.3019, pruned_loss=0.1027, over 19821.00 frames. 
], tot_loss[loss=0.2212, simple_loss=0.2818, pruned_loss=0.08026, over 3896768.33 frames. ], batch size: 184, lr: 1.32e-02, grad_scale: 16.0 +2023-03-28 03:30:32,676 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0600, 4.0822, 2.3916, 4.6616, 4.8280, 1.7678, 3.8679, 3.2591], + device='cuda:0'), covar=tensor([0.0650, 0.0772, 0.2672, 0.0473, 0.0317, 0.3066, 0.0955, 0.0837], + device='cuda:0'), in_proj_covar=tensor([0.0191, 0.0212, 0.0208, 0.0200, 0.0159, 0.0193, 0.0219, 0.0156], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 03:30:45,181 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21292.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:31:14,745 INFO [train.py:892] (0/4) Epoch 12, batch 900, loss[loss=0.2262, simple_loss=0.2952, pruned_loss=0.07858, over 19764.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2812, pruned_loss=0.07927, over 3908725.09 frames. ], batch size: 88, lr: 1.32e-02, grad_scale: 32.0 +2023-03-28 03:31:22,631 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.874e+02 4.590e+02 5.772e+02 7.040e+02 1.378e+03, threshold=1.154e+03, percent-clipped=2.0 +2023-03-28 03:31:27,803 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21312.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:32:07,180 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21328.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:33:10,591 INFO [train.py:892] (0/4) Epoch 12, batch 950, loss[loss=0.227, simple_loss=0.2837, pruned_loss=0.08511, over 19818.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2816, pruned_loss=0.07953, over 3918904.89 frames. ], batch size: 208, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:33:48,230 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21373.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:34:26,615 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21389.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:35:02,712 INFO [train.py:892] (0/4) Epoch 12, batch 1000, loss[loss=0.2652, simple_loss=0.315, pruned_loss=0.1077, over 19687.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2827, pruned_loss=0.08043, over 3924932.68 frames. ], batch size: 325, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:35:08,950 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21408.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:35:14,103 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.713e+02 5.169e+02 6.246e+02 8.325e+02 1.505e+03, threshold=1.249e+03, percent-clipped=4.0 +2023-03-28 03:35:33,480 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1130, 2.1749, 2.4373, 2.3090, 2.1424, 2.0783, 2.0404, 2.4627], + device='cuda:0'), covar=tensor([0.0241, 0.0234, 0.0201, 0.0210, 0.0306, 0.0344, 0.0366, 0.0265], + device='cuda:0'), in_proj_covar=tensor([0.0049, 0.0048, 0.0049, 0.0043, 0.0054, 0.0050, 0.0065, 0.0046], + device='cuda:0'), out_proj_covar=tensor([1.0862e-04, 1.0666e-04, 1.0982e-04, 9.7197e-05, 1.2173e-04, 1.1301e-04, + 1.4312e-04, 1.0377e-04], device='cuda:0') +2023-03-28 03:36:53,857 INFO [train.py:892] (0/4) Epoch 12, batch 1050, loss[loss=0.3417, simple_loss=0.3886, pruned_loss=0.1473, over 19252.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2835, pruned_loss=0.0816, over 3930063.56 frames. 
], batch size: 483, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:37:04,398 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.57 vs. limit=5.0 +2023-03-28 03:37:32,640 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21474.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:37:37,094 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0250, 3.3330, 3.0922, 2.3159, 2.9507, 3.1637, 3.0503, 3.1178], + device='cuda:0'), covar=tensor([0.0194, 0.0216, 0.0157, 0.0460, 0.0247, 0.0170, 0.0180, 0.0148], + device='cuda:0'), in_proj_covar=tensor([0.0063, 0.0059, 0.0068, 0.0078, 0.0079, 0.0054, 0.0049, 0.0052], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-03-28 03:38:44,173 INFO [train.py:892] (0/4) Epoch 12, batch 1100, loss[loss=0.3276, simple_loss=0.3828, pruned_loss=0.1363, over 19220.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2823, pruned_loss=0.08072, over 3934797.52 frames. ], batch size: 483, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:38:56,791 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.523e+02 4.626e+02 5.286e+02 6.822e+02 1.206e+03, threshold=1.057e+03, percent-clipped=0.0 +2023-03-28 03:39:51,163 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8054, 2.0581, 2.1748, 1.9086, 2.4485, 2.5264, 2.7332, 2.8735], + device='cuda:0'), covar=tensor([0.0740, 0.1540, 0.1622, 0.1826, 0.1196, 0.1120, 0.0675, 0.0573], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0220, 0.0241, 0.0236, 0.0265, 0.0231, 0.0184, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 03:40:01,788 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.8167, 1.9880, 1.7276, 1.0935, 1.7906, 1.8737, 1.8283, 1.8997], + device='cuda:0'), covar=tensor([0.0244, 0.0187, 0.0260, 0.0597, 0.0407, 0.0209, 0.0192, 0.0187], + device='cuda:0'), in_proj_covar=tensor([0.0063, 0.0059, 0.0068, 0.0077, 0.0078, 0.0054, 0.0049, 0.0052], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-03-28 03:40:37,774 INFO [train.py:892] (0/4) Epoch 12, batch 1150, loss[loss=0.2298, simple_loss=0.2899, pruned_loss=0.08482, over 19771.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2835, pruned_loss=0.08121, over 3936072.12 frames. ], batch size: 69, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:42:06,785 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21595.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:42:32,682 INFO [train.py:892] (0/4) Epoch 12, batch 1200, loss[loss=0.253, simple_loss=0.3084, pruned_loss=0.09884, over 19746.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.2857, pruned_loss=0.0824, over 3938536.17 frames. ], batch size: 259, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:42:43,490 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.726e+02 4.687e+02 5.841e+02 7.196e+02 1.649e+03, threshold=1.168e+03, percent-clipped=4.0 +2023-03-28 03:42:48,463 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21613.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:44:28,814 INFO [train.py:892] (0/4) Epoch 12, batch 1250, loss[loss=0.2164, simple_loss=0.2689, pruned_loss=0.08198, over 19731.00 frames. 
], tot_loss[loss=0.2249, simple_loss=0.2856, pruned_loss=0.08212, over 3938342.87 frames. ], batch size: 77, lr: 1.31e-02, grad_scale: 16.0 +2023-03-28 03:44:29,847 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21656.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:44:54,379 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21668.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:45:08,201 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21674.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:45:32,073 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21684.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:46:19,270 INFO [train.py:892] (0/4) Epoch 12, batch 1300, loss[loss=0.2343, simple_loss=0.284, pruned_loss=0.0923, over 19828.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.2844, pruned_loss=0.08143, over 3941332.22 frames. ], batch size: 166, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:46:24,320 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21708.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:46:31,046 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.976e+02 4.739e+02 5.550e+02 6.963e+02 1.418e+03, threshold=1.110e+03, percent-clipped=2.0 +2023-03-28 03:48:12,974 INFO [train.py:892] (0/4) Epoch 12, batch 1350, loss[loss=0.2156, simple_loss=0.2783, pruned_loss=0.07644, over 19773.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.2873, pruned_loss=0.08299, over 3941064.21 frames. ], batch size: 130, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:48:13,690 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21756.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:48:27,736 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21763.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:48:52,541 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21774.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:49:18,597 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2524, 5.6325, 5.6353, 5.5417, 5.2795, 5.5763, 4.9922, 5.1284], + device='cuda:0'), covar=tensor([0.0337, 0.0349, 0.0477, 0.0411, 0.0527, 0.0527, 0.0656, 0.0724], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0203, 0.0242, 0.0205, 0.0194, 0.0190, 0.0219, 0.0255], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 03:49:45,625 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4161, 1.8126, 2.0837, 2.7289, 3.0387, 3.2193, 3.1408, 3.2948], + device='cuda:0'), covar=tensor([0.0870, 0.1767, 0.1363, 0.0559, 0.0362, 0.0200, 0.0265, 0.0216], + device='cuda:0'), in_proj_covar=tensor([0.0138, 0.0169, 0.0158, 0.0131, 0.0112, 0.0105, 0.0098, 0.0095], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 03:50:01,072 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0591, 4.0102, 2.2984, 4.3938, 4.3978, 1.8909, 3.6509, 3.3710], + device='cuda:0'), covar=tensor([0.0581, 0.0829, 0.2558, 0.0556, 0.0360, 0.2942, 0.0978, 0.0604], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0218, 0.0212, 0.0207, 0.0167, 0.0197, 0.0222, 0.0161], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 
0.0002], + device='cuda:0') +2023-03-28 03:50:04,165 INFO [train.py:892] (0/4) Epoch 12, batch 1400, loss[loss=0.2345, simple_loss=0.3007, pruned_loss=0.08411, over 19662.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2859, pruned_loss=0.08223, over 3943651.33 frames. ], batch size: 50, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:50:16,313 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.208e+02 4.274e+02 5.050e+02 6.143e+02 1.134e+03, threshold=1.010e+03, percent-clipped=1.0 +2023-03-28 03:50:39,028 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-03-28 03:50:40,211 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21822.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:50:47,553 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21824.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:51:33,528 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3235, 3.0235, 3.3628, 2.4263, 3.9824, 2.7925, 3.0722, 3.8794], + device='cuda:0'), covar=tensor([0.0672, 0.0458, 0.0617, 0.0814, 0.0211, 0.0337, 0.0381, 0.0187], + device='cuda:0'), in_proj_covar=tensor([0.0057, 0.0062, 0.0063, 0.0091, 0.0058, 0.0056, 0.0054, 0.0049], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-03-28 03:51:44,499 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21850.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:51:55,131 INFO [train.py:892] (0/4) Epoch 12, batch 1450, loss[loss=0.2622, simple_loss=0.3065, pruned_loss=0.1089, over 19764.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.285, pruned_loss=0.08123, over 3945072.57 frames. ], batch size: 130, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:53:01,129 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.87 vs. limit=5.0 +2023-03-28 03:53:47,754 INFO [train.py:892] (0/4) Epoch 12, batch 1500, loss[loss=0.2085, simple_loss=0.2594, pruned_loss=0.07883, over 19824.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2839, pruned_loss=0.08088, over 3946548.88 frames. ], batch size: 184, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:53:58,746 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.501e+02 4.832e+02 5.724e+02 6.820e+02 1.583e+03, threshold=1.145e+03, percent-clipped=5.0 +2023-03-28 03:53:59,739 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21911.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:55:31,435 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21951.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:55:42,225 INFO [train.py:892] (0/4) Epoch 12, batch 1550, loss[loss=0.1948, simple_loss=0.2562, pruned_loss=0.06667, over 19754.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2845, pruned_loss=0.08081, over 3945959.27 frames. 
], batch size: 102, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:56:10,171 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21968.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:56:12,658 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21969.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:56:15,035 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21970.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:56:49,408 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21984.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:57:24,504 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-22000.pt +2023-03-28 03:57:43,217 INFO [train.py:892] (0/4) Epoch 12, batch 1600, loss[loss=0.1989, simple_loss=0.2644, pruned_loss=0.06669, over 19836.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.2856, pruned_loss=0.08135, over 3945880.34 frames. ], batch size: 171, lr: 1.30e-02, grad_scale: 16.0 +2023-03-28 03:57:54,040 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.885e+02 4.412e+02 5.565e+02 6.803e+02 1.208e+03, threshold=1.113e+03, percent-clipped=1.0 +2023-03-28 03:58:08,194 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22016.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:58:39,039 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22031.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:58:41,886 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22032.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 03:59:27,938 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6833, 4.3737, 4.4432, 4.1771, 4.5841, 3.0365, 3.7993, 2.4659], + device='cuda:0'), covar=tensor([0.0195, 0.0176, 0.0151, 0.0177, 0.0160, 0.0875, 0.0800, 0.1297], + device='cuda:0'), in_proj_covar=tensor([0.0083, 0.0118, 0.0098, 0.0114, 0.0102, 0.0121, 0.0131, 0.0116], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 03:59:37,626 INFO [train.py:892] (0/4) Epoch 12, batch 1650, loss[loss=0.2288, simple_loss=0.2942, pruned_loss=0.08173, over 19884.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.2854, pruned_loss=0.08159, over 3945292.41 frames. ], batch size: 61, lr: 1.29e-02, grad_scale: 16.0 +2023-03-28 04:01:07,740 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3055, 2.5762, 3.5407, 2.9456, 3.1417, 3.2433, 1.9816, 2.1248], + device='cuda:0'), covar=tensor([0.0790, 0.2194, 0.0431, 0.0708, 0.1276, 0.0838, 0.1776, 0.2083], + device='cuda:0'), in_proj_covar=tensor([0.0302, 0.0335, 0.0270, 0.0223, 0.0328, 0.0270, 0.0288, 0.0267], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 04:01:33,557 INFO [train.py:892] (0/4) Epoch 12, batch 1700, loss[loss=0.2401, simple_loss=0.2947, pruned_loss=0.09274, over 19793.00 frames. ], tot_loss[loss=0.226, simple_loss=0.2871, pruned_loss=0.08243, over 3944049.54 frames. 
], batch size: 168, lr: 1.29e-02, grad_scale: 16.0 +2023-03-28 04:01:45,050 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.089e+02 4.459e+02 5.514e+02 6.707e+02 1.132e+03, threshold=1.103e+03, percent-clipped=1.0 +2023-03-28 04:02:01,760 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22119.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:02:26,568 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22130.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:03:17,836 INFO [train.py:892] (0/4) Epoch 12, batch 1750, loss[loss=0.2365, simple_loss=0.3088, pruned_loss=0.0821, over 19875.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.2856, pruned_loss=0.08161, over 3946397.46 frames. ], batch size: 53, lr: 1.29e-02, grad_scale: 16.0 +2023-03-28 04:03:56,075 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22176.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:03:57,689 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9895, 2.9346, 1.6884, 3.7439, 3.3935, 3.6287, 3.6556, 2.8358], + device='cuda:0'), covar=tensor([0.0564, 0.0521, 0.1725, 0.0419, 0.0536, 0.0327, 0.0549, 0.0717], + device='cuda:0'), in_proj_covar=tensor([0.0124, 0.0121, 0.0132, 0.0123, 0.0106, 0.0103, 0.0119, 0.0126], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 04:04:22,150 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22191.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:04:36,028 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6118, 1.9929, 2.6261, 3.0505, 3.4585, 3.6298, 3.6607, 3.7322], + device='cuda:0'), covar=tensor([0.0841, 0.1868, 0.1139, 0.0501, 0.0345, 0.0206, 0.0215, 0.0317], + device='cuda:0'), in_proj_covar=tensor([0.0137, 0.0169, 0.0158, 0.0131, 0.0111, 0.0105, 0.0097, 0.0096], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 04:04:47,347 INFO [train.py:892] (0/4) Epoch 12, batch 1800, loss[loss=0.2064, simple_loss=0.2648, pruned_loss=0.07406, over 19944.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2847, pruned_loss=0.08136, over 3946223.12 frames. ], batch size: 46, lr: 1.29e-02, grad_scale: 16.0 +2023-03-28 04:04:47,821 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22206.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:04:55,760 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.946e+02 4.603e+02 5.856e+02 6.792e+02 1.518e+03, threshold=1.171e+03, percent-clipped=5.0 +2023-03-28 04:05:03,169 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22215.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:05:41,017 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22237.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:05:41,070 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22237.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:06:03,985 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22251.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:06:11,640 INFO [train.py:892] (0/4) Epoch 12, batch 1850, loss[loss=0.2187, simple_loss=0.2888, pruned_loss=0.07431, over 19588.00 frames. 
], tot_loss[loss=0.2228, simple_loss=0.2847, pruned_loss=0.08041, over 3945885.69 frames. ], batch size: 53, lr: 1.29e-02, grad_scale: 16.0 +2023-03-28 04:06:19,133 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-12.pt +2023-03-28 04:07:10,138 INFO [train.py:892] (0/4) Epoch 13, batch 0, loss[loss=0.212, simple_loss=0.2753, pruned_loss=0.07435, over 19620.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2753, pruned_loss=0.07435, over 19620.00 frames. ], batch size: 52, lr: 1.24e-02, grad_scale: 16.0 +2023-03-28 04:07:10,140 INFO [train.py:917] (0/4) Computing validation loss +2023-03-28 04:07:38,662 INFO [train.py:926] (0/4) Epoch 13, validation: loss=0.1745, simple_loss=0.2543, pruned_loss=0.04732, over 2883724.00 frames. +2023-03-28 04:07:38,663 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-28 04:07:56,519 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22269.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:07:58,714 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.6662, 6.0263, 6.0593, 5.9550, 5.8109, 6.0208, 5.3401, 5.3422], + device='cuda:0'), covar=tensor([0.0382, 0.0437, 0.0517, 0.0375, 0.0578, 0.0499, 0.0668, 0.0898], + device='cuda:0'), in_proj_covar=tensor([0.0202, 0.0209, 0.0244, 0.0209, 0.0194, 0.0192, 0.0219, 0.0258], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 04:08:13,494 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22276.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:09:03,343 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22298.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:09:05,126 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22299.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:09:32,909 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.888e+02 4.546e+02 5.479e+02 6.348e+02 1.428e+03, threshold=1.096e+03, percent-clipped=1.0 +2023-03-28 04:09:32,935 INFO [train.py:892] (0/4) Epoch 13, batch 50, loss[loss=0.2329, simple_loss=0.3011, pruned_loss=0.0823, over 19740.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.2814, pruned_loss=0.08171, over 888014.08 frames. ], batch size: 80, lr: 1.24e-02, grad_scale: 16.0 +2023-03-28 04:09:45,904 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22317.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:10:02,884 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7977, 2.0948, 2.6282, 3.1625, 3.5974, 3.8541, 3.8878, 3.9428], + device='cuda:0'), covar=tensor([0.0829, 0.1848, 0.1290, 0.0500, 0.0353, 0.0173, 0.0260, 0.0305], + device='cuda:0'), in_proj_covar=tensor([0.0136, 0.0166, 0.0158, 0.0129, 0.0111, 0.0105, 0.0097, 0.0094], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 04:10:08,380 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22326.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:11:24,757 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. limit=2.0 +2023-03-28 04:11:26,801 INFO [train.py:892] (0/4) Epoch 13, batch 100, loss[loss=0.2053, simple_loss=0.2609, pruned_loss=0.07485, over 19824.00 frames. 
], tot_loss[loss=0.2205, simple_loss=0.2801, pruned_loss=0.08045, over 1568218.39 frames. ], batch size: 146, lr: 1.24e-02, grad_scale: 16.0 +2023-03-28 04:11:32,575 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0911, 2.3669, 2.9987, 3.5514, 3.9945, 4.5456, 4.3855, 4.5010], + device='cuda:0'), covar=tensor([0.0773, 0.1954, 0.1299, 0.0454, 0.0298, 0.0150, 0.0229, 0.0345], + device='cuda:0'), in_proj_covar=tensor([0.0137, 0.0166, 0.0158, 0.0129, 0.0110, 0.0105, 0.0098, 0.0095], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 04:12:24,433 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22385.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:12:46,788 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1823, 5.4322, 5.7533, 5.4591, 5.4209, 5.0823, 5.3146, 5.2311], + device='cuda:0'), covar=tensor([0.1211, 0.1300, 0.0870, 0.0952, 0.0604, 0.0873, 0.1994, 0.1824], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0251, 0.0305, 0.0235, 0.0224, 0.0221, 0.0297, 0.0330], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 04:13:22,673 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.479e+02 4.716e+02 5.729e+02 6.753e+02 1.100e+03, threshold=1.146e+03, percent-clipped=1.0 +2023-03-28 04:13:22,713 INFO [train.py:892] (0/4) Epoch 13, batch 150, loss[loss=0.2262, simple_loss=0.2821, pruned_loss=0.08514, over 19781.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2811, pruned_loss=0.08056, over 2096204.87 frames. ], batch size: 193, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:13:39,951 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22419.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:14:41,921 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22446.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:15:13,586 INFO [train.py:892] (0/4) Epoch 13, batch 200, loss[loss=0.201, simple_loss=0.2664, pruned_loss=0.06778, over 19835.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2795, pruned_loss=0.07934, over 2507941.41 frames. 
], batch size: 93, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:15:25,022 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7002, 3.2163, 4.9063, 4.0884, 4.6420, 4.7457, 4.7014, 4.5960], + device='cuda:0'), covar=tensor([0.0208, 0.0659, 0.0090, 0.0807, 0.0104, 0.0171, 0.0115, 0.0099], + device='cuda:0'), in_proj_covar=tensor([0.0079, 0.0088, 0.0071, 0.0139, 0.0065, 0.0079, 0.0074, 0.0064], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 04:15:26,928 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22467.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:16:09,390 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22486.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:16:56,127 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22506.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:17:04,713 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.872e+02 4.579e+02 5.437e+02 6.895e+02 1.289e+03, threshold=1.087e+03, percent-clipped=2.0 +2023-03-28 04:17:04,742 INFO [train.py:892] (0/4) Epoch 13, batch 250, loss[loss=0.3033, simple_loss=0.3844, pruned_loss=0.1111, over 18732.00 frames. ], tot_loss[loss=0.2194, simple_loss=0.2806, pruned_loss=0.07912, over 2825493.70 frames. ], batch size: 564, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:17:55,677 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22532.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:18:06,711 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1025, 2.8644, 3.1424, 2.5447, 3.3570, 2.6073, 2.8405, 3.2974], + device='cuda:0'), covar=tensor([0.0399, 0.0396, 0.0483, 0.0678, 0.0270, 0.0370, 0.0430, 0.0255], + device='cuda:0'), in_proj_covar=tensor([0.0060, 0.0064, 0.0065, 0.0094, 0.0062, 0.0059, 0.0056, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 04:18:06,758 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4646, 2.7425, 2.4589, 1.9055, 2.5731, 2.7010, 2.6118, 2.7287], + device='cuda:0'), covar=tensor([0.0234, 0.0237, 0.0213, 0.0443, 0.0289, 0.0183, 0.0164, 0.0144], + device='cuda:0'), in_proj_covar=tensor([0.0064, 0.0061, 0.0068, 0.0077, 0.0079, 0.0054, 0.0050, 0.0052], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-03-28 04:18:19,867 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.43 vs. limit=5.0 +2023-03-28 04:18:44,481 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5327, 2.9487, 3.4518, 3.2541, 3.7335, 3.8112, 4.2925, 4.7947], + device='cuda:0'), covar=tensor([0.0450, 0.1551, 0.1348, 0.1815, 0.1617, 0.1095, 0.0436, 0.0410], + device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0217, 0.0238, 0.0234, 0.0264, 0.0229, 0.0181, 0.0191], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 04:18:46,170 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22554.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:19:01,078 INFO [train.py:892] (0/4) Epoch 13, batch 300, loss[loss=0.1954, simple_loss=0.276, pruned_loss=0.05744, over 19579.00 frames. 
], tot_loss[loss=0.2196, simple_loss=0.2811, pruned_loss=0.079, over 3074953.19 frames. ], batch size: 53, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:19:22,834 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22571.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:20:07,384 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-03-28 04:20:14,502 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22593.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:20:52,587 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.162e+02 4.855e+02 5.580e+02 6.636e+02 1.112e+03, threshold=1.116e+03, percent-clipped=2.0 +2023-03-28 04:20:52,609 INFO [train.py:892] (0/4) Epoch 13, batch 350, loss[loss=0.2023, simple_loss=0.2661, pruned_loss=0.0693, over 19789.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2821, pruned_loss=0.07965, over 3267026.27 frames. ], batch size: 73, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:21:24,600 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22626.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:21:52,175 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22639.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:22:35,970 INFO [train.py:892] (0/4) Epoch 13, batch 400, loss[loss=0.1991, simple_loss=0.2617, pruned_loss=0.06828, over 19747.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.2809, pruned_loss=0.07875, over 3418608.32 frames. ], batch size: 110, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:23:05,893 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22674.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:23:36,955 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5235, 4.4104, 4.9975, 4.7551, 4.8161, 4.2287, 4.6318, 4.5373], + device='cuda:0'), covar=tensor([0.1291, 0.1259, 0.0819, 0.1078, 0.0735, 0.1040, 0.1765, 0.1753], + device='cuda:0'), in_proj_covar=tensor([0.0255, 0.0251, 0.0308, 0.0240, 0.0227, 0.0223, 0.0298, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 04:24:00,898 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22700.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 04:24:25,241 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.862e+02 4.701e+02 5.537e+02 6.698e+02 1.146e+03, threshold=1.107e+03, percent-clipped=1.0 +2023-03-28 04:24:25,270 INFO [train.py:892] (0/4) Epoch 13, batch 450, loss[loss=0.2037, simple_loss=0.2639, pruned_loss=0.07171, over 19826.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2815, pruned_loss=0.07914, over 3536967.85 frames. 
], batch size: 121, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:25:35,704 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22741.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:25:49,448 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7191, 2.8000, 2.9365, 2.2104, 2.9704, 2.2898, 2.7204, 2.8390], + device='cuda:0'), covar=tensor([0.0428, 0.0289, 0.0345, 0.0722, 0.0271, 0.0412, 0.0321, 0.0322], + device='cuda:0'), in_proj_covar=tensor([0.0060, 0.0063, 0.0065, 0.0093, 0.0061, 0.0058, 0.0056, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 04:26:18,711 INFO [train.py:892] (0/4) Epoch 13, batch 500, loss[loss=0.1789, simple_loss=0.2433, pruned_loss=0.05721, over 19800.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2823, pruned_loss=0.0798, over 3628540.65 frames. ], batch size: 86, lr: 1.23e-02, grad_scale: 16.0 +2023-03-28 04:27:08,181 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8291, 2.2302, 2.5642, 3.0579, 3.7351, 3.9305, 3.7857, 4.0328], + device='cuda:0'), covar=tensor([0.0818, 0.1569, 0.1192, 0.0538, 0.0291, 0.0164, 0.0301, 0.0233], + device='cuda:0'), in_proj_covar=tensor([0.0140, 0.0167, 0.0160, 0.0130, 0.0113, 0.0105, 0.0100, 0.0096], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 04:27:17,024 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22786.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:27:33,164 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22794.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:28:12,226 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.034e+02 4.675e+02 5.713e+02 7.210e+02 1.341e+03, threshold=1.143e+03, percent-clipped=1.0 +2023-03-28 04:28:12,253 INFO [train.py:892] (0/4) Epoch 13, batch 550, loss[loss=0.198, simple_loss=0.2596, pruned_loss=0.06819, over 19833.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2821, pruned_loss=0.07966, over 3698330.11 frames. 
], batch size: 93, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:28:19,075 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6389, 2.0009, 2.2445, 2.8447, 3.4412, 3.5742, 3.5263, 3.6976], + device='cuda:0'), covar=tensor([0.0827, 0.1775, 0.1322, 0.0596, 0.0333, 0.0202, 0.0260, 0.0241], + device='cuda:0'), in_proj_covar=tensor([0.0139, 0.0167, 0.0159, 0.0130, 0.0113, 0.0105, 0.0099, 0.0096], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 04:28:45,023 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22825.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:29:01,517 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22832.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:29:05,803 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22834.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:29:45,195 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3178, 2.5728, 4.5872, 3.7504, 4.2410, 4.4385, 4.4083, 4.3261], + device='cuda:0'), covar=tensor([0.0170, 0.0813, 0.0083, 0.0957, 0.0115, 0.0174, 0.0130, 0.0118], + device='cuda:0'), in_proj_covar=tensor([0.0082, 0.0090, 0.0072, 0.0143, 0.0066, 0.0081, 0.0075, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 04:29:54,755 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22855.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:30:05,686 INFO [train.py:892] (0/4) Epoch 13, batch 600, loss[loss=0.2006, simple_loss=0.2682, pruned_loss=0.0665, over 19864.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2816, pruned_loss=0.07921, over 3754344.44 frames. ], batch size: 118, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:30:12,988 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.83 vs. limit=5.0 +2023-03-28 04:30:27,404 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22871.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:30:47,649 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22880.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:31:02,327 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22886.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:31:12,418 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.13 vs. limit=5.0 +2023-03-28 04:31:18,017 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22893.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:31:58,585 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 4.701e+02 5.475e+02 6.591e+02 1.011e+03, threshold=1.095e+03, percent-clipped=0.0 +2023-03-28 04:31:58,608 INFO [train.py:892] (0/4) Epoch 13, batch 650, loss[loss=0.2329, simple_loss=0.2936, pruned_loss=0.08609, over 19809.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2819, pruned_loss=0.07922, over 3797092.61 frames. 
], batch size: 147, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:32:16,407 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22919.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:33:06,184 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22941.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:33:50,895 INFO [train.py:892] (0/4) Epoch 13, batch 700, loss[loss=0.2351, simple_loss=0.2868, pruned_loss=0.09173, over 19799.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2825, pruned_loss=0.07927, over 3830404.88 frames. ], batch size: 126, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:35:05,857 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22995.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 04:35:40,970 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.530e+02 4.675e+02 5.708e+02 7.114e+02 1.590e+03, threshold=1.142e+03, percent-clipped=5.0 +2023-03-28 04:35:41,003 INFO [train.py:892] (0/4) Epoch 13, batch 750, loss[loss=0.1877, simple_loss=0.2599, pruned_loss=0.05776, over 19918.00 frames. ], tot_loss[loss=0.22, simple_loss=0.2819, pruned_loss=0.07901, over 3856837.65 frames. ], batch size: 45, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:36:06,162 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1590, 3.4003, 3.6467, 4.3987, 2.6960, 3.2816, 2.8303, 2.5521], + device='cuda:0'), covar=tensor([0.0449, 0.2438, 0.0868, 0.0209, 0.2282, 0.0786, 0.1129, 0.1814], + device='cuda:0'), in_proj_covar=tensor([0.0204, 0.0330, 0.0225, 0.0156, 0.0241, 0.0178, 0.0200, 0.0210], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 04:36:49,820 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23041.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:37:34,783 INFO [train.py:892] (0/4) Epoch 13, batch 800, loss[loss=0.2133, simple_loss=0.269, pruned_loss=0.07882, over 19872.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2814, pruned_loss=0.07882, over 3876925.47 frames. ], batch size: 138, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:38:37,334 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23089.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:39:23,148 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23108.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:39:28,924 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.056e+02 4.768e+02 5.306e+02 6.241e+02 1.399e+03, threshold=1.061e+03, percent-clipped=1.0 +2023-03-28 04:39:28,948 INFO [train.py:892] (0/4) Epoch 13, batch 850, loss[loss=0.2019, simple_loss=0.2697, pruned_loss=0.06705, over 19826.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2799, pruned_loss=0.07784, over 3894401.28 frames. ], batch size: 75, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:40:56,348 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23150.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:41:21,498 INFO [train.py:892] (0/4) Epoch 13, batch 900, loss[loss=0.1992, simple_loss=0.2568, pruned_loss=0.07083, over 19849.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2794, pruned_loss=0.07736, over 3907068.31 frames. 
], batch size: 137, lr: 1.22e-02, grad_scale: 16.0 +2023-03-28 04:41:39,582 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23169.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:41:44,097 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4010, 1.6246, 1.9997, 2.5689, 2.8467, 3.0025, 2.9092, 3.0744], + device='cuda:0'), covar=tensor([0.0869, 0.2025, 0.1392, 0.0622, 0.0427, 0.0261, 0.0289, 0.0288], + device='cuda:0'), in_proj_covar=tensor([0.0139, 0.0168, 0.0159, 0.0132, 0.0112, 0.0105, 0.0098, 0.0096], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 04:42:09,164 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23181.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:43:08,638 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1014, 3.0321, 2.9167, 2.6838, 2.9919, 2.5034, 2.3138, 1.6434], + device='cuda:0'), covar=tensor([0.0264, 0.0265, 0.0176, 0.0236, 0.0180, 0.0741, 0.0790, 0.1560], + device='cuda:0'), in_proj_covar=tensor([0.0085, 0.0120, 0.0099, 0.0114, 0.0101, 0.0119, 0.0129, 0.0115], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 04:43:15,285 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.371e+02 4.496e+02 5.311e+02 6.592e+02 1.240e+03, threshold=1.062e+03, percent-clipped=1.0 +2023-03-28 04:43:15,312 INFO [train.py:892] (0/4) Epoch 13, batch 950, loss[loss=0.2026, simple_loss=0.272, pruned_loss=0.06664, over 19837.00 frames. ], tot_loss[loss=0.2184, simple_loss=0.2811, pruned_loss=0.07788, over 3915213.88 frames. ], batch size: 93, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:43:28,222 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23217.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:43:36,931 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-03-28 04:43:53,042 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23229.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:45:02,590 INFO [train.py:892] (0/4) Epoch 13, batch 1000, loss[loss=0.1943, simple_loss=0.2644, pruned_loss=0.06213, over 19806.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2806, pruned_loss=0.07751, over 3923025.20 frames. 
], batch size: 98, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:45:14,954 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4975, 2.2163, 3.7203, 3.2266, 3.6630, 3.6700, 3.5240, 3.4883], + device='cuda:0'), covar=tensor([0.0247, 0.0813, 0.0102, 0.0588, 0.0108, 0.0224, 0.0159, 0.0135], + device='cuda:0'), in_proj_covar=tensor([0.0083, 0.0091, 0.0074, 0.0145, 0.0067, 0.0082, 0.0076, 0.0066], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 04:45:19,021 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4442, 3.6963, 3.9154, 4.7437, 2.9821, 3.4170, 3.0546, 2.7679], + device='cuda:0'), covar=tensor([0.0418, 0.2015, 0.0871, 0.0236, 0.2115, 0.0797, 0.1080, 0.1690], + device='cuda:0'), in_proj_covar=tensor([0.0203, 0.0332, 0.0229, 0.0159, 0.0240, 0.0178, 0.0202, 0.0209], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 04:45:39,908 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23278.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:46:09,867 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23290.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:46:20,643 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23295.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:46:55,457 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.267e+02 4.704e+02 5.526e+02 6.494e+02 9.984e+02, threshold=1.105e+03, percent-clipped=0.0 +2023-03-28 04:46:55,492 INFO [train.py:892] (0/4) Epoch 13, batch 1050, loss[loss=0.1969, simple_loss=0.2585, pruned_loss=0.06765, over 19767.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2805, pruned_loss=0.07777, over 3927212.08 frames. ], batch size: 113, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:47:58,027 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2914, 4.0626, 4.1188, 3.8806, 4.2362, 3.2048, 3.6178, 2.2700], + device='cuda:0'), covar=tensor([0.0182, 0.0208, 0.0137, 0.0172, 0.0116, 0.0731, 0.0715, 0.1262], + device='cuda:0'), in_proj_covar=tensor([0.0086, 0.0120, 0.0099, 0.0115, 0.0101, 0.0118, 0.0131, 0.0115], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 04:48:07,925 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23343.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:48:38,367 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4500, 4.5180, 4.8759, 4.5016, 4.0973, 4.6996, 4.4881, 5.0550], + device='cuda:0'), covar=tensor([0.0936, 0.0304, 0.0332, 0.0338, 0.0779, 0.0416, 0.0424, 0.0283], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0192, 0.0187, 0.0198, 0.0186, 0.0196, 0.0192, 0.0178], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 04:48:49,794 INFO [train.py:892] (0/4) Epoch 13, batch 1100, loss[loss=0.1862, simple_loss=0.2492, pruned_loss=0.06157, over 19658.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2796, pruned_loss=0.07662, over 3932207.35 frames. ], batch size: 47, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:50:43,307 INFO [train.py:892] (0/4) Epoch 13, batch 1150, loss[loss=0.2445, simple_loss=0.2929, pruned_loss=0.09807, over 19872.00 frames. 
], tot_loss[loss=0.2166, simple_loss=0.2798, pruned_loss=0.07667, over 3936640.69 frames. ], batch size: 138, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:50:45,463 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.703e+02 4.708e+02 5.742e+02 7.091e+02 1.301e+03, threshold=1.148e+03, percent-clipped=2.0 +2023-03-28 04:52:12,397 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23450.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:52:34,817 INFO [train.py:892] (0/4) Epoch 13, batch 1200, loss[loss=0.2171, simple_loss=0.2732, pruned_loss=0.08051, over 19727.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2801, pruned_loss=0.07716, over 3939059.27 frames. ], batch size: 219, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:52:42,167 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23464.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:53:07,540 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-28 04:53:21,198 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23481.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:53:36,672 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23488.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:53:58,246 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23498.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:54:26,403 INFO [train.py:892] (0/4) Epoch 13, batch 1250, loss[loss=0.2024, simple_loss=0.2615, pruned_loss=0.07164, over 19781.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2797, pruned_loss=0.07712, over 3942350.87 frames. ], batch size: 131, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:54:28,503 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.878e+02 4.638e+02 5.638e+02 7.017e+02 1.329e+03, threshold=1.128e+03, percent-clipped=4.0 +2023-03-28 04:55:03,132 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23529.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:55:48,493 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23549.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 04:56:12,796 INFO [train.py:892] (0/4) Epoch 13, batch 1300, loss[loss=0.3411, simple_loss=0.3855, pruned_loss=0.1483, over 19442.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.2803, pruned_loss=0.07729, over 3944609.41 frames. ], batch size: 396, lr: 1.21e-02, grad_scale: 16.0 +2023-03-28 04:56:18,752 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. 
limit=2.0 +2023-03-28 04:56:39,735 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23573.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:57:08,673 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23585.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 04:57:11,059 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6849, 4.6528, 5.1075, 4.9112, 4.9660, 4.3109, 4.7509, 4.6843], + device='cuda:0'), covar=tensor([0.1138, 0.1173, 0.0847, 0.0974, 0.0742, 0.1010, 0.1878, 0.1815], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0256, 0.0312, 0.0237, 0.0229, 0.0224, 0.0300, 0.0331], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 04:58:05,891 INFO [train.py:892] (0/4) Epoch 13, batch 1350, loss[loss=0.2112, simple_loss=0.2704, pruned_loss=0.07594, over 19858.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2798, pruned_loss=0.07718, over 3946473.24 frames. ], batch size: 106, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 04:58:08,085 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.191e+02 4.401e+02 5.493e+02 6.790e+02 1.400e+03, threshold=1.099e+03, percent-clipped=0.0 +2023-03-28 04:58:41,776 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7469, 3.1579, 4.8983, 4.0278, 4.5208, 4.7130, 4.6757, 4.4690], + device='cuda:0'), covar=tensor([0.0162, 0.0652, 0.0075, 0.0934, 0.0103, 0.0163, 0.0120, 0.0100], + device='cuda:0'), in_proj_covar=tensor([0.0082, 0.0091, 0.0073, 0.0144, 0.0067, 0.0081, 0.0076, 0.0066], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 04:58:52,073 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3759, 4.0021, 4.1356, 4.4875, 4.1642, 4.6016, 4.4324, 4.6559], + device='cuda:0'), covar=tensor([0.0742, 0.0471, 0.0580, 0.0336, 0.0687, 0.0411, 0.0778, 0.0482], + device='cuda:0'), in_proj_covar=tensor([0.0130, 0.0150, 0.0175, 0.0145, 0.0147, 0.0131, 0.0136, 0.0167], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 04:59:59,165 INFO [train.py:892] (0/4) Epoch 13, batch 1400, loss[loss=0.2176, simple_loss=0.2757, pruned_loss=0.07971, over 19820.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2811, pruned_loss=0.07831, over 3944945.38 frames. ], batch size: 204, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:00:10,214 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8227, 3.1193, 3.3216, 3.8498, 2.7143, 3.1428, 2.5106, 2.4432], + device='cuda:0'), covar=tensor([0.0458, 0.1876, 0.0844, 0.0250, 0.1857, 0.0620, 0.1199, 0.1524], + device='cuda:0'), in_proj_covar=tensor([0.0204, 0.0328, 0.0227, 0.0159, 0.0239, 0.0179, 0.0199, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 05:01:47,983 INFO [train.py:892] (0/4) Epoch 13, batch 1450, loss[loss=0.1999, simple_loss=0.2717, pruned_loss=0.06408, over 19762.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2815, pruned_loss=0.07795, over 3944070.02 frames. 
], batch size: 49, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:01:50,146 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.049e+02 4.743e+02 5.551e+02 6.654e+02 1.229e+03, threshold=1.110e+03, percent-clipped=4.0 +2023-03-28 05:03:37,022 INFO [train.py:892] (0/4) Epoch 13, batch 1500, loss[loss=0.1926, simple_loss=0.2568, pruned_loss=0.06422, over 19839.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2804, pruned_loss=0.077, over 3947139.54 frames. ], batch size: 144, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:03:45,580 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23764.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:04:37,149 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.60 vs. limit=2.0 +2023-03-28 05:05:29,856 INFO [train.py:892] (0/4) Epoch 13, batch 1550, loss[loss=0.1894, simple_loss=0.2558, pruned_loss=0.06152, over 19872.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2809, pruned_loss=0.07731, over 3947889.54 frames. ], batch size: 108, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:05:31,562 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.081e+02 4.439e+02 5.194e+02 6.421e+02 1.590e+03, threshold=1.039e+03, percent-clipped=3.0 +2023-03-28 05:05:32,315 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23812.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:06:39,286 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23844.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 05:07:17,771 INFO [train.py:892] (0/4) Epoch 13, batch 1600, loss[loss=0.1633, simple_loss=0.2281, pruned_loss=0.04923, over 19834.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.282, pruned_loss=0.0786, over 3947941.96 frames. ], batch size: 128, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:07:45,128 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23873.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:08:12,045 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23885.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:09:11,696 INFO [train.py:892] (0/4) Epoch 13, batch 1650, loss[loss=0.2402, simple_loss=0.3014, pruned_loss=0.08951, over 19778.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2818, pruned_loss=0.07826, over 3946001.34 frames. ], batch size: 273, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:09:13,800 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.923e+02 4.526e+02 5.652e+02 6.938e+02 1.810e+03, threshold=1.130e+03, percent-clipped=2.0 +2023-03-28 05:09:35,329 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23921.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:10:02,305 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23933.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:11:04,317 INFO [train.py:892] (0/4) Epoch 13, batch 1700, loss[loss=0.2139, simple_loss=0.2877, pruned_loss=0.07007, over 19585.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2811, pruned_loss=0.0778, over 3947453.97 frames. 
], batch size: 53, lr: 1.20e-02, grad_scale: 16.0 +2023-03-28 05:11:32,743 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5535, 4.2541, 4.3406, 4.0903, 4.4953, 3.1640, 3.8262, 2.3070], + device='cuda:0'), covar=tensor([0.0171, 0.0193, 0.0138, 0.0186, 0.0129, 0.0841, 0.0688, 0.1352], + device='cuda:0'), in_proj_covar=tensor([0.0088, 0.0123, 0.0102, 0.0117, 0.0103, 0.0122, 0.0131, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 05:12:30,264 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-24000.pt +2023-03-28 05:12:42,984 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7167, 3.1249, 3.4499, 3.4403, 3.9451, 4.0060, 4.5829, 5.0925], + device='cuda:0'), covar=tensor([0.0438, 0.1515, 0.1328, 0.1756, 0.1457, 0.1145, 0.0474, 0.0329], + device='cuda:0'), in_proj_covar=tensor([0.0216, 0.0219, 0.0241, 0.0236, 0.0265, 0.0233, 0.0188, 0.0196], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 05:12:44,874 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9966, 3.6121, 3.7870, 4.0838, 3.7963, 4.0032, 4.1553, 4.2533], + device='cuda:0'), covar=tensor([0.0653, 0.0415, 0.0489, 0.0289, 0.0570, 0.0455, 0.0365, 0.0308], + device='cuda:0'), in_proj_covar=tensor([0.0131, 0.0151, 0.0176, 0.0145, 0.0148, 0.0132, 0.0137, 0.0168], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 05:12:55,562 INFO [train.py:892] (0/4) Epoch 13, batch 1750, loss[loss=0.2346, simple_loss=0.2875, pruned_loss=0.09086, over 19734.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2789, pruned_loss=0.07666, over 3949019.54 frames. ], batch size: 269, lr: 1.19e-02, grad_scale: 16.0 +2023-03-28 05:12:57,443 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.884e+02 4.345e+02 5.223e+02 6.698e+02 1.111e+03, threshold=1.045e+03, percent-clipped=0.0 +2023-03-28 05:13:13,374 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0372, 2.7795, 3.1204, 2.8786, 3.2691, 3.2370, 3.9023, 4.2358], + device='cuda:0'), covar=tensor([0.0526, 0.1600, 0.1431, 0.1901, 0.1753, 0.1532, 0.0484, 0.0445], + device='cuda:0'), in_proj_covar=tensor([0.0216, 0.0219, 0.0240, 0.0236, 0.0264, 0.0233, 0.0188, 0.0196], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 05:13:45,912 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24037.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:13:45,988 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8311, 3.0054, 3.2033, 3.8471, 2.5662, 3.2064, 2.6490, 2.3140], + device='cuda:0'), covar=tensor([0.0473, 0.2235, 0.0933, 0.0293, 0.2168, 0.0686, 0.1161, 0.1788], + device='cuda:0'), in_proj_covar=tensor([0.0206, 0.0331, 0.0227, 0.0161, 0.0241, 0.0180, 0.0201, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 05:14:30,470 INFO [train.py:892] (0/4) Epoch 13, batch 1800, loss[loss=0.1853, simple_loss=0.2526, pruned_loss=0.05903, over 19930.00 frames. ], tot_loss[loss=0.215, simple_loss=0.278, pruned_loss=0.07596, over 3949851.71 frames. 
], batch size: 51, lr: 1.19e-02, grad_scale: 16.0 +2023-03-28 05:15:37,730 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7529, 4.8277, 5.2027, 4.7014, 4.3716, 4.9147, 4.8320, 5.3775], + device='cuda:0'), covar=tensor([0.0824, 0.0267, 0.0319, 0.0304, 0.0538, 0.0337, 0.0367, 0.0249], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0193, 0.0192, 0.0199, 0.0186, 0.0195, 0.0195, 0.0179], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 05:15:37,791 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24098.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:16:01,668 INFO [train.py:892] (0/4) Epoch 13, batch 1850, loss[loss=0.2258, simple_loss=0.2967, pruned_loss=0.0774, over 19844.00 frames. ], tot_loss[loss=0.216, simple_loss=0.2799, pruned_loss=0.07607, over 3949776.37 frames. ], batch size: 58, lr: 1.19e-02, grad_scale: 16.0 +2023-03-28 05:16:03,701 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 4.684e+02 5.642e+02 7.102e+02 1.280e+03, threshold=1.128e+03, percent-clipped=3.0 +2023-03-28 05:16:09,347 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-13.pt +2023-03-28 05:17:07,693 INFO [train.py:892] (0/4) Epoch 14, batch 0, loss[loss=0.1864, simple_loss=0.2524, pruned_loss=0.06017, over 19769.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2524, pruned_loss=0.06017, over 19769.00 frames. ], batch size: 116, lr: 1.15e-02, grad_scale: 16.0 +2023-03-28 05:17:07,695 INFO [train.py:917] (0/4) Computing validation loss +2023-03-28 05:17:42,314 INFO [train.py:926] (0/4) Epoch 14, validation: loss=0.1725, simple_loss=0.2522, pruned_loss=0.04642, over 2883724.00 frames. +2023-03-28 05:17:42,315 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-28 05:18:52,103 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=24144.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 05:19:45,555 INFO [train.py:892] (0/4) Epoch 14, batch 50, loss[loss=0.231, simple_loss=0.2874, pruned_loss=0.08725, over 19768.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2696, pruned_loss=0.07193, over 891711.89 frames. ], batch size: 280, lr: 1.15e-02, grad_scale: 16.0 +2023-03-28 05:20:44,928 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24192.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:21:30,227 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.134e+02 4.338e+02 5.278e+02 6.303e+02 1.236e+03, threshold=1.056e+03, percent-clipped=1.0 +2023-03-28 05:21:38,385 INFO [train.py:892] (0/4) Epoch 14, batch 100, loss[loss=0.1788, simple_loss=0.2518, pruned_loss=0.0529, over 19561.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2722, pruned_loss=0.07119, over 1569258.27 frames. 
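At each epoch boundary the trainer saves `epoch-N.pt`, then computes a validation loss over the same fixed 2883724.00 frames and reports the peak GPU memory seen so far. A sketch of such a frame-weighted validation pass; `model(batch)` returning a (summed loss, frame count) pair is an assumed interface, not the recipe's real signature:

```python
import torch

@torch.no_grad()
def run_validation(model, valid_loader):
    # Frame-weighted average loss over the whole dev set.
    model.eval()
    loss_sum, frame_sum = 0.0, 0.0
    for batch in valid_loader:
        loss, num_frames = model(batch)  # assumed interface
        loss_sum += float(loss)
        frame_sum += num_frames
    model.train()
    peak_mb = torch.cuda.max_memory_allocated() // (1024 * 1024)
    print(f"validation: loss={loss_sum / frame_sum:.4f}, "
          f"max memory allocated so far is {peak_mb}MB")
    return loss_sum / frame_sum
```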
], batch size: 47, lr: 1.15e-02, grad_scale: 16.0 +2023-03-28 05:21:47,200 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8623, 3.1244, 3.2509, 3.8232, 2.6311, 3.2513, 2.6815, 2.3024], + device='cuda:0'), covar=tensor([0.0445, 0.2049, 0.0923, 0.0315, 0.2115, 0.0687, 0.1090, 0.1755], + device='cuda:0'), in_proj_covar=tensor([0.0206, 0.0333, 0.0229, 0.0163, 0.0242, 0.0181, 0.0201, 0.0209], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 05:23:30,570 INFO [train.py:892] (0/4) Epoch 14, batch 150, loss[loss=0.2336, simple_loss=0.2964, pruned_loss=0.08539, over 19722.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.272, pruned_loss=0.07178, over 2098727.53 frames. ], batch size: 269, lr: 1.15e-02, grad_scale: 16.0 +2023-03-28 05:24:17,925 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-28 05:25:15,402 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.679e+02 4.473e+02 5.483e+02 6.976e+02 1.116e+03, threshold=1.097e+03, percent-clipped=1.0 +2023-03-28 05:25:23,187 INFO [train.py:892] (0/4) Epoch 14, batch 200, loss[loss=0.2136, simple_loss=0.2743, pruned_loss=0.07643, over 19851.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2731, pruned_loss=0.07247, over 2509951.59 frames. ], batch size: 142, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:26:41,493 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.72 vs. limit=2.0 +2023-03-28 05:26:53,830 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8537, 5.2203, 5.2254, 5.1833, 4.8046, 5.2209, 4.6438, 4.7895], + device='cuda:0'), covar=tensor([0.0414, 0.0405, 0.0589, 0.0396, 0.0659, 0.0529, 0.0709, 0.0892], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0214, 0.0249, 0.0211, 0.0205, 0.0196, 0.0222, 0.0259], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 05:27:13,857 INFO [train.py:892] (0/4) Epoch 14, batch 250, loss[loss=0.217, simple_loss=0.2783, pruned_loss=0.07779, over 19780.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2735, pruned_loss=0.0729, over 2827959.06 frames. ], batch size: 215, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:28:09,128 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. 
limit=2.0 +2023-03-28 05:28:15,146 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24393.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:28:28,202 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5531, 3.2575, 3.3623, 3.2914, 3.7349, 3.8409, 4.4349, 4.9140], + device='cuda:0'), covar=tensor([0.0502, 0.1548, 0.1547, 0.2016, 0.1734, 0.1283, 0.0523, 0.0426], + device='cuda:0'), in_proj_covar=tensor([0.0214, 0.0221, 0.0241, 0.0236, 0.0267, 0.0232, 0.0188, 0.0198], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 05:28:38,732 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4234, 2.6023, 3.6557, 2.9205, 3.1895, 3.1941, 1.9708, 2.2279], + device='cuda:0'), covar=tensor([0.0883, 0.2820, 0.0524, 0.0757, 0.1436, 0.1030, 0.2149, 0.2250], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0346, 0.0284, 0.0229, 0.0339, 0.0285, 0.0304, 0.0278], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 05:28:44,843 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3869, 2.4706, 3.6096, 2.8691, 3.1671, 3.0564, 1.9420, 2.0643], + device='cuda:0'), covar=tensor([0.0974, 0.3240, 0.0546, 0.0773, 0.1422, 0.1162, 0.2181, 0.2506], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0346, 0.0285, 0.0229, 0.0339, 0.0285, 0.0304, 0.0278], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 05:28:53,720 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.621e+02 4.746e+02 5.640e+02 6.808e+02 1.324e+03, threshold=1.128e+03, percent-clipped=3.0 +2023-03-28 05:29:04,963 INFO [train.py:892] (0/4) Epoch 14, batch 300, loss[loss=0.2344, simple_loss=0.292, pruned_loss=0.08838, over 19706.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2745, pruned_loss=0.07336, over 3077780.88 frames. ], batch size: 305, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:29:55,371 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7011, 3.1676, 3.6028, 3.3731, 3.8217, 3.8468, 4.6380, 5.0198], + device='cuda:0'), covar=tensor([0.0409, 0.1403, 0.1180, 0.1745, 0.1515, 0.1175, 0.0382, 0.0328], + device='cuda:0'), in_proj_covar=tensor([0.0216, 0.0222, 0.0243, 0.0238, 0.0268, 0.0233, 0.0190, 0.0199], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 05:30:41,069 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-28 05:30:56,508 INFO [train.py:892] (0/4) Epoch 14, batch 350, loss[loss=0.2337, simple_loss=0.2932, pruned_loss=0.08707, over 19786.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2745, pruned_loss=0.07338, over 3272373.50 frames. 
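The `tot_loss[...]` figures behave like frame-weighted running averages with exponential forgetting: within each epoch the frame count grows from roughly 0.89M at batch 50 toward a plateau near 3.95M, which is consistent with running sums decayed by 1 - 1/200 per batch at roughly 19.7k frames per batch. A sketch under that assumption (the constant 200 is inferred, not read from the code):

```python
class RunningFrameLoss:
    # Running *sums* with exponential forgetting, displayed as sum/frames.
    # With ~19.7k frames per batch and decay = 1 - 1/200, the frame total
    # plateaus near 200 * 19.7k ~ 3.95M, matching the "over 394xxxx
    # frames" figures in the surrounding entries.
    def __init__(self, decay: float = 1.0 - 1.0 / 200):  # 200 is assumed
        self.decay = decay
        self.loss_sum = 0.0
        self.frame_sum = 0.0

    def update(self, batch_loss_sum: float, batch_frames: float) -> float:
        self.loss_sum = self.decay * self.loss_sum + batch_loss_sum
        self.frame_sum = self.decay * self.frame_sum + batch_frames
        return self.loss_sum / self.frame_sum
```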
], batch size: 280, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:31:06,255 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7265, 3.2634, 3.3761, 3.2028, 3.9007, 3.8990, 4.6291, 5.0722], + device='cuda:0'), covar=tensor([0.0450, 0.1457, 0.1547, 0.2091, 0.1602, 0.1261, 0.0384, 0.0311], + device='cuda:0'), in_proj_covar=tensor([0.0217, 0.0223, 0.0244, 0.0239, 0.0270, 0.0234, 0.0190, 0.0200], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 05:32:38,970 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 4.463e+02 5.276e+02 6.433e+02 1.148e+03, threshold=1.055e+03, percent-clipped=1.0 +2023-03-28 05:32:48,484 INFO [train.py:892] (0/4) Epoch 14, batch 400, loss[loss=0.1967, simple_loss=0.2634, pruned_loss=0.06503, over 19723.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2748, pruned_loss=0.07348, over 3422996.19 frames. ], batch size: 104, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:32:57,886 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6317, 2.1420, 2.5573, 3.0781, 3.5679, 3.9182, 3.8344, 3.9267], + device='cuda:0'), covar=tensor([0.0863, 0.1941, 0.1378, 0.0577, 0.0397, 0.0193, 0.0240, 0.0278], + device='cuda:0'), in_proj_covar=tensor([0.0141, 0.0169, 0.0164, 0.0132, 0.0116, 0.0109, 0.0102, 0.0098], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 05:34:00,450 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24547.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:34:41,062 INFO [train.py:892] (0/4) Epoch 14, batch 450, loss[loss=0.1978, simple_loss=0.2662, pruned_loss=0.06472, over 19640.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2746, pruned_loss=0.07292, over 3540947.20 frames. ], batch size: 72, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:34:48,647 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24569.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:36:16,276 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24608.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:36:24,997 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.188e+02 4.392e+02 5.020e+02 6.221e+02 1.818e+03, threshold=1.004e+03, percent-clipped=1.0 +2023-03-28 05:36:33,456 INFO [train.py:892] (0/4) Epoch 14, batch 500, loss[loss=0.2148, simple_loss=0.2925, pruned_loss=0.06858, over 19616.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2741, pruned_loss=0.07294, over 3632306.06 frames. ], batch size: 51, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:37:04,327 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24630.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:37:04,642 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. 
limit=2.0 +2023-03-28 05:37:10,344 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1092, 3.3521, 3.5682, 2.8765, 3.6592, 2.9865, 3.0671, 3.6887], + device='cuda:0'), covar=tensor([0.0486, 0.0287, 0.0488, 0.0595, 0.0301, 0.0310, 0.0473, 0.0193], + device='cuda:0'), in_proj_covar=tensor([0.0060, 0.0063, 0.0065, 0.0094, 0.0062, 0.0060, 0.0058, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 05:38:20,297 INFO [train.py:892] (0/4) Epoch 14, batch 550, loss[loss=0.2089, simple_loss=0.2763, pruned_loss=0.07077, over 19742.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2739, pruned_loss=0.07285, over 3703402.82 frames. ], batch size: 77, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:39:20,259 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=24693.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:39:57,874 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.918e+02 4.778e+02 5.297e+02 6.778e+02 1.181e+03, threshold=1.059e+03, percent-clipped=3.0 +2023-03-28 05:40:07,594 INFO [train.py:892] (0/4) Epoch 14, batch 600, loss[loss=0.2122, simple_loss=0.2955, pruned_loss=0.0645, over 19721.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2743, pruned_loss=0.073, over 3758840.40 frames. ], batch size: 54, lr: 1.14e-02, grad_scale: 16.0 +2023-03-28 05:40:35,793 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0299, 4.6009, 4.7152, 5.1396, 4.6870, 5.3639, 5.2661, 5.3599], + device='cuda:0'), covar=tensor([0.0692, 0.0370, 0.0393, 0.0272, 0.0640, 0.0320, 0.0373, 0.0354], + device='cuda:0'), in_proj_covar=tensor([0.0135, 0.0156, 0.0180, 0.0150, 0.0153, 0.0136, 0.0139, 0.0175], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 05:41:01,102 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24741.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:41:18,081 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-28 05:41:21,358 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6138, 2.5954, 1.4615, 3.0982, 2.7937, 3.0590, 3.1399, 2.4451], + device='cuda:0'), covar=tensor([0.0591, 0.0568, 0.1519, 0.0492, 0.0523, 0.0375, 0.0388, 0.0737], + device='cuda:0'), in_proj_covar=tensor([0.0126, 0.0123, 0.0134, 0.0129, 0.0110, 0.0107, 0.0123, 0.0128], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 05:41:54,222 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24765.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:41:55,318 INFO [train.py:892] (0/4) Epoch 14, batch 650, loss[loss=0.202, simple_loss=0.2711, pruned_loss=0.06651, over 19892.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.274, pruned_loss=0.07288, over 3801660.46 frames. 
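The `zipformer.py:625` entries track stochastic layer skipping per encoder stack: each stack has its own `warmup_begin`..`warmup_end` window, and on a given batch zero or more of its layers are dropped (`num_to_drop`, `layers_to_drop`). This far into training the count is usually 0 with an occasional 1, i.e. a small residual drop rate remains. The schedule below is purely illustrative; the real drop probabilities are not recoverable from this log:

```python
import random

def pick_layers_to_drop(batch_count, warmup_begin, warmup_end, num_layers=5):
    # Assumed schedule: no drops before warmup_begin, a decaying rate
    # inside the warmup window, and a small residual rate afterwards.
    if batch_count < warmup_begin:
        p = 0.0
    elif batch_count < warmup_end:
        frac = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
        p = 0.1 * (1.0 - frac)  # assumed decay inside the window
    else:
        p = 0.01                # assumed residual probability
    dropped = {i for i in range(num_layers) if random.random() < p}
    return len(dropped), dropped  # -> num_to_drop, layers_to_drop
```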
], batch size: 88, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:42:39,830 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9913, 2.7156, 3.0960, 2.8480, 3.3258, 3.1404, 3.8651, 4.1520], + device='cuda:0'), covar=tensor([0.0506, 0.1576, 0.1401, 0.1821, 0.1501, 0.1402, 0.0428, 0.0426], + device='cuda:0'), in_proj_covar=tensor([0.0215, 0.0220, 0.0240, 0.0234, 0.0266, 0.0232, 0.0187, 0.0198], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 05:42:49,553 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-28 05:43:34,883 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.281e+02 4.693e+02 5.521e+02 6.311e+02 1.002e+03, threshold=1.104e+03, percent-clipped=0.0 +2023-03-28 05:43:42,883 INFO [train.py:892] (0/4) Epoch 14, batch 700, loss[loss=0.1949, simple_loss=0.2557, pruned_loss=0.06705, over 19742.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2758, pruned_loss=0.07366, over 3833374.95 frames. ], batch size: 134, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:44:08,790 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24826.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:44:55,453 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24848.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:45:32,954 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1439, 3.0675, 1.8156, 4.0170, 3.4968, 3.9454, 4.0107, 2.9450], + device='cuda:0'), covar=tensor([0.0590, 0.0670, 0.1630, 0.0361, 0.0571, 0.0343, 0.0519, 0.0737], + device='cuda:0'), in_proj_covar=tensor([0.0125, 0.0122, 0.0133, 0.0127, 0.0108, 0.0105, 0.0121, 0.0127], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 05:45:36,125 INFO [train.py:892] (0/4) Epoch 14, batch 750, loss[loss=0.1814, simple_loss=0.2537, pruned_loss=0.05453, over 19847.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2747, pruned_loss=0.07308, over 3860113.46 frames. ], batch size: 56, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:46:58,200 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24903.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:47:10,813 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24909.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:47:18,006 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.878e+02 4.189e+02 5.333e+02 6.516e+02 1.195e+03, threshold=1.067e+03, percent-clipped=1.0 +2023-03-28 05:47:26,430 INFO [train.py:892] (0/4) Epoch 14, batch 800, loss[loss=0.1897, simple_loss=0.2573, pruned_loss=0.0611, over 19900.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.274, pruned_loss=0.07259, over 3881560.89 frames. 
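The three logged quantities are consistent with a fixed combination loss = 0.5 * simple_loss + pruned_loss at this stage of training; e.g. the batch 800 entry just above gives 0.5 * 0.2740 + 0.07259 = 0.20959, i.e. the printed 0.2096. A small check (the steady-state scale 0.5 is inferred from the numbers themselves; early in training the pruned term is typically down-weighted, which this one-liner ignores):

```python
def combined_loss(simple_loss, pruned_loss, simple_loss_scale=0.5):
    # Steady-state relation inferred from the logged (loss, simple_loss,
    # pruned_loss) triples in this section.
    return simple_loss_scale * simple_loss + pruned_loss

# Batch 800 entry above: 0.5 * 0.2740 + 0.07259 = 0.20959 ~ 0.2096
assert abs(combined_loss(0.2740, 0.07259) - 0.2096) < 5e-4
```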
], batch size: 94, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:47:45,178 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24925.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:48:26,714 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1545, 3.1144, 1.7607, 3.9342, 3.4326, 3.8383, 4.0151, 2.8631], + device='cuda:0'), covar=tensor([0.0603, 0.0593, 0.1680, 0.0455, 0.0518, 0.0395, 0.0472, 0.0770], + device='cuda:0'), in_proj_covar=tensor([0.0126, 0.0122, 0.0133, 0.0128, 0.0108, 0.0105, 0.0122, 0.0127], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 05:49:15,224 INFO [train.py:892] (0/4) Epoch 14, batch 850, loss[loss=0.1936, simple_loss=0.2575, pruned_loss=0.06482, over 19892.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2751, pruned_loss=0.07335, over 3895366.38 frames. ], batch size: 91, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:49:55,016 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1279, 3.0935, 2.9850, 2.7505, 3.0547, 2.5290, 2.3447, 1.6167], + device='cuda:0'), covar=tensor([0.0277, 0.0246, 0.0197, 0.0219, 0.0194, 0.0772, 0.0794, 0.1643], + device='cuda:0'), in_proj_covar=tensor([0.0089, 0.0123, 0.0103, 0.0117, 0.0104, 0.0122, 0.0132, 0.0117], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 05:50:22,749 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24995.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:50:36,924 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1976, 3.4847, 3.3792, 4.3190, 2.6883, 3.1959, 2.7024, 2.4274], + device='cuda:0'), covar=tensor([0.0430, 0.2279, 0.0975, 0.0233, 0.2125, 0.0729, 0.1358, 0.1804], + device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0333, 0.0230, 0.0161, 0.0240, 0.0182, 0.0204, 0.0210], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 05:51:00,271 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.835e+02 4.699e+02 5.581e+02 6.608e+02 1.039e+03, threshold=1.116e+03, percent-clipped=0.0 +2023-03-28 05:51:08,783 INFO [train.py:892] (0/4) Epoch 14, batch 900, loss[loss=0.1894, simple_loss=0.2573, pruned_loss=0.06071, over 19652.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2757, pruned_loss=0.07355, over 3905603.31 frames. ], batch size: 79, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:52:39,894 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25056.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 05:53:00,422 INFO [train.py:892] (0/4) Epoch 14, batch 950, loss[loss=0.2196, simple_loss=0.2756, pruned_loss=0.08183, over 19798.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2757, pruned_loss=0.07371, over 3916651.66 frames. 
], batch size: 126, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:53:37,479 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4036, 2.5760, 3.5932, 2.8436, 3.2085, 3.0850, 1.9573, 2.1244], + device='cuda:0'), covar=tensor([0.0800, 0.2601, 0.0509, 0.0709, 0.1258, 0.1095, 0.2026, 0.2276], + device='cuda:0'), in_proj_covar=tensor([0.0318, 0.0349, 0.0286, 0.0232, 0.0341, 0.0290, 0.0309, 0.0282], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 05:54:42,043 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.968e+02 4.212e+02 5.107e+02 6.466e+02 1.651e+03, threshold=1.021e+03, percent-clipped=2.0 +2023-03-28 05:54:50,390 INFO [train.py:892] (0/4) Epoch 14, batch 1000, loss[loss=0.2089, simple_loss=0.2682, pruned_loss=0.07479, over 19784.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2747, pruned_loss=0.07346, over 3924089.94 frames. ], batch size: 168, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:55:03,266 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25121.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:55:36,773 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0688, 3.0204, 4.4182, 3.7999, 4.2411, 4.3148, 4.3544, 4.0954], + device='cuda:0'), covar=tensor([0.0242, 0.0660, 0.0090, 0.0920, 0.0102, 0.0230, 0.0133, 0.0136], + device='cuda:0'), in_proj_covar=tensor([0.0081, 0.0090, 0.0074, 0.0143, 0.0067, 0.0082, 0.0076, 0.0067], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 05:56:40,706 INFO [train.py:892] (0/4) Epoch 14, batch 1050, loss[loss=0.2192, simple_loss=0.2827, pruned_loss=0.07792, over 19830.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2752, pruned_loss=0.07392, over 3930210.06 frames. ], batch size: 208, lr: 1.13e-02, grad_scale: 16.0 +2023-03-28 05:58:05,151 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25203.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:58:07,140 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25204.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:58:07,481 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5130, 3.1297, 3.3902, 3.2903, 3.8297, 3.7383, 4.3856, 4.8157], + device='cuda:0'), covar=tensor([0.0465, 0.1386, 0.1309, 0.1714, 0.1337, 0.1114, 0.0434, 0.0328], + device='cuda:0'), in_proj_covar=tensor([0.0218, 0.0222, 0.0243, 0.0237, 0.0268, 0.0233, 0.0190, 0.0201], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 05:58:23,775 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.002e+02 4.493e+02 5.579e+02 6.984e+02 1.281e+03, threshold=1.116e+03, percent-clipped=2.0 +2023-03-28 05:58:32,627 INFO [train.py:892] (0/4) Epoch 14, batch 1100, loss[loss=0.1945, simple_loss=0.2632, pruned_loss=0.06288, over 19746.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2779, pruned_loss=0.07451, over 3929830.00 frames. ], batch size: 97, lr: 1.12e-02, grad_scale: 16.0 +2023-03-28 05:58:53,554 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25225.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 05:59:47,341 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. 
limit=2.0 +2023-03-28 05:59:55,584 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25251.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:00:27,168 INFO [train.py:892] (0/4) Epoch 14, batch 1150, loss[loss=0.1873, simple_loss=0.2521, pruned_loss=0.06127, over 19594.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2771, pruned_loss=0.07462, over 3935022.81 frames. ], batch size: 45, lr: 1.12e-02, grad_scale: 16.0 +2023-03-28 06:00:45,924 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25273.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:02:10,587 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.130e+02 4.700e+02 5.345e+02 6.282e+02 1.184e+03, threshold=1.069e+03, percent-clipped=1.0 +2023-03-28 06:02:18,768 INFO [train.py:892] (0/4) Epoch 14, batch 1200, loss[loss=0.1942, simple_loss=0.2596, pruned_loss=0.06442, over 19738.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.277, pruned_loss=0.07469, over 3938592.78 frames. ], batch size: 71, lr: 1.12e-02, grad_scale: 16.0 +2023-03-28 06:03:35,521 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25351.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 06:04:08,264 INFO [train.py:892] (0/4) Epoch 14, batch 1250, loss[loss=0.2512, simple_loss=0.3106, pruned_loss=0.09586, over 19639.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2756, pruned_loss=0.07413, over 3940042.72 frames. ], batch size: 343, lr: 1.12e-02, grad_scale: 32.0 +2023-03-28 06:05:25,206 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9389, 3.6220, 3.7572, 3.9655, 3.7628, 4.0284, 4.1252, 4.2180], + device='cuda:0'), covar=tensor([0.0690, 0.0415, 0.0440, 0.0302, 0.0600, 0.0406, 0.0359, 0.0299], + device='cuda:0'), in_proj_covar=tensor([0.0131, 0.0152, 0.0176, 0.0147, 0.0149, 0.0131, 0.0136, 0.0170], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 06:05:51,971 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.364e+02 4.665e+02 5.361e+02 6.465e+02 1.187e+03, threshold=1.072e+03, percent-clipped=1.0 +2023-03-28 06:06:00,925 INFO [train.py:892] (0/4) Epoch 14, batch 1300, loss[loss=0.2369, simple_loss=0.2972, pruned_loss=0.08825, over 19784.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2762, pruned_loss=0.07423, over 3941505.42 frames. ], batch size: 224, lr: 1.12e-02, grad_scale: 32.0 +2023-03-28 06:06:14,788 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25421.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:06:30,843 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25428.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 06:07:56,762 INFO [train.py:892] (0/4) Epoch 14, batch 1350, loss[loss=0.1941, simple_loss=0.2765, pruned_loss=0.05584, over 19672.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2746, pruned_loss=0.07312, over 3943944.48 frames. ], batch size: 51, lr: 1.12e-02, grad_scale: 32.0 +2023-03-28 06:08:04,343 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25469.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:08:31,075 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-28 06:08:47,342 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.79 vs. 
limit=5.0 +2023-03-28 06:08:50,160 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25489.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 06:09:23,667 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25504.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:09:38,483 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.060e+02 4.588e+02 5.403e+02 6.911e+02 1.423e+03, threshold=1.081e+03, percent-clipped=2.0 +2023-03-28 06:09:46,921 INFO [train.py:892] (0/4) Epoch 14, batch 1400, loss[loss=0.3602, simple_loss=0.403, pruned_loss=0.1587, over 19251.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.274, pruned_loss=0.07292, over 3946199.05 frames. ], batch size: 483, lr: 1.12e-02, grad_scale: 32.0 +2023-03-28 06:11:11,881 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25552.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:11:39,075 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1033, 3.2537, 3.5632, 2.6276, 3.6779, 2.9498, 3.0544, 3.6644], + device='cuda:0'), covar=tensor([0.0758, 0.0308, 0.0618, 0.0738, 0.0373, 0.0329, 0.0405, 0.0193], + device='cuda:0'), in_proj_covar=tensor([0.0061, 0.0064, 0.0066, 0.0094, 0.0063, 0.0060, 0.0059, 0.0052], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 06:11:41,925 INFO [train.py:892] (0/4) Epoch 14, batch 1450, loss[loss=0.218, simple_loss=0.2812, pruned_loss=0.07741, over 19643.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2742, pruned_loss=0.07247, over 3946160.91 frames. ], batch size: 66, lr: 1.12e-02, grad_scale: 32.0 +2023-03-28 06:13:26,191 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.132e+02 4.450e+02 5.479e+02 6.667e+02 1.081e+03, threshold=1.096e+03, percent-clipped=1.0 +2023-03-28 06:13:32,608 INFO [train.py:892] (0/4) Epoch 14, batch 1500, loss[loss=0.2361, simple_loss=0.2983, pruned_loss=0.08695, over 19699.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2751, pruned_loss=0.07269, over 3945011.75 frames. ], batch size: 265, lr: 1.12e-02, grad_scale: 16.0 +2023-03-28 06:14:51,749 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25651.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 06:15:22,138 INFO [train.py:892] (0/4) Epoch 14, batch 1550, loss[loss=0.2245, simple_loss=0.2998, pruned_loss=0.0746, over 19580.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2763, pruned_loss=0.07339, over 3945814.35 frames. ], batch size: 49, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:16:39,077 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25699.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:16:56,652 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.4746, 5.8201, 5.8241, 5.7239, 5.3993, 5.7814, 5.1007, 5.1839], + device='cuda:0'), covar=tensor([0.0394, 0.0436, 0.0540, 0.0456, 0.0591, 0.0555, 0.0661, 0.0928], + device='cuda:0'), in_proj_covar=tensor([0.0218, 0.0221, 0.0255, 0.0219, 0.0212, 0.0202, 0.0229, 0.0264], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 06:17:02,899 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. 
limit=2.0 +2023-03-28 06:17:12,374 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.797e+02 4.271e+02 5.006e+02 6.472e+02 1.345e+03, threshold=1.001e+03, percent-clipped=3.0 +2023-03-28 06:17:13,805 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-03-28 06:17:19,582 INFO [train.py:892] (0/4) Epoch 14, batch 1600, loss[loss=0.21, simple_loss=0.279, pruned_loss=0.07049, over 19852.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2746, pruned_loss=0.07198, over 3947169.88 frames. ], batch size: 60, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:18:01,761 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9984, 2.9303, 4.4788, 3.8533, 4.2468, 4.3228, 4.3314, 4.0388], + device='cuda:0'), covar=tensor([0.0236, 0.0724, 0.0080, 0.0742, 0.0098, 0.0188, 0.0103, 0.0138], + device='cuda:0'), in_proj_covar=tensor([0.0082, 0.0092, 0.0073, 0.0143, 0.0068, 0.0083, 0.0076, 0.0068], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0004, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 06:19:09,494 INFO [train.py:892] (0/4) Epoch 14, batch 1650, loss[loss=0.1951, simple_loss=0.2532, pruned_loss=0.06847, over 19747.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2745, pruned_loss=0.07238, over 3947668.59 frames. ], batch size: 140, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:19:49,865 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25784.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 06:20:52,546 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.972e+02 4.640e+02 5.535e+02 6.802e+02 1.148e+03, threshold=1.107e+03, percent-clipped=2.0 +2023-03-28 06:21:01,424 INFO [train.py:892] (0/4) Epoch 14, batch 1700, loss[loss=0.2279, simple_loss=0.2839, pruned_loss=0.08596, over 19798.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2762, pruned_loss=0.07397, over 3946492.08 frames. ], batch size: 231, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:22:29,008 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25858.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 06:22:38,664 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8230, 2.2727, 2.8656, 3.3531, 3.8570, 4.1870, 3.9473, 4.2688], + device='cuda:0'), covar=tensor([0.0885, 0.1774, 0.1221, 0.0491, 0.0285, 0.0161, 0.0273, 0.0273], + device='cuda:0'), in_proj_covar=tensor([0.0145, 0.0168, 0.0167, 0.0132, 0.0116, 0.0109, 0.0106, 0.0099], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 06:22:43,569 INFO [train.py:892] (0/4) Epoch 14, batch 1750, loss[loss=0.1881, simple_loss=0.2557, pruned_loss=0.06025, over 19870.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2759, pruned_loss=0.0735, over 3946806.36 frames. ], batch size: 46, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:22:46,955 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-03-28 06:24:15,394 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.161e+02 4.534e+02 5.430e+02 6.216e+02 1.247e+03, threshold=1.086e+03, percent-clipped=3.0 +2023-03-28 06:24:21,529 INFO [train.py:892] (0/4) Epoch 14, batch 1800, loss[loss=0.2042, simple_loss=0.2814, pruned_loss=0.06346, over 19531.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2743, pruned_loss=0.07225, over 3948260.46 frames. 
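The `grad_scale` value in these entries moves in powers of two (16.0 earlier, 32.0 around batch 1250 to 1450, back to 16.0 by batch 1500, and 8.0 later in epoch 15), which is the signature of a dynamic fp16 loss scaler that doubles after a run of overflow-free steps and halves on overflow. A minimal loop using PyTorch's stock scaler (the recipe may wrap this differently):

```python
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=16.0)

def fp16_step(model, optimizer, compute_loss):
    # Standard dynamic loss scaling: scale the loss before backward so
    # fp16 gradients stay representable, then unscale inside step().
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = compute_loss(model)
    scaler.scale(loss).backward()
    scaler.step(optimizer)   # skipped internally if grads overflowed
    scaler.update()          # halves the scale on overflow, else slowly doubles
    return scaler.get_scale()  # the grad_scale printed in the log
```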
], batch size: 54, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:24:27,961 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25919.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 06:24:35,755 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4336, 2.8427, 2.2542, 1.7447, 2.4367, 2.6762, 2.6884, 2.6544], + device='cuda:0'), covar=tensor([0.0228, 0.0178, 0.0274, 0.0566, 0.0347, 0.0190, 0.0188, 0.0184], + device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0067, 0.0075, 0.0082, 0.0086, 0.0059, 0.0056, 0.0059], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-03-28 06:25:52,592 INFO [train.py:892] (0/4) Epoch 14, batch 1850, loss[loss=0.2171, simple_loss=0.297, pruned_loss=0.06857, over 19840.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.2769, pruned_loss=0.07235, over 3947595.35 frames. ], batch size: 58, lr: 1.11e-02, grad_scale: 16.0 +2023-03-28 06:26:00,551 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-14.pt +2023-03-28 06:26:59,294 INFO [train.py:892] (0/4) Epoch 15, batch 0, loss[loss=0.2049, simple_loss=0.2573, pruned_loss=0.07627, over 19763.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2573, pruned_loss=0.07627, over 19763.00 frames. ], batch size: 205, lr: 1.07e-02, grad_scale: 16.0 +2023-03-28 06:26:59,295 INFO [train.py:917] (0/4) Computing validation loss +2023-03-28 06:27:35,344 INFO [train.py:926] (0/4) Epoch 15, validation: loss=0.1719, simple_loss=0.2516, pruned_loss=0.0461, over 2883724.00 frames. +2023-03-28 06:27:35,345 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-28 06:28:30,936 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-03-28 06:28:41,210 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-26000.pt +2023-03-28 06:29:05,170 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-28 06:29:17,376 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.761e+02 4.443e+02 5.358e+02 6.382e+02 1.006e+03, threshold=1.072e+03, percent-clipped=0.0 +2023-03-28 06:29:35,980 INFO [train.py:892] (0/4) Epoch 15, batch 50, loss[loss=0.2076, simple_loss=0.278, pruned_loss=0.06858, over 19745.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2703, pruned_loss=0.07103, over 889513.21 frames. ], batch size: 100, lr: 1.07e-02, grad_scale: 16.0 +2023-03-28 06:29:53,153 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1189, 2.8818, 3.1949, 2.8980, 3.3626, 3.2638, 3.9357, 4.3287], + device='cuda:0'), covar=tensor([0.0573, 0.1724, 0.1370, 0.2040, 0.1854, 0.1533, 0.0494, 0.0464], + device='cuda:0'), in_proj_covar=tensor([0.0224, 0.0225, 0.0249, 0.0241, 0.0274, 0.0238, 0.0194, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 06:31:24,918 INFO [train.py:892] (0/4) Epoch 15, batch 100, loss[loss=0.1998, simple_loss=0.2596, pruned_loss=0.07003, over 19854.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2718, pruned_loss=0.07158, over 1566705.85 frames. 
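The learning rate steps down both within epochs (1.20e-02 to 1.19e-02 during epoch 13) and at epoch boundaries (1.11e-02 at the end of epoch 14, 1.07e-02 from epoch 15 batch 0 above). These values are consistent with an Eden-style schedule; the sketch below reproduces them, but the exact formula and the settings base_lr = 0.05, lr_batches = 5000, lr_epochs = 3.5 are assumptions, not read from the code:

```python
def eden_lr(base_lr, batch, epoch, lr_batches=5000.0, lr_epochs=3.5):
    # Assumed Eden-style schedule: e.g. eden_lr(0.05, 25900, 13) ~ 1.11e-02,
    # matching the lr logged late in epoch 14, and stepping `epoch` to 14
    # gives ~1.07e-02, matching the start of epoch 15 above.
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor
```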
], batch size: 197, lr: 1.07e-02, grad_scale: 16.0 +2023-03-28 06:31:55,575 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26084.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 06:32:34,985 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26102.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:32:57,733 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.557e+02 4.232e+02 5.319e+02 6.491e+02 1.379e+03, threshold=1.064e+03, percent-clipped=5.0 +2023-03-28 06:33:15,666 INFO [train.py:892] (0/4) Epoch 15, batch 150, loss[loss=0.2037, simple_loss=0.2674, pruned_loss=0.07, over 19780.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2689, pruned_loss=0.06939, over 2094763.75 frames. ], batch size: 213, lr: 1.07e-02, grad_scale: 16.0 +2023-03-28 06:33:41,644 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26132.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 06:34:52,823 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26163.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:35:03,839 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26168.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:35:09,203 INFO [train.py:892] (0/4) Epoch 15, batch 200, loss[loss=0.2075, simple_loss=0.2644, pruned_loss=0.0753, over 19767.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2729, pruned_loss=0.07174, over 2505080.79 frames. ], batch size: 198, lr: 1.07e-02, grad_scale: 16.0 +2023-03-28 06:35:33,333 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0029, 3.2297, 3.4997, 4.0939, 2.7277, 3.1641, 2.6250, 2.4781], + device='cuda:0'), covar=tensor([0.0479, 0.2535, 0.0891, 0.0283, 0.2167, 0.0765, 0.1334, 0.1918], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0329, 0.0230, 0.0163, 0.0239, 0.0181, 0.0200, 0.0210], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 06:36:44,911 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.302e+02 4.298e+02 5.201e+02 6.207e+02 1.187e+03, threshold=1.040e+03, percent-clipped=4.0 +2023-03-28 06:36:48,116 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26214.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 06:37:02,573 INFO [train.py:892] (0/4) Epoch 15, batch 250, loss[loss=0.3872, simple_loss=0.4129, pruned_loss=0.1808, over 19245.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2727, pruned_loss=0.07188, over 2824808.98 frames. ], batch size: 452, lr: 1.07e-02, grad_scale: 16.0 +2023-03-28 06:37:24,871 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26229.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:38:12,211 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26250.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:38:58,534 INFO [train.py:892] (0/4) Epoch 15, batch 300, loss[loss=0.189, simple_loss=0.2561, pruned_loss=0.06099, over 19864.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2733, pruned_loss=0.07192, over 3074669.49 frames. ], batch size: 89, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:38:59,848 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.72 vs. 
limit=5.0 +2023-03-28 06:40:19,718 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.6545, 5.9226, 6.0252, 5.8739, 5.7364, 5.9670, 5.2999, 5.3172], + device='cuda:0'), covar=tensor([0.0382, 0.0435, 0.0508, 0.0383, 0.0588, 0.0550, 0.0622, 0.0888], + device='cuda:0'), in_proj_covar=tensor([0.0217, 0.0221, 0.0252, 0.0217, 0.0211, 0.0201, 0.0230, 0.0264], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 06:40:32,447 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26311.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:40:36,421 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.754e+02 4.352e+02 4.963e+02 6.294e+02 9.634e+02, threshold=9.926e+02, percent-clipped=0.0 +2023-03-28 06:40:55,204 INFO [train.py:892] (0/4) Epoch 15, batch 350, loss[loss=0.2644, simple_loss=0.3195, pruned_loss=0.1047, over 19625.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2727, pruned_loss=0.0712, over 3268390.04 frames. ], batch size: 351, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:42:49,747 INFO [train.py:892] (0/4) Epoch 15, batch 400, loss[loss=0.2132, simple_loss=0.2809, pruned_loss=0.07273, over 19794.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2724, pruned_loss=0.07091, over 3419064.70 frames. ], batch size: 173, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:43:52,021 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-28 06:44:23,282 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.029e+02 4.352e+02 5.044e+02 6.255e+02 1.053e+03, threshold=1.009e+03, percent-clipped=1.0 +2023-03-28 06:44:28,506 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26414.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:44:43,041 INFO [train.py:892] (0/4) Epoch 15, batch 450, loss[loss=0.2286, simple_loss=0.2871, pruned_loss=0.08502, over 19747.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2725, pruned_loss=0.0711, over 3536949.78 frames. ], batch size: 259, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:46:07,554 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26458.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:46:35,452 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6515, 2.7286, 4.1625, 3.7322, 4.0474, 4.1695, 4.0552, 3.9242], + device='cuda:0'), covar=tensor([0.0291, 0.0759, 0.0093, 0.0736, 0.0116, 0.0205, 0.0144, 0.0132], + device='cuda:0'), in_proj_covar=tensor([0.0082, 0.0092, 0.0075, 0.0146, 0.0069, 0.0085, 0.0077, 0.0069], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 06:46:36,547 INFO [train.py:892] (0/4) Epoch 15, batch 500, loss[loss=0.2165, simple_loss=0.2786, pruned_loss=0.07717, over 19898.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2718, pruned_loss=0.07068, over 3629617.15 frames. 
], batch size: 113, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:46:46,232 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4250, 1.7566, 2.2466, 2.7978, 3.1017, 3.2265, 3.2146, 3.3410], + device='cuda:0'), covar=tensor([0.1036, 0.2117, 0.1469, 0.0630, 0.0451, 0.0325, 0.0369, 0.0300], + device='cuda:0'), in_proj_covar=tensor([0.0146, 0.0174, 0.0170, 0.0136, 0.0118, 0.0112, 0.0109, 0.0102], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 06:46:46,277 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26475.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:48:13,615 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.684e+02 4.387e+02 5.126e+02 6.696e+02 1.197e+03, threshold=1.025e+03, percent-clipped=1.0 +2023-03-28 06:48:16,598 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26514.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 06:48:30,973 INFO [train.py:892] (0/4) Epoch 15, batch 550, loss[loss=0.2405, simple_loss=0.2929, pruned_loss=0.09411, over 19790.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2713, pruned_loss=0.07027, over 3702276.59 frames. ], batch size: 247, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:48:40,875 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26524.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:49:29,412 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7239, 2.1822, 2.6406, 2.9827, 3.4642, 3.6683, 3.6818, 3.7256], + device='cuda:0'), covar=tensor([0.0857, 0.1709, 0.1151, 0.0633, 0.0363, 0.0270, 0.0301, 0.0417], + device='cuda:0'), in_proj_covar=tensor([0.0145, 0.0171, 0.0169, 0.0136, 0.0118, 0.0112, 0.0108, 0.0103], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 06:50:06,373 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26562.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 06:50:26,046 INFO [train.py:892] (0/4) Epoch 15, batch 600, loss[loss=0.1972, simple_loss=0.2636, pruned_loss=0.06545, over 19800.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2741, pruned_loss=0.07184, over 3754891.56 frames. ], batch size: 200, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:51:44,802 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26606.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:52:01,001 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.578e+02 4.242e+02 5.300e+02 6.521e+02 1.210e+03, threshold=1.060e+03, percent-clipped=1.0 +2023-03-28 06:52:21,120 INFO [train.py:892] (0/4) Epoch 15, batch 650, loss[loss=0.2292, simple_loss=0.2827, pruned_loss=0.08784, over 19826.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2745, pruned_loss=0.0724, over 3798870.66 frames. 
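The `scaling.py:679` entries compare a per-module whitening metric against a limit (2.0 for the grouped 96- and 192-channel activations, 5.0 for the ungrouped 384-channel ones); the lines are printed when the metric exceeds the limit and the constraint kicks in. One plausible reading of the metric, sketched below under that assumption, is the mean squared covariance eigenvalue divided by the squared mean eigenvalue, which equals 1.0 for perfectly white features:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
    # Assumed definition: per channel group, compare the dispersion of the
    # covariance eigenvalues to their mean; 1.0 means perfectly white,
    # larger values mean less white.
    num_frames, num_channels = x.shape
    x = x.reshape(num_frames, num_groups, num_channels // num_groups)
    x = x.transpose(0, 1)                     # (groups, frames, chans/group)
    x = x - x.mean(dim=1, keepdim=True)
    cov = x.transpose(1, 2) @ x / num_frames  # (groups, c, c)
    eigs = torch.linalg.eigvalsh(cov)
    return ((eigs ** 2).mean() / eigs.mean() ** 2).item()

# e.g. whitening_metric(torch.randn(400, 96), num_groups=8) is close to 1,
# comfortably under the limit=2.0 used for the 96-channel modules above.
```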
], batch size: 121, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:52:52,468 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3780, 2.5797, 3.6133, 2.6992, 3.0565, 3.1051, 1.9874, 2.1785], + device='cuda:0'), covar=tensor([0.0899, 0.2566, 0.0517, 0.0885, 0.1606, 0.1082, 0.2025, 0.2327], + device='cuda:0'), in_proj_covar=tensor([0.0319, 0.0348, 0.0293, 0.0236, 0.0346, 0.0297, 0.0311, 0.0282], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 06:53:18,503 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.52 vs. limit=5.0 +2023-03-28 06:54:12,421 INFO [train.py:892] (0/4) Epoch 15, batch 700, loss[loss=0.2255, simple_loss=0.2847, pruned_loss=0.08311, over 19789.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2738, pruned_loss=0.07163, over 3831122.23 frames. ], batch size: 213, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:55:22,422 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.65 vs. limit=5.0 +2023-03-28 06:55:26,557 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8110, 2.5752, 2.9052, 2.7400, 3.0940, 3.0048, 3.6713, 3.8847], + device='cuda:0'), covar=tensor([0.0604, 0.1647, 0.1593, 0.1830, 0.1628, 0.1421, 0.0526, 0.0535], + device='cuda:0'), in_proj_covar=tensor([0.0223, 0.0224, 0.0246, 0.0240, 0.0272, 0.0237, 0.0196, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 06:55:53,017 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.991e+02 4.486e+02 5.723e+02 6.688e+02 1.072e+03, threshold=1.145e+03, percent-clipped=1.0 +2023-03-28 06:55:56,486 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0405, 4.0733, 2.3964, 4.3517, 4.4951, 1.8728, 3.7787, 3.3943], + device='cuda:0'), covar=tensor([0.0608, 0.0839, 0.2617, 0.0602, 0.0424, 0.3059, 0.0951, 0.0675], + device='cuda:0'), in_proj_covar=tensor([0.0206, 0.0230, 0.0216, 0.0225, 0.0192, 0.0198, 0.0224, 0.0171], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 06:55:58,541 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7705, 2.0213, 2.8060, 3.0733, 3.6569, 4.0390, 3.9664, 4.0055], + device='cuda:0'), covar=tensor([0.0850, 0.1947, 0.1182, 0.0571, 0.0300, 0.0178, 0.0222, 0.0339], + device='cuda:0'), in_proj_covar=tensor([0.0144, 0.0170, 0.0168, 0.0135, 0.0116, 0.0110, 0.0107, 0.0101], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 06:56:12,164 INFO [train.py:892] (0/4) Epoch 15, batch 750, loss[loss=0.2062, simple_loss=0.2794, pruned_loss=0.06654, over 19867.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2746, pruned_loss=0.07219, over 3857725.38 frames. 
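Each `zipformer.py:1454` dump lists eight per-head entropies of the attention distributions (matching the eight heads per stack), along with covariance statistics of those values. High entries mean nearly uniform attention over the keys; low entries mean sharply peaked attention. A sketch of the entropy part, assuming natural-log units as the magnitudes suggest:

```python
import torch

def attention_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
    # attn_weights: (num_heads, query_len, key_len), each row sums to 1.
    # Returns one average entropy (nats) per head, like the 8-element
    # tensors dumped above; ln(key_len) is the uniform-attention ceiling.
    eps = 1.0e-20
    ent = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
    return ent.mean(dim=-1)

w = torch.softmax(torch.randn(8, 50, 50), dim=-1)
print(attention_entropy(w))  # 8 per-head values, bounded by ln(50) ~ 3.9
```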
], batch size: 77, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:57:34,805 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26758.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:57:51,716 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4020, 3.5634, 5.0229, 3.8998, 4.2265, 4.0369, 2.5613, 2.9048], + device='cuda:0'), covar=tensor([0.0700, 0.2320, 0.0367, 0.0653, 0.1299, 0.0911, 0.1895, 0.2060], + device='cuda:0'), in_proj_covar=tensor([0.0323, 0.0351, 0.0294, 0.0237, 0.0349, 0.0300, 0.0313, 0.0285], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 06:58:01,854 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26770.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:58:03,011 INFO [train.py:892] (0/4) Epoch 15, batch 800, loss[loss=0.1905, simple_loss=0.2694, pruned_loss=0.05577, over 19809.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2729, pruned_loss=0.07156, over 3879317.55 frames. ], batch size: 82, lr: 1.06e-02, grad_scale: 16.0 +2023-03-28 06:59:23,088 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26806.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 06:59:36,948 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.366e+02 4.760e+02 5.593e+02 6.609e+02 1.473e+03, threshold=1.119e+03, percent-clipped=2.0 +2023-03-28 06:59:56,340 INFO [train.py:892] (0/4) Epoch 15, batch 850, loss[loss=0.2089, simple_loss=0.2838, pruned_loss=0.06698, over 19671.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2716, pruned_loss=0.07058, over 3896150.13 frames. ], batch size: 51, lr: 1.05e-02, grad_scale: 16.0 +2023-03-28 07:00:03,292 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26824.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:01:47,500 INFO [train.py:892] (0/4) Epoch 15, batch 900, loss[loss=0.1963, simple_loss=0.2634, pruned_loss=0.06463, over 19876.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2704, pruned_loss=0.06983, over 3908781.77 frames. ], batch size: 47, lr: 1.05e-02, grad_scale: 16.0 +2023-03-28 07:01:50,679 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26872.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:03:02,958 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26904.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 07:03:07,289 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26906.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 07:03:23,387 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.859e+02 4.358e+02 4.979e+02 5.962e+02 9.737e+02, threshold=9.958e+02, percent-clipped=0.0 +2023-03-28 07:03:41,702 INFO [train.py:892] (0/4) Epoch 15, batch 950, loss[loss=0.2116, simple_loss=0.2813, pruned_loss=0.07096, over 19885.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2696, pruned_loss=0.06941, over 3918732.28 frames. 
], batch size: 77, lr: 1.05e-02, grad_scale: 16.0
+2023-03-28 07:04:47,356 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26950.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 07:04:57,008 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26954.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 07:05:18,823 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26965.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 07:05:32,184 INFO [train.py:892] (0/4) Epoch 15, batch 1000, loss[loss=0.1978, simple_loss=0.2589, pruned_loss=0.06832, over 19816.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2706, pruned_loss=0.07035, over 3925212.90 frames. ], batch size: 202, lr: 1.05e-02, grad_scale: 16.0
+2023-03-28 07:06:07,350 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2368, 3.5903, 3.8910, 3.5196, 3.5509, 3.8745, 3.5930, 3.9962],
+       device='cuda:0'), covar=tensor([0.1619, 0.0461, 0.0566, 0.0512, 0.1284, 0.0605, 0.0590, 0.0480],
+       device='cuda:0'), in_proj_covar=tensor([0.0260, 0.0201, 0.0197, 0.0204, 0.0195, 0.0206, 0.0204, 0.0186],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:0')
+2023-03-28 07:07:01,871 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27011.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 07:07:05,035 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.353e+02 4.723e+02 5.899e+02 7.505e+02 1.756e+03, threshold=1.180e+03, percent-clipped=5.0
+2023-03-28 07:07:24,349 INFO [train.py:892] (0/4) Epoch 15, batch 1050, loss[loss=0.18, simple_loss=0.2489, pruned_loss=0.05558, over 19773.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2709, pruned_loss=0.07072, over 3931528.63 frames. ], batch size: 116, lr: 1.05e-02, grad_scale: 16.0
+2023-03-28 07:07:46,281 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0
+2023-03-28 07:09:11,705 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27070.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 07:09:13,205 INFO [train.py:892] (0/4) Epoch 15, batch 1100, loss[loss=0.2144, simple_loss=0.2813, pruned_loss=0.07378, over 19750.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2722, pruned_loss=0.07118, over 3935546.92 frames. ], batch size: 250, lr: 1.05e-02, grad_scale: 16.0
+2023-03-28 07:10:37,091 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0
+2023-03-28 07:10:49,067 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.986e+02 4.421e+02 5.325e+02 6.263e+02 1.209e+03, threshold=1.065e+03, percent-clipped=1.0
+2023-03-28 07:11:00,537 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27118.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 07:11:05,791 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1630, 2.3179, 2.4649, 2.3268, 2.2857, 2.4551, 2.2506, 2.5321],
+       device='cuda:0'), covar=tensor([0.0224, 0.0265, 0.0278, 0.0204, 0.0307, 0.0229, 0.0349, 0.0256],
+       device='cuda:0'), in_proj_covar=tensor([0.0055, 0.0053, 0.0056, 0.0049, 0.0060, 0.0057, 0.0073, 0.0051],
+       device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001],
+       device='cuda:0')
+2023-03-28 07:11:06,884 INFO [train.py:892] (0/4) Epoch 15, batch 1150, loss[loss=0.1739, simple_loss=0.2388, pruned_loss=0.05445, over 19707.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2717, pruned_loss=0.07081, over 3939732.52 frames. ], batch size: 101, lr: 1.05e-02, grad_scale: 8.0
+2023-03-28 07:12:06,814 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27148.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 07:12:58,801 INFO [train.py:892] (0/4) Epoch 15, batch 1200, loss[loss=0.1879, simple_loss=0.2553, pruned_loss=0.06023, over 19557.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2714, pruned_loss=0.07062, over 3942920.56 frames. ], batch size: 41, lr: 1.05e-02, grad_scale: 8.0
+2023-03-28 07:13:18,010 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.82 vs. limit=2.0
+2023-03-28 07:14:25,594 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27209.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 07:14:34,159 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.952e+02 4.645e+02 5.567e+02 6.439e+02 1.177e+03, threshold=1.113e+03, percent-clipped=3.0
+2023-03-28 07:14:49,437 INFO [train.py:892] (0/4) Epoch 15, batch 1250, loss[loss=0.2497, simple_loss=0.304, pruned_loss=0.09768, over 19761.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2707, pruned_loss=0.07028, over 3943451.53 frames. ], batch size: 276, lr: 1.05e-02, grad_scale: 8.0
+2023-03-28 07:16:06,795 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0
+2023-03-28 07:16:16,185 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27260.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 07:16:40,192 INFO [train.py:892] (0/4) Epoch 15, batch 1300, loss[loss=0.1991, simple_loss=0.2714, pruned_loss=0.06341, over 19724.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2713, pruned_loss=0.07066, over 3944054.33 frames. ], batch size: 50, lr: 1.05e-02, grad_scale: 8.0
+2023-03-28 07:17:19,296 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2731, 3.2049, 3.6541, 2.6901, 3.8698, 3.1850, 3.0971, 3.7254],
+       device='cuda:0'), covar=tensor([0.0727, 0.0350, 0.0502, 0.0703, 0.0286, 0.0337, 0.0511, 0.0311],
+       device='cuda:0'), in_proj_covar=tensor([0.0062, 0.0066, 0.0066, 0.0095, 0.0062, 0.0062, 0.0060, 0.0053],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:0')
+2023-03-28 07:18:00,038 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27306.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 07:18:17,024 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.149e+02 4.572e+02 5.387e+02 6.367e+02 1.430e+03, threshold=1.077e+03, percent-clipped=1.0
+2023-03-28 07:18:20,701 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0
+2023-03-28 07:18:32,964 INFO [train.py:892] (0/4) Epoch 15, batch 1350, loss[loss=0.1855, simple_loss=0.2598, pruned_loss=0.05561, over 19647.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2712, pruned_loss=0.07026, over 3945534.75 frames. ], batch size: 79, lr: 1.04e-02, grad_scale: 8.0
+2023-03-28 07:20:15,294 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9596, 3.0197, 4.4485, 3.1298, 3.6862, 3.6572, 2.2631, 2.5307],
+       device='cuda:0'), covar=tensor([0.0849, 0.2781, 0.0432, 0.0862, 0.1429, 0.0963, 0.2071, 0.2310],
+       device='cuda:0'), in_proj_covar=tensor([0.0320, 0.0353, 0.0295, 0.0238, 0.0347, 0.0300, 0.0314, 0.0285],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002],
+       device='cuda:0')
+2023-03-28 07:20:21,485 INFO [train.py:892] (0/4) Epoch 15, batch 1400, loss[loss=0.1954, simple_loss=0.2536, pruned_loss=0.06863, over 19860.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2726, pruned_loss=0.07095, over 3945841.89 frames. ], batch size: 106, lr: 1.04e-02, grad_scale: 8.0
+2023-03-28 07:20:28,455 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.58 vs. limit=5.0
+2023-03-28 07:21:59,974 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.359e+02 4.655e+02 5.654e+02 6.548e+02 1.649e+03, threshold=1.131e+03, percent-clipped=3.0
+2023-03-28 07:22:14,695 INFO [train.py:892] (0/4) Epoch 15, batch 1450, loss[loss=0.1894, simple_loss=0.2575, pruned_loss=0.06063, over 19784.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2736, pruned_loss=0.07139, over 3946513.62 frames. ], batch size: 116, lr: 1.04e-02, grad_scale: 8.0
+2023-03-28 07:22:50,464 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0
+2023-03-28 07:23:45,249 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0697, 4.2022, 4.6705, 4.1611, 4.1176, 4.5861, 4.3244, 4.8286],
+       device='cuda:0'), covar=tensor([0.1380, 0.0451, 0.0530, 0.0472, 0.0763, 0.0498, 0.0500, 0.0402],
+       device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0202, 0.0199, 0.0203, 0.0196, 0.0208, 0.0205, 0.0188],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:0')
+2023-03-28 07:24:06,275 INFO [train.py:892] (0/4) Epoch 15, batch 1500, loss[loss=0.218, simple_loss=0.2912, pruned_loss=0.07235, over 19674.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2724, pruned_loss=0.07023, over 3947161.11 frames. ], batch size: 55, lr: 1.04e-02, grad_scale: 8.0
+2023-03-28 07:25:19,933 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27504.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 07:25:41,664 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.670e+02 4.316e+02 5.110e+02 6.263e+02 1.157e+03, threshold=1.022e+03, percent-clipped=1.0
+2023-03-28 07:25:58,303 INFO [train.py:892] (0/4) Epoch 15, batch 1550, loss[loss=0.3077, simple_loss=0.3656, pruned_loss=0.1249, over 19274.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2722, pruned_loss=0.06991, over 3946545.88 frames. ], batch size: 483, lr: 1.04e-02, grad_scale: 8.0
+2023-03-28 07:26:23,925 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4353, 2.7723, 2.2673, 1.9180, 2.4641, 2.7916, 2.6160, 2.6868],
+       device='cuda:0'), covar=tensor([0.0263, 0.0280, 0.0281, 0.0518, 0.0362, 0.0179, 0.0192, 0.0192],
+       device='cuda:0'), in_proj_covar=tensor([0.0071, 0.0068, 0.0077, 0.0084, 0.0087, 0.0060, 0.0058, 0.0060],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001],
+       device='cuda:0')
+2023-03-28 07:27:27,283 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27560.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 07:27:38,115 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0651, 3.0420, 1.6313, 3.6973, 3.1974, 3.6897, 3.7085, 2.7682],
+       device='cuda:0'), covar=tensor([0.0580, 0.0551, 0.1730, 0.0501, 0.0580, 0.0324, 0.0514, 0.0789],
+       device='cuda:0'), in_proj_covar=tensor([0.0127, 0.0125, 0.0134, 0.0129, 0.0113, 0.0109, 0.0126, 0.0131],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003],
+       device='cuda:0')
+2023-03-28 07:27:50,937 INFO [train.py:892] (0/4) Epoch 15, batch 1600, loss[loss=0.2166, simple_loss=0.2839, pruned_loss=0.07465, over 19754.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2727, pruned_loss=0.0699, over 3947306.06 frames. ], batch size: 253, lr: 1.04e-02, grad_scale: 8.0
+2023-03-28 07:28:23,314 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0
+2023-03-28 07:29:11,929 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27606.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 07:29:15,494 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27608.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 07:29:28,191 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.222e+02 4.211e+02 5.223e+02 6.136e+02 1.197e+03, threshold=1.045e+03, percent-clipped=1.0
+2023-03-28 07:29:38,027 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6870, 3.5317, 3.5678, 3.3111, 3.6722, 2.8028, 2.9642, 1.6482],
+       device='cuda:0'), covar=tensor([0.0222, 0.0251, 0.0170, 0.0222, 0.0158, 0.0917, 0.0783, 0.1781],
+       device='cuda:0'), in_proj_covar=tensor([0.0092, 0.0127, 0.0103, 0.0121, 0.0107, 0.0124, 0.0135, 0.0118],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:0')
+2023-03-28 07:29:43,673 INFO [train.py:892] (0/4) Epoch 15, batch 1650, loss[loss=0.2195, simple_loss=0.2975, pruned_loss=0.07074, over 19678.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2718, pruned_loss=0.06938, over 3948573.53 frames. ], batch size: 55, lr: 1.04e-02, grad_scale: 8.0
+2023-03-28 07:29:45,002 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0
+2023-03-28 07:29:57,755 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8659, 3.8895, 2.3249, 4.0078, 4.2926, 1.8200, 3.4795, 3.2153],
+       device='cuda:0'), covar=tensor([0.0614, 0.0797, 0.2678, 0.0835, 0.0447, 0.2886, 0.1080, 0.0761],
+       device='cuda:0'), in_proj_covar=tensor([0.0204, 0.0231, 0.0214, 0.0228, 0.0193, 0.0196, 0.0223, 0.0171],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002],
+       device='cuda:0')
+2023-03-28 07:30:53,184 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3056, 4.2974, 2.6296, 4.4773, 4.7368, 2.0286, 4.0582, 3.5756],
+       device='cuda:0'), covar=tensor([0.0571, 0.0830, 0.2568, 0.0884, 0.0449, 0.2898, 0.0898, 0.0666],
+       device='cuda:0'), in_proj_covar=tensor([0.0203, 0.0228, 0.0212, 0.0226, 0.0191, 0.0194, 0.0221, 0.0170],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002],
+       device='cuda:0')
+2023-03-28 07:30:59,191 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4380, 3.0224, 4.8355, 4.0634, 4.7094, 4.7239, 4.6317, 4.4529],
+       device='cuda:0'), covar=tensor([0.0208, 0.0712, 0.0080, 0.0871, 0.0083, 0.0174, 0.0129, 0.0116],
+       device='cuda:0'), in_proj_covar=tensor([0.0084, 0.0092, 0.0075, 0.0146, 0.0069, 0.0085, 0.0077, 0.0070],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0002, 0.0002],
+       device='cuda:0')
+2023-03-28 07:31:01,132 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27654.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 07:31:26,096 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1666, 3.2230, 4.6136, 3.4105, 3.8692, 3.8226, 2.4813, 2.5892],
+       device='cuda:0'), covar=tensor([0.0741, 0.2685, 0.0418, 0.0818, 0.1277, 0.0857, 0.1987, 0.2202],
+       device='cuda:0'), in_proj_covar=tensor([0.0320, 0.0354, 0.0293, 0.0238, 0.0345, 0.0300, 0.0313, 0.0286],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002],
+       device='cuda:0')
+2023-03-28 07:31:38,699 INFO [train.py:892] (0/4) Epoch 15, batch 1700, loss[loss=0.1851, simple_loss=0.2467, pruned_loss=0.06168, over 19826.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2711, pruned_loss=0.06882, over 3949335.23 frames. ], batch size: 103, lr: 1.04e-02, grad_scale: 8.0
+2023-03-28 07:33:14,467 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.834e+02 4.579e+02 5.497e+02 6.695e+02 1.218e+03, threshold=1.099e+03, percent-clipped=2.0
+2023-03-28 07:33:15,217 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27714.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 07:33:27,527 INFO [train.py:892] (0/4) Epoch 15, batch 1750, loss[loss=0.1735, simple_loss=0.2507, pruned_loss=0.04819, over 19891.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2714, pruned_loss=0.06903, over 3950704.71 frames. ], batch size: 87, lr: 1.04e-02, grad_scale: 8.0
+2023-03-28 07:34:49,025 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.6250, 1.8334, 1.5989, 1.0272, 1.6277, 1.7838, 1.7054, 1.7984],
+       device='cuda:0'), covar=tensor([0.0270, 0.0215, 0.0262, 0.0480, 0.0380, 0.0206, 0.0192, 0.0178],
+       device='cuda:0'), in_proj_covar=tensor([0.0072, 0.0069, 0.0077, 0.0083, 0.0087, 0.0060, 0.0058, 0.0060],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001],
+       device='cuda:0')
+2023-03-28 07:35:04,128 INFO [train.py:892] (0/4) Epoch 15, batch 1800, loss[loss=0.2535, simple_loss=0.3161, pruned_loss=0.09541, over 19686.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2729, pruned_loss=0.07037, over 3949870.30 frames. ], batch size: 337, lr: 1.04e-02, grad_scale: 8.0
+2023-03-28 07:35:11,021 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27775.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 07:36:05,631 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27804.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 07:36:19,689 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.5580, 5.9626, 5.9087, 5.8315, 5.6594, 5.9428, 5.1939, 5.4225],
+       device='cuda:0'), covar=tensor([0.0405, 0.0419, 0.0592, 0.0394, 0.0534, 0.0519, 0.0683, 0.0785],
+       device='cuda:0'), in_proj_covar=tensor([0.0220, 0.0230, 0.0258, 0.0220, 0.0216, 0.0203, 0.0231, 0.0267],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003],
+       device='cuda:0')
+2023-03-28 07:36:22,621 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.736e+02 4.432e+02 5.190e+02 6.215e+02 1.891e+03, threshold=1.038e+03, percent-clipped=1.0
+2023-03-28 07:36:34,448 INFO [train.py:892] (0/4) Epoch 15, batch 1850, loss[loss=0.202, simple_loss=0.2834, pruned_loss=0.06028, over 19662.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.274, pruned_loss=0.07008, over 3949244.43 frames. ], batch size: 55, lr: 1.04e-02, grad_scale: 8.0
+2023-03-28 07:36:41,902 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-15.pt
+2023-03-28 07:37:42,185 INFO [train.py:892] (0/4) Epoch 16, batch 0, loss[loss=0.224, simple_loss=0.2918, pruned_loss=0.07813, over 19877.00 frames. ], tot_loss[loss=0.224, simple_loss=0.2918, pruned_loss=0.07813, over 19877.00 frames. ], batch size: 139, lr: 1.00e-02, grad_scale: 8.0
+2023-03-28 07:37:42,186 INFO [train.py:917] (0/4) Computing validation loss
+2023-03-28 07:38:12,720 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0596, 3.4699, 2.8995, 2.4126, 2.7804, 3.4142, 3.0492, 3.3069],
+       device='cuda:0'), covar=tensor([0.0296, 0.0195, 0.0244, 0.0428, 0.0344, 0.0233, 0.0173, 0.0173],
+       device='cuda:0'), in_proj_covar=tensor([0.0072, 0.0070, 0.0077, 0.0083, 0.0087, 0.0060, 0.0058, 0.0060],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001],
+       device='cuda:0')
+2023-03-28 07:38:14,366 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8908, 3.1993, 2.6050, 2.2155, 2.6225, 3.0226, 2.7744, 3.2154],
+       device='cuda:0'), covar=tensor([0.0137, 0.0249, 0.0242, 0.0493, 0.0330, 0.0205, 0.0199, 0.0069],
+       device='cuda:0'), in_proj_covar=tensor([0.0072, 0.0070, 0.0077, 0.0083, 0.0087, 0.0060, 0.0058, 0.0060],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001],
+       device='cuda:0')
+2023-03-28 07:38:15,255 INFO [train.py:926] (0/4) Epoch 16, validation: loss=0.1716, simple_loss=0.2504, pruned_loss=0.04639, over 2883724.00 frames.
+2023-03-28 07:38:15,256 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB
+2023-03-28 07:38:45,420 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.48 vs. limit=5.0
+2023-03-28 07:39:17,303 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27852.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 07:40:09,184 INFO [train.py:892] (0/4) Epoch 16, batch 50, loss[loss=0.1759, simple_loss=0.2504, pruned_loss=0.05075, over 19600.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2651, pruned_loss=0.06576, over 890772.34 frames. ], batch size: 45, lr: 1.00e-02, grad_scale: 8.0
+2023-03-28 07:41:32,614 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.518e+02 4.210e+02 5.030e+02 5.753e+02 1.491e+03, threshold=1.006e+03, percent-clipped=2.0
+2023-03-28 07:42:00,004 INFO [train.py:892] (0/4) Epoch 16, batch 100, loss[loss=0.1783, simple_loss=0.2479, pruned_loss=0.05433, over 19765.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.264, pruned_loss=0.06551, over 1570456.15 frames. ], batch size: 119, lr: 1.00e-02, grad_scale: 8.0
+2023-03-28 07:42:22,049 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3495, 3.4575, 2.0374, 4.3093, 3.8737, 4.2362, 4.3583, 3.2483],
+       device='cuda:0'), covar=tensor([0.0639, 0.0536, 0.1541, 0.0424, 0.0469, 0.0309, 0.0457, 0.0754],
+       device='cuda:0'), in_proj_covar=tensor([0.0130, 0.0126, 0.0135, 0.0131, 0.0114, 0.0111, 0.0126, 0.0132],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:0')
+2023-03-28 07:42:45,991 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4764, 2.7671, 2.8820, 3.4220, 2.4116, 2.8684, 2.2102, 2.1742],
+       device='cuda:0'), covar=tensor([0.0570, 0.1916, 0.1055, 0.0354, 0.2104, 0.0765, 0.1346, 0.1800],
+       device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0331, 0.0229, 0.0168, 0.0240, 0.0184, 0.0203, 0.0211],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+       device='cuda:0')
+2023-03-28 07:43:53,900 INFO [train.py:892] (0/4) Epoch 16, batch 150, loss[loss=0.179, simple_loss=0.2474, pruned_loss=0.05529, over 19790.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.267, pruned_loss=0.06775, over 2098520.03 frames. ], batch size: 42, lr: 1.00e-02, grad_scale: 8.0
+2023-03-28 07:44:49,678 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-28000.pt
+2023-03-28 07:45:25,173 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.668e+02 4.765e+02 5.560e+02 7.224e+02 1.633e+03, threshold=1.112e+03, percent-clipped=5.0
+2023-03-28 07:45:44,337 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.5138, 5.8958, 5.8894, 5.7884, 5.5384, 5.9196, 5.1938, 5.2764],
+       device='cuda:0'), covar=tensor([0.0414, 0.0388, 0.0518, 0.0406, 0.0539, 0.0446, 0.0589, 0.0929],
+       device='cuda:0'), in_proj_covar=tensor([0.0221, 0.0230, 0.0256, 0.0220, 0.0215, 0.0202, 0.0230, 0.0269],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003],
+       device='cuda:0')
+2023-03-28 07:45:54,095 INFO [train.py:892] (0/4) Epoch 16, batch 200, loss[loss=0.2131, simple_loss=0.2839, pruned_loss=0.07112, over 19658.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.27, pruned_loss=0.06982, over 2507467.78 frames. ], batch size: 50, lr: 9.99e-03, grad_scale: 8.0
+2023-03-28 07:47:36,254 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28070.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 07:47:47,825 INFO [train.py:892] (0/4) Epoch 16, batch 250, loss[loss=0.2105, simple_loss=0.279, pruned_loss=0.07095, over 19836.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2682, pruned_loss=0.06859, over 2828201.34 frames. ], batch size: 197, lr: 9.98e-03, grad_scale: 8.0
+2023-03-28 07:49:14,132 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.535e+02 4.468e+02 5.270e+02 6.395e+02 1.144e+03, threshold=1.054e+03, percent-clipped=2.0
+2023-03-28 07:49:40,370 INFO [train.py:892] (0/4) Epoch 16, batch 300, loss[loss=0.2101, simple_loss=0.2797, pruned_loss=0.07029, over 19764.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2697, pruned_loss=0.06869, over 3076432.30 frames. ], batch size: 244, lr: 9.97e-03, grad_scale: 8.0
+2023-03-28 07:49:44,171 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1769, 4.1450, 4.5290, 4.1489, 3.9369, 4.3642, 4.2405, 4.6330],
+       device='cuda:0'), covar=tensor([0.0909, 0.0341, 0.0357, 0.0355, 0.0899, 0.0491, 0.0395, 0.0293],
+       device='cuda:0'), in_proj_covar=tensor([0.0264, 0.0201, 0.0201, 0.0207, 0.0195, 0.0210, 0.0207, 0.0191],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:0')
+2023-03-28 07:51:33,563 INFO [train.py:892] (0/4) Epoch 16, batch 350, loss[loss=0.2077, simple_loss=0.2822, pruned_loss=0.06658, over 19727.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2701, pruned_loss=0.06847, over 3270108.23 frames. ], batch size: 50, lr: 9.96e-03, grad_scale: 8.0
+2023-03-28 07:53:01,621 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.842e+02 4.257e+02 5.267e+02 6.673e+02 1.330e+03, threshold=1.053e+03, percent-clipped=2.0
+2023-03-28 07:53:27,005 INFO [train.py:892] (0/4) Epoch 16, batch 400, loss[loss=0.1949, simple_loss=0.267, pruned_loss=0.06144, over 19891.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2696, pruned_loss=0.06815, over 3421223.91 frames. ], batch size: 87, lr: 9.95e-03, grad_scale: 8.0
+2023-03-28 07:53:35,152 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28229.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 07:54:36,203 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28255.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 07:55:12,385 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.61 vs. limit=2.0
+2023-03-28 07:55:21,933 INFO [train.py:892] (0/4) Epoch 16, batch 450, loss[loss=0.2299, simple_loss=0.2927, pruned_loss=0.0835, over 19764.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2716, pruned_loss=0.06948, over 3537293.99 frames. ], batch size: 233, lr: 9.95e-03, grad_scale: 8.0
+2023-03-28 07:55:46,690 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0
+2023-03-28 07:55:48,318 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28287.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 07:55:55,207 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28290.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 07:56:47,446 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.970e+02 4.542e+02 5.285e+02 6.486e+02 1.059e+03, threshold=1.057e+03, percent-clipped=1.0
+2023-03-28 07:56:54,428 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28316.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 07:57:11,938 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.79 vs. limit=2.0
+2023-03-28 07:57:14,805 INFO [train.py:892] (0/4) Epoch 16, batch 500, loss[loss=0.1904, simple_loss=0.2647, pruned_loss=0.05804, over 19746.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2709, pruned_loss=0.06877, over 3629722.60 frames. ], batch size: 44, lr: 9.94e-03, grad_scale: 8.0
+2023-03-28 07:57:28,334 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0193, 2.3605, 2.9319, 3.3137, 3.8500, 4.2535, 4.0508, 4.1831],
+       device='cuda:0'), covar=tensor([0.0801, 0.1878, 0.1357, 0.0540, 0.0284, 0.0195, 0.0246, 0.0278],
+       device='cuda:0'), in_proj_covar=tensor([0.0148, 0.0169, 0.0171, 0.0137, 0.0122, 0.0112, 0.0110, 0.0102],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:0')
+2023-03-28 07:58:04,386 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28348.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 07:58:31,268 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0
+2023-03-28 07:58:55,410 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28370.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 07:59:07,688 INFO [train.py:892] (0/4) Epoch 16, batch 550, loss[loss=0.1941, simple_loss=0.2625, pruned_loss=0.06282, over 19861.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2705, pruned_loss=0.06885, over 3700121.29 frames. ], batch size: 104, lr: 9.93e-03, grad_scale: 8.0
+2023-03-28 07:59:12,770 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5844, 1.8962, 2.4104, 2.9065, 3.3317, 3.4892, 3.3184, 3.4951],
+       device='cuda:0'), covar=tensor([0.0910, 0.1892, 0.1293, 0.0628, 0.0385, 0.0263, 0.0336, 0.0349],
+       device='cuda:0'), in_proj_covar=tensor([0.0146, 0.0168, 0.0169, 0.0136, 0.0120, 0.0111, 0.0109, 0.0101],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:0')
+2023-03-28 08:00:12,025 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6650, 2.1235, 2.5342, 3.0977, 3.5556, 3.7882, 3.6051, 3.7765],
+       device='cuda:0'), covar=tensor([0.1024, 0.1923, 0.1375, 0.0614, 0.0387, 0.0253, 0.0337, 0.0332],
+       device='cuda:0'), in_proj_covar=tensor([0.0145, 0.0167, 0.0168, 0.0135, 0.0119, 0.0110, 0.0108, 0.0100],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:0')
+2023-03-28 08:00:19,660 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.69 vs. limit=5.0
+2023-03-28 08:00:36,163 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.975e+02 4.219e+02 4.967e+02 6.198e+02 1.278e+03, threshold=9.934e+02, percent-clipped=3.0
+2023-03-28 08:00:46,470 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28418.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 08:00:46,902 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0
+2023-03-28 08:01:02,570 INFO [train.py:892] (0/4) Epoch 16, batch 600, loss[loss=0.18, simple_loss=0.2462, pruned_loss=0.05687, over 19709.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2694, pruned_loss=0.06811, over 3756478.56 frames. ], batch size: 109, lr: 9.92e-03, grad_scale: 8.0
+2023-03-28 08:01:08,490 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3044, 4.0310, 4.1502, 3.8735, 4.2440, 3.1007, 3.5479, 2.1792],
+       device='cuda:0'), covar=tensor([0.0162, 0.0210, 0.0124, 0.0176, 0.0123, 0.0772, 0.0691, 0.1420],
+       device='cuda:0'), in_proj_covar=tensor([0.0091, 0.0126, 0.0101, 0.0120, 0.0107, 0.0122, 0.0131, 0.0117],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:0')
+2023-03-28 08:02:31,946 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7234, 2.8163, 4.1473, 3.6829, 3.9940, 4.1923, 3.9501, 3.9996],
+       device='cuda:0'), covar=tensor([0.0247, 0.0654, 0.0082, 0.0621, 0.0101, 0.0187, 0.0152, 0.0112],
+       device='cuda:0'), in_proj_covar=tensor([0.0084, 0.0092, 0.0075, 0.0144, 0.0069, 0.0084, 0.0077, 0.0070],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0004, 0.0002, 0.0003, 0.0002, 0.0002],
+       device='cuda:0')
+2023-03-28 08:02:53,446 INFO [train.py:892] (0/4) Epoch 16, batch 650, loss[loss=0.2006, simple_loss=0.2689, pruned_loss=0.06611, over 19736.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2693, pruned_loss=0.06797, over 3797897.72 frames. ], batch size: 92, lr: 9.91e-03, grad_scale: 8.0
+2023-03-28 08:03:15,836 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28486.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:03:31,400 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28492.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:04:17,631 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.821e+02 4.448e+02 5.454e+02 6.222e+02 1.151e+03, threshold=1.091e+03, percent-clipped=4.0
+2023-03-28 08:04:44,531 INFO [train.py:892] (0/4) Epoch 16, batch 700, loss[loss=0.2014, simple_loss=0.2606, pruned_loss=0.0711, over 19476.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2688, pruned_loss=0.0673, over 3831695.12 frames. ], batch size: 43, lr: 9.90e-03, grad_scale: 8.0
+2023-03-28 08:05:12,173 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2940, 4.8626, 4.8626, 5.2311, 4.8446, 5.5163, 5.3726, 5.5642],
+       device='cuda:0'), covar=tensor([0.0596, 0.0434, 0.0436, 0.0333, 0.0692, 0.0378, 0.0419, 0.0321],
+       device='cuda:0'), in_proj_covar=tensor([0.0133, 0.0152, 0.0177, 0.0148, 0.0152, 0.0133, 0.0135, 0.0171],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+       device='cuda:0')
+2023-03-28 08:05:34,244 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28547.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:05:42,197 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0
+2023-03-28 08:05:45,637 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28553.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:06:37,723 INFO [train.py:892] (0/4) Epoch 16, batch 750, loss[loss=0.1874, simple_loss=0.2627, pruned_loss=0.05605, over 19811.00 frames. ], tot_loss[loss=0.201, simple_loss=0.268, pruned_loss=0.06698, over 3858731.43 frames. ], batch size: 82, lr: 9.89e-03, grad_scale: 8.0
+2023-03-28 08:06:59,777 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28585.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:07:58,873 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28611.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 08:08:06,521 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.992e+02 4.406e+02 5.087e+02 6.345e+02 1.381e+03, threshold=1.017e+03, percent-clipped=1.0
+2023-03-28 08:08:26,551 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6753, 2.8532, 4.1168, 3.2590, 3.4763, 3.3766, 2.1301, 2.3659],
+       device='cuda:0'), covar=tensor([0.0924, 0.2594, 0.0432, 0.0726, 0.1358, 0.1028, 0.1990, 0.2298],
+       device='cuda:0'), in_proj_covar=tensor([0.0323, 0.0354, 0.0300, 0.0242, 0.0351, 0.0305, 0.0318, 0.0290],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002],
+       device='cuda:0')
+2023-03-28 08:08:34,050 INFO [train.py:892] (0/4) Epoch 16, batch 800, loss[loss=0.1857, simple_loss=0.2569, pruned_loss=0.05722, over 19747.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2697, pruned_loss=0.0675, over 3878546.02 frames. ], batch size: 89, lr: 9.89e-03, grad_scale: 8.0
+2023-03-28 08:09:04,126 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0
+2023-03-28 08:09:12,917 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28643.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:09:13,667 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.96 vs. limit=5.0
+2023-03-28 08:10:15,996 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6727, 3.1472, 2.4981, 1.9751, 2.5633, 2.8693, 2.9342, 2.8254],
+       device='cuda:0'), covar=tensor([0.0235, 0.0216, 0.0257, 0.0533, 0.0307, 0.0262, 0.0143, 0.0190],
+       device='cuda:0'), in_proj_covar=tensor([0.0073, 0.0071, 0.0079, 0.0085, 0.0088, 0.0062, 0.0058, 0.0063],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001],
+       device='cuda:0')
+2023-03-28 08:10:29,826 INFO [train.py:892] (0/4) Epoch 16, batch 850, loss[loss=0.1714, simple_loss=0.2412, pruned_loss=0.05086, over 19781.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2699, pruned_loss=0.06782, over 3894877.22 frames. ], batch size: 116, lr: 9.88e-03, grad_scale: 8.0
+2023-03-28 08:10:33,053 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6547, 1.8936, 2.3784, 2.8806, 3.2492, 3.4751, 3.4081, 3.4829],
+       device='cuda:0'), covar=tensor([0.0859, 0.1804, 0.1208, 0.0581, 0.0414, 0.0246, 0.0284, 0.0276],
+       device='cuda:0'), in_proj_covar=tensor([0.0144, 0.0164, 0.0166, 0.0134, 0.0119, 0.0110, 0.0107, 0.0100],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:0')
+2023-03-28 08:11:56,546 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.530e+02 4.659e+02 5.536e+02 6.490e+02 1.175e+03, threshold=1.107e+03, percent-clipped=2.0
+2023-03-28 08:12:21,629 INFO [train.py:892] (0/4) Epoch 16, batch 900, loss[loss=0.189, simple_loss=0.2639, pruned_loss=0.05702, over 19812.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2679, pruned_loss=0.06674, over 3908127.46 frames. ], batch size: 82, lr: 9.87e-03, grad_scale: 8.0
+2023-03-28 08:12:40,802 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0
+2023-03-28 08:14:18,779 INFO [train.py:892] (0/4) Epoch 16, batch 950, loss[loss=0.1931, simple_loss=0.2798, pruned_loss=0.0532, over 19875.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2677, pruned_loss=0.06632, over 3917569.94 frames. ], batch size: 53, lr: 9.86e-03, grad_scale: 8.0
+2023-03-28 08:14:58,457 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6971, 3.8482, 2.2831, 3.9753, 4.1052, 1.7205, 3.3390, 3.0927],
+       device='cuda:0'), covar=tensor([0.0740, 0.0741, 0.2575, 0.0674, 0.0416, 0.3083, 0.1099, 0.0730],
+       device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0234, 0.0218, 0.0233, 0.0202, 0.0201, 0.0230, 0.0174],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002],
+       device='cuda:0')
+2023-03-28 08:15:44,965 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.335e+02 4.634e+02 5.431e+02 6.353e+02 1.268e+03, threshold=1.086e+03, percent-clipped=1.0
+2023-03-28 08:15:50,870 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3143, 2.9756, 3.5705, 2.5666, 3.4718, 2.7706, 3.0518, 3.4878],
+       device='cuda:0'), covar=tensor([0.0488, 0.0452, 0.0403, 0.0782, 0.0412, 0.0458, 0.0441, 0.0383],
+       device='cuda:0'), in_proj_covar=tensor([0.0065, 0.0070, 0.0069, 0.0100, 0.0067, 0.0065, 0.0063, 0.0055],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:0')
+2023-03-28 08:16:11,363 INFO [train.py:892] (0/4) Epoch 16, batch 1000, loss[loss=0.2181, simple_loss=0.2695, pruned_loss=0.08338, over 19817.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.268, pruned_loss=0.06669, over 3923612.54 frames. ], batch size: 147, lr: 9.85e-03, grad_scale: 8.0
+2023-03-28 08:16:45,792 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28842.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:16:59,636 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28848.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:17:31,418 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28861.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:18:01,600 INFO [train.py:892] (0/4) Epoch 16, batch 1050, loss[loss=0.184, simple_loss=0.2541, pruned_loss=0.05691, over 19762.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2687, pruned_loss=0.0675, over 3930221.82 frames. ], batch size: 88, lr: 9.84e-03, grad_scale: 8.0
+2023-03-28 08:18:23,328 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28885.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:19:02,746 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.65 vs. limit=2.0
+2023-03-28 08:19:23,531 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28911.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 08:19:30,252 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.700e+02 4.466e+02 5.126e+02 6.804e+02 1.153e+03, threshold=1.025e+03, percent-clipped=1.0
+2023-03-28 08:19:48,277 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28922.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:19:56,640 INFO [train.py:892] (0/4) Epoch 16, batch 1100, loss[loss=0.196, simple_loss=0.268, pruned_loss=0.06202, over 19772.00 frames. ], tot_loss[loss=0.202, simple_loss=0.269, pruned_loss=0.06754, over 3934892.36 frames. ], batch size: 53, lr: 9.84e-03, grad_scale: 8.0
+2023-03-28 08:20:01,808 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0
+2023-03-28 08:20:14,115 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28933.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:20:37,482 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28943.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:21:06,057 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5551, 4.5160, 4.9689, 4.4950, 4.1761, 4.7463, 4.6442, 5.0647],
+       device='cuda:0'), covar=tensor([0.0803, 0.0322, 0.0302, 0.0314, 0.0798, 0.0430, 0.0356, 0.0289],
+       device='cuda:0'), in_proj_covar=tensor([0.0265, 0.0205, 0.0203, 0.0209, 0.0198, 0.0213, 0.0210, 0.0195],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:0')
+2023-03-28 08:21:08,225 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.75 vs. limit=5.0
+2023-03-28 08:21:12,717 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28959.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 08:21:48,716 INFO [train.py:892] (0/4) Epoch 16, batch 1150, loss[loss=0.2098, simple_loss=0.2604, pruned_loss=0.07963, over 19756.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2682, pruned_loss=0.06734, over 3937218.01 frames. ], batch size: 213, lr: 9.83e-03, grad_scale: 8.0
+2023-03-28 08:22:23,525 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28991.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:22:51,053 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29002.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:23:05,748 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6467, 2.8601, 3.3432, 3.2002, 3.5164, 3.6381, 4.4761, 4.8742],
+       device='cuda:0'), covar=tensor([0.0479, 0.1930, 0.1593, 0.2143, 0.1941, 0.1541, 0.0519, 0.0506],
+       device='cuda:0'), in_proj_covar=tensor([0.0223, 0.0225, 0.0247, 0.0238, 0.0272, 0.0235, 0.0198, 0.0212],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+       device='cuda:0')
+2023-03-28 08:23:15,273 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.211e+02 4.550e+02 5.400e+02 6.345e+02 1.264e+03, threshold=1.080e+03, percent-clipped=1.0
+2023-03-28 08:23:38,392 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0
+2023-03-28 08:23:43,687 INFO [train.py:892] (0/4) Epoch 16, batch 1200, loss[loss=0.2334, simple_loss=0.2914, pruned_loss=0.08767, over 19741.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2682, pruned_loss=0.06709, over 3941009.43 frames. ], batch size: 259, lr: 9.82e-03, grad_scale: 8.0
+2023-03-28 08:25:09,298 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29063.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:25:38,187 INFO [train.py:892] (0/4) Epoch 16, batch 1250, loss[loss=0.1692, simple_loss=0.2344, pruned_loss=0.05197, over 19856.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2665, pruned_loss=0.06621, over 3943349.52 frames. ], batch size: 112, lr: 9.81e-03, grad_scale: 8.0
+2023-03-28 08:26:52,507 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0044, 4.9762, 5.4403, 4.9921, 4.3198, 5.3200, 5.0820, 5.6376],
+       device='cuda:0'), covar=tensor([0.0862, 0.0299, 0.0324, 0.0295, 0.0668, 0.0371, 0.0373, 0.0262],
+       device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0202, 0.0201, 0.0208, 0.0196, 0.0210, 0.0207, 0.0190],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:0')
+2023-03-28 08:27:06,755 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.820e+02 4.171e+02 4.962e+02 5.864e+02 1.336e+03, threshold=9.924e+02, percent-clipped=4.0
+2023-03-28 08:27:33,968 INFO [train.py:892] (0/4) Epoch 16, batch 1300, loss[loss=0.2206, simple_loss=0.282, pruned_loss=0.07957, over 19760.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2664, pruned_loss=0.0661, over 3944747.37 frames. ], batch size: 182, lr: 9.80e-03, grad_scale: 16.0
+2023-03-28 08:28:12,078 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29142.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:28:16,599 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.68 vs. limit=5.0
+2023-03-28 08:28:24,924 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29148.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:28:31,996 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.80 vs. limit=5.0
+2023-03-28 08:29:14,448 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2356, 3.3727, 3.6330, 4.3060, 2.7150, 3.1841, 2.7682, 2.4131],
+       device='cuda:0'), covar=tensor([0.0478, 0.2294, 0.0852, 0.0302, 0.2237, 0.0899, 0.1299, 0.1901],
+       device='cuda:0'), in_proj_covar=tensor([0.0216, 0.0332, 0.0232, 0.0173, 0.0240, 0.0188, 0.0205, 0.0211],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+       device='cuda:0')
+2023-03-28 08:29:27,196 INFO [train.py:892] (0/4) Epoch 16, batch 1350, loss[loss=0.2016, simple_loss=0.2694, pruned_loss=0.0669, over 19712.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2673, pruned_loss=0.06644, over 3946078.23 frames. ], batch size: 85, lr: 9.80e-03, grad_scale: 16.0
+2023-03-28 08:30:00,822 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29190.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:30:01,097 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6586, 3.0921, 2.6599, 2.0553, 2.5587, 2.8477, 2.8623, 3.0323],
+       device='cuda:0'), covar=tensor([0.0208, 0.0179, 0.0216, 0.0513, 0.0317, 0.0225, 0.0185, 0.0154],
+       device='cuda:0'), in_proj_covar=tensor([0.0074, 0.0072, 0.0080, 0.0085, 0.0088, 0.0063, 0.0059, 0.0063],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002],
+       device='cuda:0')
+2023-03-28 08:30:15,635 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29196.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:30:55,172 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.081e+02 4.385e+02 5.390e+02 6.526e+02 1.400e+03, threshold=1.078e+03, percent-clipped=3.0
+2023-03-28 08:31:01,880 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29217.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:31:23,382 INFO [train.py:892] (0/4) Epoch 16, batch 1400, loss[loss=0.1875, simple_loss=0.2476, pruned_loss=0.06371, over 19782.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2677, pruned_loss=0.06619, over 3945564.72 frames. ], batch size: 193, lr: 9.79e-03, grad_scale: 16.0
+2023-03-28 08:31:47,864 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29236.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:33:17,629 INFO [train.py:892] (0/4) Epoch 16, batch 1450, loss[loss=0.1804, simple_loss=0.2494, pruned_loss=0.05565, over 19671.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2681, pruned_loss=0.06636, over 3946563.81 frames. ], batch size: 73, lr: 9.78e-03, grad_scale: 16.0
+2023-03-28 08:34:08,458 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29297.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:34:43,963 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.554e+02 4.411e+02 5.175e+02 6.101e+02 1.424e+03, threshold=1.035e+03, percent-clipped=1.0
+2023-03-28 08:35:11,606 INFO [train.py:892] (0/4) Epoch 16, batch 1500, loss[loss=0.2011, simple_loss=0.2564, pruned_loss=0.0729, over 19848.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2677, pruned_loss=0.06606, over 3947777.80 frames. ], batch size: 197, lr: 9.77e-03, grad_scale: 16.0
+2023-03-28 08:36:25,423 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29358.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:37:05,542 INFO [train.py:892] (0/4) Epoch 16, batch 1550, loss[loss=0.1893, simple_loss=0.2482, pruned_loss=0.06523, over 19758.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2672, pruned_loss=0.06597, over 3950128.32 frames. ], batch size: 205, lr: 9.76e-03, grad_scale: 16.0
+2023-03-28 08:38:32,266 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.261e+02 4.365e+02 5.019e+02 6.130e+02 1.225e+03, threshold=1.004e+03, percent-clipped=3.0
+2023-03-28 08:39:00,859 INFO [train.py:892] (0/4) Epoch 16, batch 1600, loss[loss=0.2873, simple_loss=0.3472, pruned_loss=0.1137, over 19476.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2685, pruned_loss=0.06692, over 3950526.94 frames. ], batch size: 396, lr: 9.76e-03, grad_scale: 16.0
+2023-03-28 08:40:51,211 INFO [train.py:892] (0/4) Epoch 16, batch 1650, loss[loss=0.1815, simple_loss=0.2469, pruned_loss=0.05811, over 19800.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2684, pruned_loss=0.06685, over 3950659.25 frames. ], batch size: 150, lr: 9.75e-03, grad_scale: 16.0
+2023-03-28 08:41:00,094 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1687, 3.1130, 1.9412, 3.7603, 3.4577, 3.7155, 3.8464, 2.9902],
+       device='cuda:0'), covar=tensor([0.0568, 0.0563, 0.1762, 0.0492, 0.0515, 0.0375, 0.0420, 0.0629],
+       device='cuda:0'), in_proj_covar=tensor([0.0132, 0.0129, 0.0137, 0.0134, 0.0117, 0.0115, 0.0130, 0.0133],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:0')
+2023-03-28 08:42:19,143 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.587e+02 4.298e+02 4.820e+02 5.850e+02 1.066e+03, threshold=9.641e+02, percent-clipped=1.0
+2023-03-28 08:42:27,073 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29517.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:42:45,623 INFO [train.py:892] (0/4) Epoch 16, batch 1700, loss[loss=0.1964, simple_loss=0.2684, pruned_loss=0.06223, over 19772.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2697, pruned_loss=0.06789, over 3951122.26 frames. ], batch size: 69, lr: 9.74e-03, grad_scale: 16.0
+2023-03-28 08:43:26,336 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8687, 3.2039, 3.1707, 3.8730, 2.6599, 3.3467, 2.6394, 2.3806],
+       device='cuda:0'), covar=tensor([0.0437, 0.2241, 0.1120, 0.0314, 0.2090, 0.0615, 0.1257, 0.1873],
+       device='cuda:0'), in_proj_covar=tensor([0.0218, 0.0333, 0.0234, 0.0173, 0.0242, 0.0189, 0.0205, 0.0210],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+       device='cuda:0')
+2023-03-28 08:43:45,472 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29551.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:44:15,331 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29565.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:44:35,363 INFO [train.py:892] (0/4) Epoch 16, batch 1750, loss[loss=0.1934, simple_loss=0.2558, pruned_loss=0.06549, over 19732.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2711, pruned_loss=0.06911, over 3950441.14 frames. ], batch size: 80, lr: 9.73e-03, grad_scale: 16.0
+2023-03-28 08:45:10,754 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29592.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:45:10,973 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9079, 2.8270, 1.6168, 3.3775, 3.1728, 3.3487, 3.4509, 2.7843],
+       device='cuda:0'), covar=tensor([0.0609, 0.0618, 0.1761, 0.0597, 0.0504, 0.0432, 0.0546, 0.0723],
+       device='cuda:0'), in_proj_covar=tensor([0.0131, 0.0128, 0.0136, 0.0133, 0.0116, 0.0114, 0.0129, 0.0132],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:0')
+2023-03-28 08:45:50,177 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29612.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:45:51,952 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7011, 4.2846, 4.3814, 4.7315, 4.2820, 4.7998, 4.7933, 4.9556],
+       device='cuda:0'), covar=tensor([0.0600, 0.0317, 0.0410, 0.0253, 0.0544, 0.0302, 0.0327, 0.0237],
+       device='cuda:0'), in_proj_covar=tensor([0.0134, 0.0154, 0.0178, 0.0149, 0.0150, 0.0133, 0.0133, 0.0172],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+       device='cuda:0')
+2023-03-28 08:45:52,891 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.648e+02 4.260e+02 5.446e+02 6.419e+02 1.506e+03, threshold=1.089e+03, percent-clipped=4.0
+2023-03-28 08:46:14,349 INFO [train.py:892] (0/4) Epoch 16, batch 1800, loss[loss=0.1777, simple_loss=0.2509, pruned_loss=0.05222, over 19802.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.2696, pruned_loss=0.06835, over 3950913.68 frames. ], batch size: 107, lr: 9.72e-03, grad_scale: 16.0
+2023-03-28 08:47:15,958 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29658.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:47:23,459 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29662.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 08:47:46,315 INFO [train.py:892] (0/4) Epoch 16, batch 1850, loss[loss=0.2139, simple_loss=0.2877, pruned_loss=0.0701, over 19829.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2701, pruned_loss=0.06768, over 3949673.65 frames. ], batch size: 57, lr: 9.72e-03, grad_scale: 16.0
+2023-03-28 08:47:53,527 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-16.pt
+2023-03-28 08:48:53,434 INFO [train.py:892] (0/4) Epoch 17, batch 0, loss[loss=0.177, simple_loss=0.2473, pruned_loss=0.05334, over 19689.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2473, pruned_loss=0.05334, over 19689.00 frames. ], batch size: 82, lr: 9.42e-03, grad_scale: 16.0
+2023-03-28 08:48:53,435 INFO [train.py:917] (0/4) Computing validation loss
+2023-03-28 08:49:26,240 INFO [train.py:926] (0/4) Epoch 17, validation: loss=0.1709, simple_loss=0.2495, pruned_loss=0.0462, over 2883724.00 frames.
+2023-03-28 08:49:26,242 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-28 08:50:25,366 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7988, 4.9418, 5.3617, 4.8632, 4.2694, 5.1190, 4.9527, 5.5470], + device='cuda:0'), covar=tensor([0.0969, 0.0332, 0.0368, 0.0337, 0.0700, 0.0397, 0.0395, 0.0264], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0201, 0.0205, 0.0208, 0.0196, 0.0211, 0.0209, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 08:50:27,364 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29706.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:50:46,147 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.983e+02 4.092e+02 5.169e+02 6.324e+02 1.457e+03, threshold=1.034e+03, percent-clipped=3.0 +2023-03-28 08:51:08,286 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29723.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 08:51:25,301 INFO [train.py:892] (0/4) Epoch 17, batch 50, loss[loss=0.1733, simple_loss=0.2397, pruned_loss=0.05344, over 19840.00 frames. ], tot_loss[loss=0.2028, simple_loss=0.2681, pruned_loss=0.06877, over 890661.39 frames. ], batch size: 160, lr: 9.41e-03, grad_scale: 16.0 +2023-03-28 08:53:21,582 INFO [train.py:892] (0/4) Epoch 17, batch 100, loss[loss=0.1737, simple_loss=0.2461, pruned_loss=0.05066, over 19785.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2647, pruned_loss=0.06448, over 1570261.33 frames. ], batch size: 211, lr: 9.41e-03, grad_scale: 16.0 +2023-03-28 08:54:13,291 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29802.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:54:38,181 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.079e+02 4.290e+02 5.009e+02 6.167e+02 1.031e+03, threshold=1.002e+03, percent-clipped=0.0 +2023-03-28 08:55:19,244 INFO [train.py:892] (0/4) Epoch 17, batch 150, loss[loss=0.3438, simple_loss=0.391, pruned_loss=0.1483, over 19176.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2657, pruned_loss=0.06585, over 2098607.97 frames. ], batch size: 452, lr: 9.40e-03, grad_scale: 16.0 +2023-03-28 08:56:29,301 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29863.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:56:34,842 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.0802, 2.3050, 2.4448, 2.2325, 2.1881, 2.4258, 2.1883, 2.3777], + device='cuda:0'), covar=tensor([0.0259, 0.0247, 0.0207, 0.0226, 0.0394, 0.0236, 0.0440, 0.0302], + device='cuda:0'), in_proj_covar=tensor([0.0059, 0.0056, 0.0061, 0.0052, 0.0066, 0.0062, 0.0079, 0.0054], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-03-28 08:57:07,857 INFO [train.py:892] (0/4) Epoch 17, batch 200, loss[loss=0.1764, simple_loss=0.2463, pruned_loss=0.0533, over 19476.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2638, pruned_loss=0.06457, over 2510707.30 frames. 
], batch size: 43, lr: 9.39e-03, grad_scale: 16.0 +2023-03-28 08:57:33,270 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29892.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:58:05,615 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29907.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 08:58:19,263 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.430e+02 4.252e+02 5.177e+02 6.087e+02 1.116e+03, threshold=1.035e+03, percent-clipped=2.0 +2023-03-28 08:58:59,500 INFO [train.py:892] (0/4) Epoch 17, batch 250, loss[loss=0.216, simple_loss=0.2794, pruned_loss=0.07632, over 19700.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2645, pruned_loss=0.06464, over 2830538.97 frames. ], batch size: 265, lr: 9.38e-03, grad_scale: 16.0 +2023-03-28 08:59:21,208 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-28 08:59:22,509 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29940.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 09:00:54,561 INFO [train.py:892] (0/4) Epoch 17, batch 300, loss[loss=0.2029, simple_loss=0.2701, pruned_loss=0.06782, over 19780.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.265, pruned_loss=0.06457, over 3078946.11 frames. ], batch size: 215, lr: 9.37e-03, grad_scale: 16.0 +2023-03-28 09:01:35,922 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-30000.pt +2023-03-28 09:02:01,472 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.0559, 2.3067, 2.4073, 2.1728, 2.2758, 2.3464, 2.2547, 2.3943], + device='cuda:0'), covar=tensor([0.0297, 0.0253, 0.0287, 0.0254, 0.0315, 0.0251, 0.0371, 0.0301], + device='cuda:0'), in_proj_covar=tensor([0.0060, 0.0057, 0.0062, 0.0053, 0.0066, 0.0063, 0.0080, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-03-28 09:02:13,029 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.673e+02 4.129e+02 4.962e+02 6.041e+02 9.266e+02, threshold=9.924e+02, percent-clipped=0.0 +2023-03-28 09:02:14,353 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2230, 2.7106, 3.1258, 2.7424, 3.2318, 3.3084, 4.2784, 4.5675], + device='cuda:0'), covar=tensor([0.0585, 0.1888, 0.1622, 0.2483, 0.1839, 0.1669, 0.0420, 0.0470], + device='cuda:0'), in_proj_covar=tensor([0.0226, 0.0226, 0.0249, 0.0242, 0.0275, 0.0239, 0.0200, 0.0214], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 09:02:24,183 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30018.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 09:02:53,691 INFO [train.py:892] (0/4) Epoch 17, batch 350, loss[loss=0.1972, simple_loss=0.2616, pruned_loss=0.06644, over 19839.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.264, pruned_loss=0.06418, over 3273233.97 frames. 
+2023-03-28 09:04:28,547 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5352, 2.7983, 2.9609, 3.4106, 2.4290, 3.0267, 2.1567, 2.1772],
+ device='cuda:0'), covar=tensor([0.0511, 0.1751, 0.1026, 0.0373, 0.2108, 0.0685, 0.1412, 0.1821],
+ device='cuda:0'), in_proj_covar=tensor([0.0217, 0.0333, 0.0232, 0.0173, 0.0239, 0.0187, 0.0203, 0.0209],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-28 09:04:44,388 INFO [train.py:892] (0/4) Epoch 17, batch 400, loss[loss=0.1641, simple_loss=0.2356, pruned_loss=0.04626, over 19597.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2651, pruned_loss=0.06456, over 3422860.56 frames. ], batch size: 45, lr: 9.36e-03, grad_scale: 16.0
+2023-03-28 09:05:06,558 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0
+2023-03-28 09:05:59,369 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.983e+02 4.154e+02 5.101e+02 6.100e+02 1.061e+03, threshold=1.020e+03, percent-clipped=2.0
+2023-03-28 09:06:25,494 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0220, 3.9667, 4.3888, 3.9290, 3.7621, 4.2150, 3.9818, 4.4368],
+ device='cuda:0'), covar=tensor([0.0843, 0.0329, 0.0307, 0.0348, 0.0936, 0.0471, 0.0469, 0.0316],
+ device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0201, 0.0204, 0.0208, 0.0195, 0.0212, 0.0210, 0.0194],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-28 09:06:25,557 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30125.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:06:38,291 INFO [train.py:892] (0/4) Epoch 17, batch 450, loss[loss=0.208, simple_loss=0.2866, pruned_loss=0.06466, over 19685.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2662, pruned_loss=0.06489, over 3537669.18 frames. ], batch size: 55, lr: 9.35e-03, grad_scale: 16.0
+2023-03-28 09:07:39,452 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0
+2023-03-28 09:07:40,870 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30158.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:07:41,637 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.71 vs. limit=5.0
+2023-03-28 09:08:00,520 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6282, 4.9412, 5.0106, 4.8681, 4.5496, 4.9521, 4.4532, 4.5620],
+ device='cuda:0'), covar=tensor([0.0462, 0.0440, 0.0498, 0.0441, 0.0600, 0.0547, 0.0692, 0.0795],
+ device='cuda:0'), in_proj_covar=tensor([0.0229, 0.0238, 0.0265, 0.0230, 0.0222, 0.0214, 0.0238, 0.0279],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-28 09:08:28,414 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.86 vs. limit=5.0
+2023-03-28 09:08:32,541 INFO [train.py:892] (0/4) Epoch 17, batch 500, loss[loss=0.185, simple_loss=0.2474, pruned_loss=0.06135, over 19866.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2656, pruned_loss=0.06495, over 3629396.58 frames. ], batch size: 136, lr: 9.34e-03, grad_scale: 16.0
+2023-03-28 09:08:43,954 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30186.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:09:26,767 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0686, 2.9795, 1.6882, 3.7032, 3.3794, 3.6687, 3.7330, 2.8974],
+ device='cuda:0'), covar=tensor([0.0535, 0.0580, 0.1782, 0.0505, 0.0492, 0.0348, 0.0525, 0.0727],
+ device='cuda:0'), in_proj_covar=tensor([0.0131, 0.0128, 0.0136, 0.0132, 0.0115, 0.0114, 0.0130, 0.0133],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-28 09:09:30,899 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30207.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:09:35,282 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30209.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:09:45,649 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.879e+02 4.496e+02 5.220e+02 6.735e+02 1.165e+03, threshold=1.044e+03, percent-clipped=2.0
+2023-03-28 09:10:25,871 INFO [train.py:892] (0/4) Epoch 17, batch 550, loss[loss=0.2737, simple_loss=0.3245, pruned_loss=0.1114, over 19607.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2674, pruned_loss=0.06625, over 3699065.11 frames. ], batch size: 351, lr: 9.34e-03, grad_scale: 16.0
+2023-03-28 09:11:18,207 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30255.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:11:53,820 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30270.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:12:19,497 INFO [train.py:892] (0/4) Epoch 17, batch 600, loss[loss=0.1918, simple_loss=0.2603, pruned_loss=0.06165, over 19761.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2695, pruned_loss=0.06728, over 3752461.38 frames. ], batch size: 100, lr: 9.33e-03, grad_scale: 16.0
+2023-03-28 09:13:33,414 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.677e+02 4.467e+02 5.041e+02 6.108e+02 1.228e+03, threshold=1.008e+03, percent-clipped=2.0
+2023-03-28 09:13:44,484 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30318.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 09:13:58,317 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.2849, 1.7183, 1.8564, 2.5838, 2.8022, 2.9614, 2.8723, 2.9383],
+ device='cuda:0'), covar=tensor([0.0959, 0.1933, 0.1525, 0.0608, 0.0469, 0.0310, 0.0332, 0.0325],
+ device='cuda:0'), in_proj_covar=tensor([0.0145, 0.0168, 0.0168, 0.0138, 0.0119, 0.0113, 0.0108, 0.0101],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-28 09:14:12,060 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0
+2023-03-28 09:14:12,606 INFO [train.py:892] (0/4) Epoch 17, batch 650, loss[loss=0.1953, simple_loss=0.2666, pruned_loss=0.06196, over 19785.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2684, pruned_loss=0.06683, over 3796307.48 frames. ], batch size: 233, lr: 9.32e-03, grad_scale: 16.0
+2023-03-28 09:15:18,851 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30360.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:15:30,965 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30366.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:16:03,578 INFO [train.py:892] (0/4) Epoch 17, batch 700, loss[loss=0.1932, simple_loss=0.2566, pruned_loss=0.06491, over 19743.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2669, pruned_loss=0.06587, over 3831041.35 frames. ], batch size: 129, lr: 9.31e-03, grad_scale: 16.0
+2023-03-28 09:16:46,751 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1105, 5.4468, 5.4556, 5.3619, 5.1504, 5.4010, 4.8776, 4.9421],
+ device='cuda:0'), covar=tensor([0.0359, 0.0409, 0.0489, 0.0408, 0.0522, 0.0471, 0.0673, 0.0837],
+ device='cuda:0'), in_proj_covar=tensor([0.0224, 0.0234, 0.0260, 0.0226, 0.0218, 0.0211, 0.0235, 0.0274],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-28 09:17:22,278 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.759e+02 4.180e+02 5.300e+02 6.975e+02 1.622e+03, threshold=1.060e+03, percent-clipped=3.0
+2023-03-28 09:17:39,007 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30421.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:17:59,217 INFO [train.py:892] (0/4) Epoch 17, batch 750, loss[loss=0.1921, simple_loss=0.2645, pruned_loss=0.05984, over 19852.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.266, pruned_loss=0.06526, over 3856670.05 frames. ], batch size: 60, lr: 9.31e-03, grad_scale: 16.0
+2023-03-28 09:19:01,921 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30458.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:19:53,030 INFO [train.py:892] (0/4) Epoch 17, batch 800, loss[loss=0.215, simple_loss=0.2762, pruned_loss=0.07684, over 19837.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2662, pruned_loss=0.06526, over 3876654.42 frames. ], batch size: 166, lr: 9.30e-03, grad_scale: 16.0
+2023-03-28 09:19:53,803 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30481.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:20:52,539 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30506.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:21:10,979 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.987e+02 4.028e+02 4.946e+02 5.803e+02 1.096e+03, threshold=9.891e+02, percent-clipped=1.0
+2023-03-28 09:21:46,489 INFO [train.py:892] (0/4) Epoch 17, batch 850, loss[loss=0.2083, simple_loss=0.2722, pruned_loss=0.07222, over 19757.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2667, pruned_loss=0.06546, over 3893255.44 frames. ], batch size: 125, lr: 9.29e-03, grad_scale: 16.0
+2023-03-28 09:22:49,453 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5813, 3.6172, 2.1872, 3.7741, 3.9217, 1.7193, 3.1426, 2.9455],
+ device='cuda:0'), covar=tensor([0.0704, 0.0812, 0.2826, 0.0692, 0.0451, 0.2926, 0.1159, 0.0791],
+ device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0237, 0.0218, 0.0236, 0.0206, 0.0199, 0.0228, 0.0173],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-28 09:23:05,860 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30565.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:23:40,236 INFO [train.py:892] (0/4) Epoch 17, batch 900, loss[loss=0.1625, simple_loss=0.2326, pruned_loss=0.04619, over 19708.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.265, pruned_loss=0.06467, over 3906906.63 frames. ], batch size: 78, lr: 9.28e-03, grad_scale: 16.0
+2023-03-28 09:24:54,335 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.960e+02 4.086e+02 5.123e+02 6.200e+02 1.238e+03, threshold=1.025e+03, percent-clipped=1.0
+2023-03-28 09:25:34,354 INFO [train.py:892] (0/4) Epoch 17, batch 950, loss[loss=0.1946, simple_loss=0.2654, pruned_loss=0.06186, over 19773.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2653, pruned_loss=0.06483, over 3915352.34 frames. ], batch size: 169, lr: 9.28e-03, grad_scale: 16.0
+2023-03-28 09:26:04,849 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5141, 1.9158, 2.2466, 2.7939, 3.1815, 3.2719, 3.1984, 3.3035],
+ device='cuda:0'), covar=tensor([0.0967, 0.1851, 0.1366, 0.0622, 0.0426, 0.0294, 0.0374, 0.0366],
+ device='cuda:0'), in_proj_covar=tensor([0.0146, 0.0166, 0.0166, 0.0137, 0.0119, 0.0112, 0.0107, 0.0101],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-28 09:26:23,051 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0
+2023-03-28 09:26:28,393 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30655.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:27:25,986 INFO [train.py:892] (0/4) Epoch 17, batch 1000, loss[loss=0.2066, simple_loss=0.2678, pruned_loss=0.0727, over 19780.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2646, pruned_loss=0.06395, over 3923025.51 frames. ], batch size: 241, lr: 9.27e-03, grad_scale: 16.0
+2023-03-28 09:28:42,764 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.493e+02 4.412e+02 5.308e+02 6.255e+02 1.077e+03, threshold=1.062e+03, percent-clipped=1.0
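In the optim.py:368 records, the five numbers after "grad-norm quartiles" read as the minimum, 25th, 50th and 75th percentiles and maximum of recent gradient norms, and the logged threshold tracks Clipping_scale times the median (2.0 * 5.308e+02 = 1.062e+03 in the record just above); "percent-clipped" is the share of batches whose norm exceeded that threshold. A rough reconstruction of those statistics, assuming a plain buffer of recent norms (the optimizer may smooth or update them differently):

    # Hedged sketch of the statistics behind the optim.py:368 lines. The
    # buffer-of-recent-norms model is an illustrative assumption only.
    import torch

    def clipping_stats(recent_grad_norms: torch.Tensor,
                       clipping_scale: float = 2.0):
        q = torch.quantile(recent_grad_norms,
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = clipping_scale * q[2]  # scale times the median
        percent_clipped = 100.0 * (recent_grad_norms > threshold).float().mean()
        return q, threshold, percent_clipped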
+2023-03-28 09:28:47,908 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30716.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:28:48,082 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30716.0, num_to_drop=1, layers_to_drop={3}
+2023-03-28 09:29:20,828 INFO [train.py:892] (0/4) Epoch 17, batch 1050, loss[loss=0.1755, simple_loss=0.2332, pruned_loss=0.0589, over 19831.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2642, pruned_loss=0.06384, over 3931025.77 frames. ], batch size: 121, lr: 9.26e-03, grad_scale: 16.0
+2023-03-28 09:30:56,127 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.86 vs. limit=5.0
+2023-03-28 09:31:15,320 INFO [train.py:892] (0/4) Epoch 17, batch 1100, loss[loss=0.1698, simple_loss=0.246, pruned_loss=0.04677, over 19620.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2656, pruned_loss=0.06433, over 3934265.72 frames. ], batch size: 52, lr: 9.26e-03, grad_scale: 16.0
+2023-03-28 09:31:16,112 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30781.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:32:31,430 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.889e+02 4.403e+02 5.206e+02 6.352e+02 1.143e+03, threshold=1.041e+03, percent-clipped=2.0
+2023-03-28 09:33:04,320 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30829.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:33:08,514 INFO [train.py:892] (0/4) Epoch 17, batch 1150, loss[loss=0.196, simple_loss=0.2607, pruned_loss=0.06567, over 19853.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2662, pruned_loss=0.06485, over 3938000.36 frames. ], batch size: 78, lr: 9.25e-03, grad_scale: 16.0
+2023-03-28 09:33:27,437 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0
+2023-03-28 09:34:25,771 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30865.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:35:00,471 INFO [train.py:892] (0/4) Epoch 17, batch 1200, loss[loss=0.239, simple_loss=0.2983, pruned_loss=0.08982, over 19784.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2667, pruned_loss=0.06524, over 3941445.99 frames. ], batch size: 321, lr: 9.24e-03, grad_scale: 16.0
+2023-03-28 09:35:28,522 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.59 vs. limit=5.0
+2023-03-28 09:35:50,758 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30903.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:36:17,104 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30913.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:36:18,327 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.660e+02 3.834e+02 4.757e+02 6.314e+02 9.786e+02, threshold=9.513e+02, percent-clipped=0.0
+2023-03-28 09:36:38,710 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5594, 2.7476, 3.1206, 2.9300, 2.6368, 2.6310, 2.7463, 2.9838],
+ device='cuda:0'), covar=tensor([0.0341, 0.0244, 0.0207, 0.0215, 0.0292, 0.0323, 0.0304, 0.0236],
+ device='cuda:0'), in_proj_covar=tensor([0.0060, 0.0057, 0.0061, 0.0053, 0.0067, 0.0063, 0.0079, 0.0055],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001],
+ device='cuda:0')
+2023-03-28 09:36:42,553 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30925.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:36:53,360 INFO [train.py:892] (0/4) Epoch 17, batch 1250, loss[loss=0.1983, simple_loss=0.2602, pruned_loss=0.06823, over 19781.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.267, pruned_loss=0.06574, over 3942667.60 frames. ], batch size: 213, lr: 9.23e-03, grad_scale: 16.0
+2023-03-28 09:37:01,608 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30934.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:37:14,596 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7465, 2.6627, 1.5366, 3.2472, 3.0475, 3.1830, 3.2405, 2.5991],
+ device='cuda:0'), covar=tensor([0.0636, 0.0773, 0.1867, 0.0560, 0.0623, 0.0427, 0.0584, 0.0839],
+ device='cuda:0'), in_proj_covar=tensor([0.0133, 0.0131, 0.0137, 0.0135, 0.0116, 0.0116, 0.0132, 0.0135],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-28 09:38:10,891 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30964.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:38:34,929 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0
+2023-03-28 09:38:45,949 INFO [train.py:892] (0/4) Epoch 17, batch 1300, loss[loss=0.2078, simple_loss=0.2742, pruned_loss=0.07075, over 19785.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2672, pruned_loss=0.066, over 3943558.77 frames. ], batch size: 173, lr: 9.23e-03, grad_scale: 16.0
+2023-03-28 09:39:00,802 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30986.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:39:22,092 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30995.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:39:30,444 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0
+2023-03-28 09:39:56,584 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31011.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 09:39:56,709 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31011.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 09:40:01,503 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.052e+02 4.725e+02 5.560e+02 6.623e+02 9.875e+02, threshold=1.112e+03, percent-clipped=1.0
+2023-03-28 09:40:06,158 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31016.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:40:17,647 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0621, 4.1673, 2.4613, 4.3460, 4.5591, 1.9327, 3.7651, 3.3202],
+ device='cuda:0'), covar=tensor([0.0641, 0.0779, 0.2641, 0.0773, 0.0453, 0.2973, 0.1001, 0.0743],
+ device='cuda:0'), in_proj_covar=tensor([0.0215, 0.0239, 0.0220, 0.0239, 0.0211, 0.0201, 0.0232, 0.0177],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-28 09:40:39,842 INFO [train.py:892] (0/4) Epoch 17, batch 1350, loss[loss=0.1786, simple_loss=0.2554, pruned_loss=0.05086, over 19577.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.267, pruned_loss=0.06555, over 3944459.90 frames. ], batch size: 49, lr: 9.22e-03, grad_scale: 16.0
+2023-03-28 09:41:54,495 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31064.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:42:13,084 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31072.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 09:42:31,181 INFO [train.py:892] (0/4) Epoch 17, batch 1400, loss[loss=0.206, simple_loss=0.277, pruned_loss=0.0675, over 19901.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2662, pruned_loss=0.06521, over 3944851.43 frames. ], batch size: 62, lr: 9.21e-03, grad_scale: 16.0
+2023-03-28 09:42:58,763 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2764, 2.4582, 3.7506, 3.3631, 3.7009, 3.8627, 3.6350, 3.5984],
+ device='cuda:0'), covar=tensor([0.0341, 0.0780, 0.0104, 0.0486, 0.0113, 0.0185, 0.0154, 0.0139],
+ device='cuda:0'), in_proj_covar=tensor([0.0086, 0.0094, 0.0076, 0.0146, 0.0072, 0.0086, 0.0080, 0.0073],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-28 09:43:40,800 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.284e+02 4.073e+02 5.032e+02 6.013e+02 1.066e+03, threshold=1.006e+03, percent-clipped=0.0
+2023-03-28 09:44:19,114 INFO [train.py:892] (0/4) Epoch 17, batch 1450, loss[loss=0.225, simple_loss=0.2872, pruned_loss=0.08143, over 19734.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2672, pruned_loss=0.06592, over 3946510.33 frames. ], batch size: 291, lr: 9.20e-03, grad_scale: 32.0
+2023-03-28 09:46:17,141 INFO [train.py:892] (0/4) Epoch 17, batch 1500, loss[loss=0.1948, simple_loss=0.263, pruned_loss=0.06329, over 19711.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2674, pruned_loss=0.06589, over 3946236.33 frames. ], batch size: 81, lr: 9.20e-03, grad_scale: 32.0
+2023-03-28 09:47:32,198 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.841e+02 4.138e+02 4.953e+02 5.911e+02 9.562e+02, threshold=9.905e+02, percent-clipped=0.0
+2023-03-28 09:48:06,362 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.63 vs. limit=5.0
+2023-03-28 09:48:10,817 INFO [train.py:892] (0/4) Epoch 17, batch 1550, loss[loss=0.1681, simple_loss=0.2412, pruned_loss=0.04755, over 19713.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2669, pruned_loss=0.06505, over 3945538.25 frames. ], batch size: 109, lr: 9.19e-03, grad_scale: 32.0
+2023-03-28 09:48:40,230 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0
+2023-03-28 09:48:51,903 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0
+2023-03-28 09:49:12,642 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31259.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:49:45,191 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4002, 3.5338, 3.9027, 4.4599, 3.0181, 3.3366, 2.8601, 2.6353],
+ device='cuda:0'), covar=tensor([0.0446, 0.2382, 0.0847, 0.0292, 0.2036, 0.0941, 0.1195, 0.1753],
+ device='cuda:0'), in_proj_covar=tensor([0.0221, 0.0337, 0.0237, 0.0179, 0.0241, 0.0193, 0.0206, 0.0213],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-28 09:50:00,507 INFO [train.py:892] (0/4) Epoch 17, batch 1600, loss[loss=0.2032, simple_loss=0.2678, pruned_loss=0.06935, over 19799.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.265, pruned_loss=0.06405, over 3946981.90 frames. ], batch size: 172, lr: 9.18e-03, grad_scale: 32.0
+2023-03-28 09:50:02,884 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31281.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:50:23,541 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31290.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:51:07,237 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31311.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:51:07,379 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2619, 3.2746, 3.6117, 2.7543, 3.7673, 3.0080, 3.3535, 3.5010],
+ device='cuda:0'), covar=tensor([0.0668, 0.0417, 0.0531, 0.0712, 0.0250, 0.0352, 0.0393, 0.0341],
+ device='cuda:0'), in_proj_covar=tensor([0.0065, 0.0071, 0.0069, 0.0099, 0.0067, 0.0065, 0.0064, 0.0057],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-28 09:51:12,798 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.169e+02 4.273e+02 5.402e+02 6.708e+02 1.273e+03, threshold=1.080e+03, percent-clipped=3.0
+2023-03-28 09:51:41,228 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9399, 2.5450, 3.1156, 3.3207, 3.8863, 4.3519, 4.1798, 4.3580],
+ device='cuda:0'), covar=tensor([0.0927, 0.1774, 0.1172, 0.0581, 0.0323, 0.0166, 0.0313, 0.0327],
+ device='cuda:0'), in_proj_covar=tensor([0.0146, 0.0166, 0.0166, 0.0138, 0.0119, 0.0113, 0.0107, 0.0102],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-28 09:51:54,929 INFO [train.py:892] (0/4) Epoch 17, batch 1650, loss[loss=0.2448, simple_loss=0.3062, pruned_loss=0.09171, over 19692.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2649, pruned_loss=0.06409, over 3945803.34 frames. ], batch size: 325, lr: 9.18e-03, grad_scale: 32.0
+2023-03-28 09:52:57,198 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31359.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:53:13,376 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31367.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 09:53:43,718 INFO [train.py:892] (0/4) Epoch 17, batch 1700, loss[loss=0.2376, simple_loss=0.301, pruned_loss=0.08713, over 19738.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2643, pruned_loss=0.06354, over 3946507.04 frames. ], batch size: 219, lr: 9.17e-03, grad_scale: 32.0
+2023-03-28 09:54:48,701 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31409.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:54:59,294 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.015e+02 4.580e+02 5.253e+02 6.145e+02 1.351e+03, threshold=1.051e+03, percent-clipped=1.0
+2023-03-28 09:55:32,405 INFO [train.py:892] (0/4) Epoch 17, batch 1750, loss[loss=0.1912, simple_loss=0.2613, pruned_loss=0.06055, over 19732.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.265, pruned_loss=0.06375, over 3946786.29 frames. ], batch size: 219, lr: 9.16e-03, grad_scale: 32.0
+2023-03-28 09:56:11,814 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.9283, 2.2034, 2.0022, 1.3743, 2.0414, 2.1830, 2.0810, 2.1793],
+ device='cuda:0'), covar=tensor([0.0314, 0.0220, 0.0266, 0.0528, 0.0375, 0.0203, 0.0212, 0.0191],
+ device='cuda:0'), in_proj_covar=tensor([0.0078, 0.0074, 0.0082, 0.0087, 0.0090, 0.0065, 0.0062, 0.0065],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-28 09:56:48,036 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31470.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 09:56:57,082 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8526, 2.8354, 2.9940, 2.3409, 3.0195, 2.5993, 2.9583, 2.9292],
+ device='cuda:0'), covar=tensor([0.0484, 0.0365, 0.0428, 0.0696, 0.0329, 0.0334, 0.0367, 0.0320],
+ device='cuda:0'), in_proj_covar=tensor([0.0064, 0.0070, 0.0068, 0.0098, 0.0066, 0.0064, 0.0063, 0.0056],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-28 09:57:07,927 INFO [train.py:892] (0/4) Epoch 17, batch 1800, loss[loss=0.1912, simple_loss=0.2501, pruned_loss=0.06613, over 19850.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.266, pruned_loss=0.06428, over 3945381.95 frames. ], batch size: 145, lr: 9.15e-03, grad_scale: 32.0
+2023-03-28 09:57:26,788 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31491.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 09:57:43,806 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.8842, 2.1825, 1.9704, 1.2813, 1.9568, 2.1105, 2.0050, 2.1666],
+ device='cuda:0'), covar=tensor([0.0274, 0.0194, 0.0248, 0.0548, 0.0362, 0.0202, 0.0183, 0.0182],
+ device='cuda:0'), in_proj_covar=tensor([0.0077, 0.0073, 0.0081, 0.0086, 0.0089, 0.0064, 0.0061, 0.0064],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-28 09:58:07,132 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.684e+02 3.864e+02 5.000e+02 5.920e+02 9.753e+02, threshold=1.000e+03, percent-clipped=0.0
+2023-03-28 09:58:38,262 INFO [train.py:892] (0/4) Epoch 17, batch 1850, loss[loss=0.2115, simple_loss=0.2912, pruned_loss=0.06593, over 19591.00 frames. ], tot_loss[loss=0.197, simple_loss=0.267, pruned_loss=0.0635, over 3944960.54 frames. ], batch size: 53, lr: 9.15e-03, grad_scale: 32.0
+2023-03-28 09:58:45,723 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-17.pt
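The checkpoint.py:75 records show two cadences: a batch-indexed checkpoint every 2000 training batches (checkpoint-30000.pt above, checkpoint-32000.pt further down) and one checkpoint per finished epoch (epoch-17.pt just above). A small helper capturing that naming scheme; the 2000-batch interval is read off the logged filenames, not from the training script itself:

    # Hedged sketch of the checkpoint naming cadence seen in this log.
    from pathlib import Path
    from typing import Optional

    def checkpoint_path(exp_dir: Path, epoch: int,
                        batch_idx: Optional[int] = None,
                        save_every_n: int = 2000) -> Optional[Path]:
        if batch_idx is None:  # end of an epoch
            return exp_dir / f"epoch-{epoch}.pt"
        if batch_idx > 0 and batch_idx % save_every_n == 0:
            return exp_dir / f"checkpoint-{batch_idx}.pt"
        return None  # nothing to save on this batch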
+2023-03-28 09:59:46,576 INFO [train.py:892] (0/4) Epoch 18, batch 0, loss[loss=0.1799, simple_loss=0.2549, pruned_loss=0.05248, over 19888.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2549, pruned_loss=0.05248, over 19888.00 frames. ], batch size: 47, lr: 8.89e-03, grad_scale: 32.0
+2023-03-28 09:59:46,578 INFO [train.py:917] (0/4) Computing validation loss
+2023-03-28 10:00:15,456 INFO [train.py:926] (0/4) Epoch 18, validation: loss=0.171, simple_loss=0.2489, pruned_loss=0.04657, over 2883724.00 frames.
+2023-03-28 10:00:15,457 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB
+2023-03-28 10:00:43,158 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5791, 4.2642, 4.4073, 4.1212, 4.5923, 3.2028, 3.7851, 2.2712],
+ device='cuda:0'), covar=tensor([0.0184, 0.0211, 0.0126, 0.0161, 0.0122, 0.0819, 0.0726, 0.1466],
+ device='cuda:0'), in_proj_covar=tensor([0.0094, 0.0130, 0.0103, 0.0124, 0.0110, 0.0124, 0.0135, 0.0118],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-28 10:00:52,968 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31552.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 10:01:09,321 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31559.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:02:01,490 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31581.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:02:11,843 INFO [train.py:892] (0/4) Epoch 18, batch 50, loss[loss=0.1712, simple_loss=0.2421, pruned_loss=0.05009, over 19775.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2536, pruned_loss=0.05856, over 892912.89 frames. ], batch size: 69, lr: 8.88e-03, grad_scale: 32.0
+2023-03-28 10:02:21,589 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31590.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:02:47,512 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3910, 4.0244, 4.0483, 4.4068, 4.1011, 4.4462, 4.4913, 4.6660],
+ device='cuda:0'), covar=tensor([0.0666, 0.0355, 0.0502, 0.0298, 0.0621, 0.0422, 0.0409, 0.0314],
+ device='cuda:0'), in_proj_covar=tensor([0.0141, 0.0160, 0.0187, 0.0157, 0.0158, 0.0139, 0.0141, 0.0179],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:0')
+2023-03-28 10:02:52,015 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4723, 3.5630, 2.1733, 3.7221, 3.8505, 1.7149, 3.0667, 3.0091],
+ device='cuda:0'), covar=tensor([0.0796, 0.0990, 0.2756, 0.0844, 0.0563, 0.3073, 0.1336, 0.0802],
+ device='cuda:0'), in_proj_covar=tensor([0.0217, 0.0239, 0.0220, 0.0241, 0.0212, 0.0203, 0.0230, 0.0177],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-28 10:03:03,283 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31607.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:03:16,245 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.475e+02 3.973e+02 4.915e+02 6.184e+02 1.019e+03, threshold=9.829e+02, percent-clipped=1.0
+2023-03-28 10:03:52,325 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31629.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:04:05,502 INFO [train.py:892] (0/4) Epoch 18, batch 100, loss[loss=0.2038, simple_loss=0.2746, pruned_loss=0.06653, over 19795.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2552, pruned_loss=0.05966, over 1572159.03 frames. ], batch size: 51, lr: 8.87e-03, grad_scale: 32.0
+2023-03-28 10:04:10,150 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31638.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:04:57,918 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9402, 3.7817, 4.2840, 4.0418, 4.2197, 3.6077, 3.9846, 3.8298],
+ device='cuda:0'), covar=tensor([0.1511, 0.1775, 0.1058, 0.1331, 0.1101, 0.1282, 0.2132, 0.2381],
+ device='cuda:0'), in_proj_covar=tensor([0.0266, 0.0276, 0.0328, 0.0260, 0.0240, 0.0240, 0.0321, 0.0352],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-28 10:05:17,216 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31667.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 10:06:00,558 INFO [train.py:892] (0/4) Epoch 18, batch 150, loss[loss=0.1834, simple_loss=0.2576, pruned_loss=0.05465, over 19651.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2593, pruned_loss=0.06137, over 2099750.29 frames. ], batch size: 47, lr: 8.86e-03, grad_scale: 32.0
+2023-03-28 10:06:42,017 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31704.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:07:06,645 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.935e+02 4.429e+02 5.296e+02 6.261e+02 1.038e+03, threshold=1.059e+03, percent-clipped=5.0
+2023-03-28 10:07:07,532 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31715.0, num_to_drop=1, layers_to_drop={1}
+2023-03-28 10:07:37,014 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2890, 3.1164, 3.2351, 2.3792, 3.4862, 2.7803, 2.9844, 3.3783],
+ device='cuda:0'), covar=tensor([0.0396, 0.0321, 0.0589, 0.0789, 0.0277, 0.0328, 0.0426, 0.0463],
+ device='cuda:0'), in_proj_covar=tensor([0.0065, 0.0071, 0.0069, 0.0100, 0.0066, 0.0066, 0.0065, 0.0057],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-28 10:07:53,075 INFO [train.py:892] (0/4) Epoch 18, batch 200, loss[loss=0.1788, simple_loss=0.2494, pruned_loss=0.05406, over 19837.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2607, pruned_loss=0.06211, over 2510154.25 frames. ], batch size: 75, lr: 8.86e-03, grad_scale: 16.0
+2023-03-28 10:09:00,189 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31765.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:09:00,424 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31765.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:09:47,150 INFO [train.py:892] (0/4) Epoch 18, batch 250, loss[loss=0.2049, simple_loss=0.2724, pruned_loss=0.06873, over 19723.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2599, pruned_loss=0.0616, over 2829937.90 frames. ], batch size: 219, lr: 8.85e-03, grad_scale: 16.0
+2023-03-28 10:09:52,953 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31788.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:10:27,230 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0
+2023-03-28 10:10:52,980 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.400e+02 4.013e+02 4.672e+02 5.827e+02 1.248e+03, threshold=9.344e+02, percent-clipped=1.0
+2023-03-28 10:11:14,258 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31823.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:11:20,161 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1315, 2.0533, 2.4885, 2.4318, 2.1000, 2.3113, 2.2924, 2.4581],
+ device='cuda:0'), covar=tensor([0.0326, 0.0307, 0.0244, 0.0195, 0.0368, 0.0268, 0.0391, 0.0255],
+ device='cuda:0'), in_proj_covar=tensor([0.0061, 0.0057, 0.0061, 0.0053, 0.0067, 0.0062, 0.0079, 0.0055],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001],
+ device='cuda:0')
+2023-03-28 10:11:41,799 INFO [train.py:892] (0/4) Epoch 18, batch 300, loss[loss=0.2153, simple_loss=0.2871, pruned_loss=0.07176, over 19567.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2613, pruned_loss=0.06246, over 3078780.70 frames. ], batch size: 60, lr: 8.84e-03, grad_scale: 16.0
+2023-03-28 10:12:08,201 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31847.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 10:12:12,883 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31849.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:12:34,745 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1114, 4.1819, 4.6061, 4.2002, 4.0524, 4.5382, 4.3449, 4.7511],
+ device='cuda:0'), covar=tensor([0.1286, 0.0455, 0.0520, 0.0420, 0.0865, 0.0508, 0.0525, 0.0413],
+ device='cuda:0'), in_proj_covar=tensor([0.0268, 0.0207, 0.0209, 0.0215, 0.0193, 0.0218, 0.0214, 0.0197],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-28 10:12:42,721 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.8602, 1.9422, 2.1949, 2.2113, 1.9605, 2.0924, 2.1513, 2.1684],
+ device='cuda:0'), covar=tensor([0.0254, 0.0246, 0.0210, 0.0173, 0.0313, 0.0226, 0.0289, 0.0226],
+ device='cuda:0'), in_proj_covar=tensor([0.0060, 0.0056, 0.0060, 0.0052, 0.0066, 0.0061, 0.0077, 0.0054],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001],
+ device='cuda:0')
+2023-03-28 10:13:18,374 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31878.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:13:32,318 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31884.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:13:35,358 INFO [train.py:892] (0/4) Epoch 18, batch 350, loss[loss=0.1772, simple_loss=0.2482, pruned_loss=0.05306, over 19755.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2601, pruned_loss=0.0618, over 3272693.32 frames. ], batch size: 213, lr: 8.84e-03, grad_scale: 16.0
+2023-03-28 10:14:41,485 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.968e+02 4.069e+02 4.792e+02 5.707e+02 1.077e+03, threshold=9.584e+02, percent-clipped=3.0
+2023-03-28 10:14:42,311 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31915.0, num_to_drop=0, layers_to_drop=set()
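The zipformer.py:625 records trace stochastic layer dropping: each encoder stack has its own warmup window (warmup_begin/warmup_end, counted in batches), most batches drop nothing (num_to_drop=0), and occasionally a whole layer is skipped (layers_to_drop={1}), including well after the warmup window has passed. A toy version of such a schedule; the two probabilities are placeholder assumptions, not the values used by zipformer.py:

    # Hedged sketch of per-stack stochastic layer dropping, as suggested by
    # the zipformer.py:625 records. Drop probabilities are illustrative only.
    import random
    from typing import Set

    def pick_layers_to_drop(batch_count: float, warmup_begin: float,
                            warmup_end: float, num_layers: int,
                            warmup_prob: float = 0.075,
                            final_prob: float = 0.025) -> Set[int]:
        in_warmup = warmup_begin <= batch_count < warmup_end
        p = warmup_prob if in_warmup else final_prob
        return {i for i in range(num_layers) if random.random() < p}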
+2023-03-28 10:15:10,503 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0
+2023-03-28 10:15:31,561 INFO [train.py:892] (0/4) Epoch 18, batch 400, loss[loss=0.2021, simple_loss=0.2951, pruned_loss=0.05454, over 19672.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2606, pruned_loss=0.0617, over 3422216.38 frames. ], batch size: 55, lr: 8.83e-03, grad_scale: 16.0
+2023-03-28 10:15:39,002 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31939.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:16:04,073 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31950.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:16:22,207 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0
+2023-03-28 10:17:02,894 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31976.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:17:23,487 INFO [train.py:892] (0/4) Epoch 18, batch 450, loss[loss=0.1932, simple_loss=0.275, pruned_loss=0.0557, over 19958.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2616, pruned_loss=0.06208, over 3539864.83 frames. ], batch size: 53, lr: 8.82e-03, grad_scale: 16.0
+2023-03-28 10:17:57,247 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-32000.pt
+2023-03-28 10:18:28,266 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32011.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:18:36,657 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.048e+02 4.283e+02 4.991e+02 5.829e+02 1.431e+03, threshold=9.983e+02, percent-clipped=4.0
+2023-03-28 10:18:48,764 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-03-28 10:19:01,773 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.4895, 5.8156, 6.0703, 5.8865, 5.6779, 5.5684, 5.7074, 5.6915],
+ device='cuda:0'), covar=tensor([0.1487, 0.1214, 0.0871, 0.0981, 0.0686, 0.0729, 0.2015, 0.1838],
+ device='cuda:0'), in_proj_covar=tensor([0.0271, 0.0282, 0.0335, 0.0262, 0.0245, 0.0243, 0.0326, 0.0358],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-28 10:19:06,506 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3890, 2.4331, 2.6667, 2.5328, 2.8734, 2.8566, 3.3104, 3.4910],
+ device='cuda:0'), covar=tensor([0.0667, 0.1577, 0.1530, 0.1863, 0.1419, 0.1301, 0.0579, 0.0537],
+ device='cuda:0'), in_proj_covar=tensor([0.0224, 0.0225, 0.0246, 0.0237, 0.0272, 0.0236, 0.0200, 0.0216],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-03-28 10:19:26,551 INFO [train.py:892] (0/4) Epoch 18, batch 500, loss[loss=0.1798, simple_loss=0.2594, pruned_loss=0.05004, over 19814.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2619, pruned_loss=0.06226, over 3630822.10 frames. ], batch size: 67, lr: 8.82e-03, grad_scale: 16.0
+2023-03-28 10:19:42,459 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. limit=2.0
+2023-03-28 10:20:20,678 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32060.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:20:33,308 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32065.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:21:12,677 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5417, 3.4054, 3.8147, 2.8705, 3.9325, 3.2862, 3.2212, 3.7427],
+ device='cuda:0'), covar=tensor([0.0696, 0.0359, 0.0409, 0.0652, 0.0351, 0.0271, 0.0503, 0.0312],
+ device='cuda:0'), in_proj_covar=tensor([0.0064, 0.0070, 0.0069, 0.0098, 0.0065, 0.0065, 0.0065, 0.0056],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-28 10:21:22,483 INFO [train.py:892] (0/4) Epoch 18, batch 550, loss[loss=0.1663, simple_loss=0.2425, pruned_loss=0.04505, over 19944.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2618, pruned_loss=0.06238, over 3700423.09 frames. ], batch size: 46, lr: 8.81e-03, grad_scale: 16.0
+2023-03-28 10:22:25,784 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32113.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:22:30,526 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.880e+02 4.056e+02 4.821e+02 6.235e+02 9.910e+02, threshold=9.642e+02, percent-clipped=0.0
+2023-03-28 10:23:18,292 INFO [train.py:892] (0/4) Epoch 18, batch 600, loss[loss=0.202, simple_loss=0.2768, pruned_loss=0.06359, over 19786.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2637, pruned_loss=0.06311, over 3754842.30 frames. ], batch size: 48, lr: 8.80e-03, grad_scale: 16.0
+2023-03-28 10:23:38,007 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32144.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:23:44,558 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32147.0, num_to_drop=1, layers_to_drop={2}
+2023-03-28 10:24:56,427 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32179.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:25:12,431 INFO [train.py:892] (0/4) Epoch 18, batch 650, loss[loss=0.185, simple_loss=0.2539, pruned_loss=0.05803, over 19775.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2623, pruned_loss=0.06242, over 3798114.18 frames. ], batch size: 87, lr: 8.80e-03, grad_scale: 16.0
+2023-03-28 10:25:36,377 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32195.0, num_to_drop=1, layers_to_drop={0}
+2023-03-28 10:25:39,051 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0
+2023-03-28 10:26:20,622 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.941e+02 4.475e+02 5.110e+02 5.947e+02 1.058e+03, threshold=1.022e+03, percent-clipped=2.0
+2023-03-28 10:26:31,920 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0
+2023-03-28 10:26:33,938 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7379, 2.8041, 4.4568, 3.7789, 4.1808, 4.3277, 4.1532, 4.1305],
+ device='cuda:0'), covar=tensor([0.0321, 0.0784, 0.0079, 0.0864, 0.0122, 0.0204, 0.0157, 0.0121],
+ device='cuda:0'), in_proj_covar=tensor([0.0088, 0.0096, 0.0078, 0.0149, 0.0074, 0.0087, 0.0083, 0.0075],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-28 10:27:06,413 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32234.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:27:09,561 INFO [train.py:892] (0/4) Epoch 18, batch 700, loss[loss=0.1925, simple_loss=0.2551, pruned_loss=0.06497, over 19796.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2629, pruned_loss=0.06257, over 3831366.29 frames. ], batch size: 191, lr: 8.79e-03, grad_scale: 16.0
+2023-03-28 10:28:31,620 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32271.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:29:04,745 INFO [train.py:892] (0/4) Epoch 18, batch 750, loss[loss=0.1921, simple_loss=0.2596, pruned_loss=0.06228, over 19807.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2616, pruned_loss=0.06161, over 3857028.14 frames. ], batch size: 86, lr: 8.78e-03, grad_scale: 16.0
+2023-03-28 10:29:52,412 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32306.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:30:09,294 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.935e+02 4.187e+02 5.149e+02 5.952e+02 1.160e+03, threshold=1.030e+03, percent-clipped=2.0
+2023-03-28 10:30:12,481 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3681, 4.3492, 4.7830, 4.5766, 4.6648, 4.2560, 4.5118, 4.3081],
+ device='cuda:0'), covar=tensor([0.1549, 0.1623, 0.1007, 0.1297, 0.0890, 0.0902, 0.2083, 0.2296],
+ device='cuda:0'), in_proj_covar=tensor([0.0269, 0.0282, 0.0336, 0.0261, 0.0244, 0.0241, 0.0324, 0.0359],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-28 10:30:36,374 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6221, 2.7644, 3.1064, 3.0181, 2.6105, 2.4601, 2.7564, 3.0436],
+ device='cuda:0'), covar=tensor([0.0255, 0.0360, 0.0239, 0.0229, 0.0360, 0.0385, 0.0367, 0.0289],
+ device='cuda:0'), in_proj_covar=tensor([0.0062, 0.0057, 0.0062, 0.0054, 0.0068, 0.0063, 0.0080, 0.0055],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001],
+ device='cuda:0')
+2023-03-28 10:30:59,319 INFO [train.py:892] (0/4) Epoch 18, batch 800, loss[loss=0.2096, simple_loss=0.2728, pruned_loss=0.07321, over 19780.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2624, pruned_loss=0.06215, over 3876986.01 frames. ], batch size: 247, lr: 8.78e-03, grad_scale: 16.0
+2023-03-28 10:31:51,765 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32360.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:32:45,009 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3660, 4.4404, 2.6920, 4.6879, 4.9584, 2.0495, 4.0622, 3.5059],
+ device='cuda:0'), covar=tensor([0.0564, 0.0715, 0.2380, 0.0634, 0.0310, 0.2686, 0.0856, 0.0720],
+ device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0237, 0.0218, 0.0236, 0.0206, 0.0199, 0.0228, 0.0173],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-28 10:32:47,925 INFO [train.py:892] (0/4) Epoch 18, batch 850, loss[loss=0.1907, simple_loss=0.2535, pruned_loss=0.06396, over 19791.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2638, pruned_loss=0.06252, over 3891347.49 frames. ], batch size: 174, lr: 8.77e-03, grad_scale: 16.0
+2023-03-28 10:33:20,674 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1830, 4.2612, 2.4686, 4.5263, 4.6941, 1.9771, 3.8383, 3.4071],
+ device='cuda:0'), covar=tensor([0.0641, 0.0809, 0.2750, 0.0738, 0.0457, 0.2814, 0.1079, 0.0779],
+ device='cuda:0'), in_proj_covar=tensor([0.0215, 0.0237, 0.0219, 0.0240, 0.0212, 0.0199, 0.0228, 0.0177],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-28 10:33:36,802 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32408.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:33:53,383 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.074e+02 4.132e+02 4.901e+02 5.957e+02 1.046e+03, threshold=9.802e+02, percent-clipped=1.0
+2023-03-28 10:34:11,246 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8322, 2.7542, 1.5319, 3.2879, 3.0688, 3.2439, 3.3521, 2.6255],
+ device='cuda:0'), covar=tensor([0.0596, 0.0627, 0.1685, 0.0513, 0.0556, 0.0365, 0.0508, 0.0751],
+ device='cuda:0'), in_proj_covar=tensor([0.0130, 0.0129, 0.0136, 0.0134, 0.0117, 0.0115, 0.0130, 0.0134],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-28 10:34:39,912 INFO [train.py:892] (0/4) Epoch 18, batch 900, loss[loss=0.2751, simple_loss=0.3742, pruned_loss=0.08797, over 18778.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2644, pruned_loss=0.06261, over 3903577.55 frames. ], batch size: 564, lr: 8.76e-03, grad_scale: 16.0
+2023-03-28 10:34:40,715 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6720, 3.7133, 4.0938, 3.6699, 3.5339, 3.9452, 3.7522, 4.1711],
+ device='cuda:0'), covar=tensor([0.1110, 0.0413, 0.0416, 0.0452, 0.1123, 0.0563, 0.0556, 0.0374],
+ device='cuda:0'), in_proj_covar=tensor([0.0266, 0.0207, 0.0207, 0.0217, 0.0196, 0.0219, 0.0215, 0.0194],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-28 10:34:57,780 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32444.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:36:19,887 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32479.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:36:36,669 INFO [train.py:892] (0/4) Epoch 18, batch 950, loss[loss=0.1906, simple_loss=0.2613, pruned_loss=0.05996, over 19897.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2643, pruned_loss=0.06269, over 3914140.81 frames. ], batch size: 94, lr: 8.76e-03, grad_scale: 16.0
+2023-03-28 10:36:53,316 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32492.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:37:41,376 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.554e+02 4.412e+02 5.212e+02 6.577e+02 2.177e+03, threshold=1.042e+03, percent-clipped=5.0
+2023-03-28 10:38:01,474 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.91 vs. limit=5.0
+2023-03-28 10:38:11,119 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32527.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:38:28,356 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32534.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:38:32,107 INFO [train.py:892] (0/4) Epoch 18, batch 1000, loss[loss=0.1803, simple_loss=0.2515, pruned_loss=0.05458, over 19725.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2626, pruned_loss=0.06207, over 3923726.68 frames. ], batch size: 71, lr: 8.75e-03, grad_scale: 16.0
+2023-03-28 10:38:39,681 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3209, 4.3258, 2.6696, 4.6051, 4.8237, 2.0941, 3.9184, 3.4708],
+ device='cuda:0'), covar=tensor([0.0561, 0.0724, 0.2595, 0.0646, 0.0373, 0.2844, 0.0917, 0.0746],
+ device='cuda:0'), in_proj_covar=tensor([0.0215, 0.0235, 0.0219, 0.0240, 0.0212, 0.0199, 0.0229, 0.0176],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-28 10:39:52,639 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32571.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:40:16,171 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32582.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:40:24,631 INFO [train.py:892] (0/4) Epoch 18, batch 1050, loss[loss=0.1781, simple_loss=0.2447, pruned_loss=0.05578, over 19738.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2618, pruned_loss=0.06189, over 3930237.14 frames. ], batch size: 118, lr: 8.74e-03, grad_scale: 16.0
+2023-03-28 10:41:12,698 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32606.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:41:30,004 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.098e+02 4.345e+02 4.935e+02 6.065e+02 2.066e+03, threshold=9.870e+02, percent-clipped=4.0
+2023-03-28 10:41:38,186 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32619.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:42:16,177 INFO [train.py:892] (0/4) Epoch 18, batch 1100, loss[loss=0.1609, simple_loss=0.2382, pruned_loss=0.04183, over 19886.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2627, pruned_loss=0.06243, over 3934685.23 frames. ], batch size: 87, lr: 8.74e-03, grad_scale: 16.0
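In each train.py:892 record, loss[...] describes the current batch while tot_loss[...] is a running aggregate over the epoch so far, which is why its "over N frames" count climbs steadily (3934685.23 frames by batch 1100 above) and why tot_loss equals the batch loss again at batch 0 of each new epoch. A sketch of that aggregation, assuming plain frame-weighted averaging (the trainer may apply decay or periodic resets on top of this):

    # Hedged sketch of the tot_loss bookkeeping implied by the train.py:892
    # records; simple frame-weighted averaging is an assumption.
    class RunningLoss:
        def __init__(self) -> None:
            self.weighted_sum = 0.0
            self.num_frames = 0.0

        def update(self, batch_loss: float, batch_frames: float) -> float:
            self.weighted_sum += batch_loss * batch_frames
            self.num_frames += batch_frames
            return self.weighted_sum / self.num_frames  # current tot_loss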
+2023-03-28 10:42:54,906 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32654.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:43:18,961 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9572, 4.0411, 2.3832, 4.3542, 4.4667, 1.8524, 3.5872, 3.2862],
+ device='cuda:0'), covar=tensor([0.0641, 0.0822, 0.2693, 0.0621, 0.0446, 0.2845, 0.1149, 0.0766],
+ device='cuda:0'), in_proj_covar=tensor([0.0218, 0.0239, 0.0222, 0.0242, 0.0216, 0.0201, 0.0233, 0.0178],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-28 10:44:04,802 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1479, 4.8220, 4.8241, 5.2306, 4.7388, 5.4290, 5.2864, 5.4872],
+ device='cuda:0'), covar=tensor([0.0555, 0.0297, 0.0370, 0.0253, 0.0560, 0.0286, 0.0317, 0.0257],
+ device='cuda:0'), in_proj_covar=tensor([0.0137, 0.0157, 0.0181, 0.0153, 0.0157, 0.0137, 0.0136, 0.0177],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:0')
+2023-03-28 10:44:10,629 INFO [train.py:892] (0/4) Epoch 18, batch 1150, loss[loss=0.1713, simple_loss=0.2473, pruned_loss=0.04766, over 19751.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2643, pruned_loss=0.06345, over 3936828.58 frames. ], batch size: 97, lr: 8.73e-03, grad_scale: 16.0
+2023-03-28 10:45:04,002 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7006, 3.3411, 3.4954, 3.7525, 3.4894, 3.7204, 3.8271, 3.9974],
+ device='cuda:0'), covar=tensor([0.0665, 0.0432, 0.0563, 0.0340, 0.0710, 0.0528, 0.0385, 0.0315],
+ device='cuda:0'), in_proj_covar=tensor([0.0137, 0.0158, 0.0182, 0.0154, 0.0157, 0.0137, 0.0137, 0.0178],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:0')
+2023-03-28 10:45:16,102 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.651e+02 4.519e+02 5.355e+02 6.274e+02 1.728e+03, threshold=1.071e+03, percent-clipped=4.0
+2023-03-28 10:46:02,491 INFO [train.py:892] (0/4) Epoch 18, batch 1200, loss[loss=0.1915, simple_loss=0.2562, pruned_loss=0.06336, over 19853.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2649, pruned_loss=0.0637, over 3938476.19 frames. ], batch size: 43, lr: 8.72e-03, grad_scale: 16.0
+2023-03-28 10:46:06,420 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.98 vs. limit=5.0
+2023-03-28 10:46:18,120 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.65 vs. limit=5.0
+2023-03-28 10:47:49,304 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0
+2023-03-28 10:48:00,615 INFO [train.py:892] (0/4) Epoch 18, batch 1250, loss[loss=0.2555, simple_loss=0.3176, pruned_loss=0.09671, over 19717.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2647, pruned_loss=0.06365, over 3941146.31 frames. ], batch size: 295, lr: 8.72e-03, grad_scale: 16.0
+2023-03-28 10:49:03,799 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.128e+02 4.132e+02 4.830e+02 5.865e+02 8.224e+02, threshold=9.660e+02, percent-clipped=0.0
+2023-03-28 10:49:53,731 INFO [train.py:892] (0/4) Epoch 18, batch 1300, loss[loss=0.1934, simple_loss=0.2588, pruned_loss=0.06403, over 19635.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2642, pruned_loss=0.06329, over 3943566.10 frames. ], batch size: 68, lr: 8.71e-03, grad_scale: 16.0
+2023-03-28 10:51:08,430 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.3051, 2.7434, 2.3556, 1.7258, 2.3758, 2.6389, 2.5731, 2.6438],
+ device='cuda:0'), covar=tensor([0.0294, 0.0219, 0.0254, 0.0507, 0.0362, 0.0209, 0.0225, 0.0186],
+ device='cuda:0'), in_proj_covar=tensor([0.0078, 0.0074, 0.0083, 0.0087, 0.0090, 0.0065, 0.0063, 0.0065],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-28 10:51:21,904 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.09 vs. limit=5.0
+2023-03-28 10:51:47,877 INFO [train.py:892] (0/4) Epoch 18, batch 1350, loss[loss=0.1725, simple_loss=0.2496, pruned_loss=0.0477, over 19873.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2636, pruned_loss=0.06244, over 3943354.00 frames. ], batch size: 92, lr: 8.71e-03, grad_scale: 16.0
+2023-03-28 10:52:55,306 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.479e+02 4.452e+02 5.064e+02 6.052e+02 1.021e+03, threshold=1.013e+03, percent-clipped=2.0
+2023-03-28 10:53:44,828 INFO [train.py:892] (0/4) Epoch 18, batch 1400, loss[loss=0.1795, simple_loss=0.2446, pruned_loss=0.05725, over 19816.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2637, pruned_loss=0.06241, over 3943650.57 frames. ], batch size: 148, lr: 8.70e-03, grad_scale: 16.0
+2023-03-28 10:54:20,996 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1665, 3.4076, 3.5071, 4.2798, 2.7381, 3.4505, 2.6647, 2.4464],
+ device='cuda:0'), covar=tensor([0.0496, 0.1979, 0.1045, 0.0324, 0.2231, 0.0766, 0.1335, 0.1940],
+ device='cuda:0'), in_proj_covar=tensor([0.0225, 0.0336, 0.0235, 0.0181, 0.0242, 0.0195, 0.0208, 0.0216],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-28 10:54:52,795 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32966.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:55:38,297 INFO [train.py:892] (0/4) Epoch 18, batch 1450, loss[loss=0.1728, simple_loss=0.2424, pruned_loss=0.0516, over 19652.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2641, pruned_loss=0.06228, over 3943225.39 frames. ], batch size: 79, lr: 8.69e-03, grad_scale: 16.0
+2023-03-28 10:56:44,782 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.979e+02 4.312e+02 5.122e+02 6.077e+02 1.174e+03, threshold=1.024e+03, percent-clipped=2.0
+2023-03-28 10:57:11,834 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33027.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:57:32,874 INFO [train.py:892] (0/4) Epoch 18, batch 1500, loss[loss=0.2123, simple_loss=0.2777, pruned_loss=0.07349, over 19756.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.265, pruned_loss=0.06306, over 3943366.40 frames. ], batch size: 226, lr: 8.69e-03, grad_scale: 16.0
+2023-03-28 10:57:56,210 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5539, 2.5116, 4.0502, 3.5308, 3.8513, 4.0301, 3.8851, 3.8558],
+ device='cuda:0'), covar=tensor([0.0311, 0.0824, 0.0103, 0.0679, 0.0134, 0.0198, 0.0163, 0.0133],
+ device='cuda:0'), in_proj_covar=tensor([0.0089, 0.0096, 0.0079, 0.0150, 0.0075, 0.0088, 0.0083, 0.0076],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-28 10:58:45,667 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33069.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 10:59:25,355 INFO [train.py:892] (0/4) Epoch 18, batch 1550, loss[loss=0.1913, simple_loss=0.2618, pruned_loss=0.06042, over 19925.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2642, pruned_loss=0.06267, over 3945948.40 frames. ], batch size: 51, lr: 8.68e-03, grad_scale: 16.0
+2023-03-28 11:00:25,347 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.832e+02 4.176e+02 4.933e+02 6.239e+02 1.615e+03, threshold=9.866e+02, percent-clipped=3.0
+2023-03-28 11:00:59,130 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33130.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 11:01:09,753 INFO [train.py:892] (0/4) Epoch 18, batch 1600, loss[loss=0.1979, simple_loss=0.2683, pruned_loss=0.0638, over 19789.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2648, pruned_loss=0.06288, over 3946519.98 frames. ], batch size: 236, lr: 8.67e-03, grad_scale: 16.0
+2023-03-28 11:01:53,587 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33155.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 11:02:28,958 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6092, 3.7183, 2.2285, 3.8444, 3.9461, 1.8316, 3.2579, 2.9824],
+ device='cuda:0'), covar=tensor([0.0688, 0.0748, 0.2727, 0.0718, 0.0507, 0.2742, 0.1114, 0.0824],
+ device='cuda:0'), in_proj_covar=tensor([0.0217, 0.0238, 0.0221, 0.0243, 0.0216, 0.0199, 0.0232, 0.0177],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-28 11:02:55,058 INFO [train.py:892] (0/4) Epoch 18, batch 1650, loss[loss=0.184, simple_loss=0.2518, pruned_loss=0.05811, over 19832.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2638, pruned_loss=0.06244, over 3947031.09 frames. ], batch size: 166, lr: 8.67e-03, grad_scale: 16.0
+2023-03-28 11:04:01,026 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.567e+02 4.484e+02 5.220e+02 6.414e+02 1.768e+03, threshold=1.044e+03, percent-clipped=5.0
+2023-03-28 11:04:05,510 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33216.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 11:04:49,992 INFO [train.py:892] (0/4) Epoch 18, batch 1700, loss[loss=0.1957, simple_loss=0.2642, pruned_loss=0.0636, over 19780.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2637, pruned_loss=0.06218, over 3947760.61 frames. ], batch size: 211, lr: 8.66e-03, grad_scale: 16.0
+2023-03-28 11:04:57,506 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4092, 4.1411, 4.2055, 3.9638, 4.4210, 3.0277, 3.6872, 2.1006],
+ device='cuda:0'), covar=tensor([0.0226, 0.0232, 0.0163, 0.0188, 0.0137, 0.0904, 0.0714, 0.1504],
+ device='cuda:0'), in_proj_covar=tensor([0.0095, 0.0133, 0.0106, 0.0125, 0.0111, 0.0128, 0.0138, 0.0120],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-28 11:05:34,185 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1411, 2.8962, 2.9776, 3.1435, 3.0838, 2.9258, 3.2768, 3.4085],
+ device='cuda:0'), covar=tensor([0.0806, 0.0491, 0.0632, 0.0418, 0.0704, 0.0946, 0.0461, 0.0379],
+ device='cuda:0'), in_proj_covar=tensor([0.0140, 0.0161, 0.0185, 0.0156, 0.0160, 0.0140, 0.0139, 0.0179],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:0')
+2023-03-28 11:06:44,503 INFO [train.py:892] (0/4) Epoch 18, batch 1750, loss[loss=0.1839, simple_loss=0.2609, pruned_loss=0.05346, over 19638.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2636, pruned_loss=0.06219, over 3948059.53 frames. ], batch size: 72, lr: 8.65e-03, grad_scale: 16.0
+2023-03-28 11:07:39,429 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6804, 2.5931, 1.4351, 3.1151, 2.8222, 3.0321, 3.0839, 2.4826],
+ device='cuda:0'), covar=tensor([0.0602, 0.0665, 0.1737, 0.0507, 0.0605, 0.0420, 0.0645, 0.0810],
+ device='cuda:0'), in_proj_covar=tensor([0.0131, 0.0129, 0.0136, 0.0132, 0.0117, 0.0116, 0.0128, 0.0133],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-28 11:07:40,442 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 4.001e+02 4.864e+02 5.584e+02 1.277e+03, threshold=9.729e+02, percent-clipped=1.0
+2023-03-28 11:07:54,352 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33322.0, num_to_drop=0, layers_to_drop=set()
+2023-03-28 11:08:21,750 INFO [train.py:892] (0/4) Epoch 18, batch 1800, loss[loss=0.1906, simple_loss=0.2506, pruned_loss=0.06528, over 19798.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.262, pruned_loss=0.06148, over 3949323.25 frames. ], batch size: 195, lr: 8.65e-03, grad_scale: 16.0
+2023-03-28 11:09:11,835 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0
+2023-03-28 11:09:43,733 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.58 vs. limit=5.0
+2023-03-28 11:09:57,883 INFO [train.py:892] (0/4) Epoch 18, batch 1850, loss[loss=0.2184, simple_loss=0.3, pruned_loss=0.06839, over 19833.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2649, pruned_loss=0.06209, over 3948217.77 frames. ], batch size: 57, lr: 8.64e-03, grad_scale: 16.0
+2023-03-28 11:10:05,961 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-18.pt
+2023-03-28 11:11:02,086 INFO [train.py:892] (0/4) Epoch 19, batch 0, loss[loss=0.1927, simple_loss=0.2568, pruned_loss=0.0643, over 19866.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2568, pruned_loss=0.0643, over 19866.00 frames. 
], batch size: 154, lr: 8.41e-03, grad_scale: 16.0 +2023-03-28 11:11:02,088 INFO [train.py:917] (0/4) Computing validation loss +2023-03-28 11:11:35,982 INFO [train.py:926] (0/4) Epoch 19, validation: loss=0.1703, simple_loss=0.2482, pruned_loss=0.04619, over 2883724.00 frames. +2023-03-28 11:11:35,983 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-28 11:12:33,174 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.922e+02 3.899e+02 4.712e+02 6.072e+02 1.255e+03, threshold=9.424e+02, percent-clipped=1.0 +2023-03-28 11:12:54,528 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33425.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:13:17,523 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7578, 3.0427, 3.2363, 3.6342, 2.5353, 3.1458, 2.4601, 2.4503], + device='cuda:0'), covar=tensor([0.0496, 0.2001, 0.0911, 0.0372, 0.2046, 0.0754, 0.1240, 0.1621], + device='cuda:0'), in_proj_covar=tensor([0.0225, 0.0336, 0.0234, 0.0180, 0.0241, 0.0194, 0.0207, 0.0215], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 11:13:32,567 INFO [train.py:892] (0/4) Epoch 19, batch 50, loss[loss=0.1771, simple_loss=0.2495, pruned_loss=0.05238, over 19852.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2491, pruned_loss=0.05444, over 890942.53 frames. ], batch size: 104, lr: 8.40e-03, grad_scale: 16.0 +2023-03-28 11:14:40,744 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33472.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:15:25,827 INFO [train.py:892] (0/4) Epoch 19, batch 100, loss[loss=0.2371, simple_loss=0.3059, pruned_loss=0.08408, over 19623.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2587, pruned_loss=0.05991, over 1568768.44 frames. ], batch size: 351, lr: 8.39e-03, grad_scale: 16.0 +2023-03-28 11:16:09,496 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33511.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:16:17,079 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.445e+02 4.091e+02 4.969e+02 5.976e+02 1.351e+03, threshold=9.939e+02, percent-clipped=3.0 +2023-03-28 11:16:59,132 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33533.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:17:16,228 INFO [train.py:892] (0/4) Epoch 19, batch 150, loss[loss=0.1839, simple_loss=0.2465, pruned_loss=0.06063, over 19847.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2589, pruned_loss=0.05985, over 2096797.66 frames. ], batch size: 137, lr: 8.39e-03, grad_scale: 16.0 +2023-03-28 11:17:43,260 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7924, 2.6031, 2.9628, 2.7238, 3.0776, 3.0261, 3.6902, 3.9389], + device='cuda:0'), covar=tensor([0.0638, 0.1698, 0.1544, 0.2046, 0.1599, 0.1478, 0.0565, 0.0581], + device='cuda:0'), in_proj_covar=tensor([0.0230, 0.0231, 0.0252, 0.0242, 0.0278, 0.0243, 0.0208, 0.0223], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 11:19:06,862 INFO [train.py:892] (0/4) Epoch 19, batch 200, loss[loss=0.1729, simple_loss=0.2469, pruned_loss=0.04945, over 19863.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2593, pruned_loss=0.06013, over 2508681.97 frames. 
], batch size: 46, lr: 8.38e-03, grad_scale: 16.0 +2023-03-28 11:19:09,738 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1299, 2.0438, 2.5886, 2.3208, 2.2992, 2.4224, 2.2658, 2.4576], + device='cuda:0'), covar=tensor([0.0292, 0.0412, 0.0201, 0.0273, 0.0349, 0.0281, 0.0371, 0.0268], + device='cuda:0'), in_proj_covar=tensor([0.0064, 0.0060, 0.0064, 0.0056, 0.0070, 0.0065, 0.0081, 0.0057], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-03-28 11:19:56,414 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.308e+02 4.349e+02 4.912e+02 5.878e+02 1.071e+03, threshold=9.825e+02, percent-clipped=1.0 +2023-03-28 11:20:15,498 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33622.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:20:45,602 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-03-28 11:20:54,825 INFO [train.py:892] (0/4) Epoch 19, batch 250, loss[loss=0.1937, simple_loss=0.2622, pruned_loss=0.06258, over 19743.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2616, pruned_loss=0.06074, over 2825611.91 frames. ], batch size: 259, lr: 8.38e-03, grad_scale: 16.0 +2023-03-28 11:21:47,322 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0370, 3.3701, 2.8602, 2.4515, 2.8709, 3.3354, 3.1326, 3.2740], + device='cuda:0'), covar=tensor([0.0264, 0.0248, 0.0271, 0.0473, 0.0324, 0.0222, 0.0174, 0.0176], + device='cuda:0'), in_proj_covar=tensor([0.0078, 0.0074, 0.0082, 0.0087, 0.0090, 0.0065, 0.0063, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 11:21:58,527 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33670.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:22:13,350 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33677.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:22:15,595 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2964, 4.3626, 2.5050, 4.6945, 4.9034, 2.1482, 4.1262, 3.5325], + device='cuda:0'), covar=tensor([0.0553, 0.0708, 0.2414, 0.0695, 0.0381, 0.2708, 0.0854, 0.0783], + device='cuda:0'), in_proj_covar=tensor([0.0215, 0.0240, 0.0222, 0.0243, 0.0216, 0.0200, 0.0230, 0.0178], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 11:22:29,250 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1111, 2.4508, 3.7476, 3.2865, 3.6332, 3.8334, 3.5398, 3.6609], + device='cuda:0'), covar=tensor([0.0404, 0.0824, 0.0087, 0.0488, 0.0114, 0.0181, 0.0157, 0.0131], + device='cuda:0'), in_proj_covar=tensor([0.0089, 0.0096, 0.0078, 0.0148, 0.0074, 0.0087, 0.0082, 0.0075], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 11:22:42,184 INFO [train.py:892] (0/4) Epoch 19, batch 300, loss[loss=0.1635, simple_loss=0.2389, pruned_loss=0.04409, over 19764.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2616, pruned_loss=0.06051, over 3074033.69 frames. 
], batch size: 122, lr: 8.37e-03, grad_scale: 16.0 +2023-03-28 11:23:39,624 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.947e+02 4.475e+02 5.212e+02 6.221e+02 1.054e+03, threshold=1.042e+03, percent-clipped=4.0 +2023-03-28 11:24:02,772 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33725.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:24:16,385 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. limit=2.0 +2023-03-28 11:24:31,797 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33738.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 11:24:38,598 INFO [train.py:892] (0/4) Epoch 19, batch 350, loss[loss=0.173, simple_loss=0.2434, pruned_loss=0.05135, over 19609.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2622, pruned_loss=0.06075, over 3268252.66 frames. ], batch size: 65, lr: 8.36e-03, grad_scale: 32.0 +2023-03-28 11:24:59,224 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3604, 4.8613, 4.8860, 5.2981, 4.9346, 5.6012, 5.4214, 5.6678], + device='cuda:0'), covar=tensor([0.0556, 0.0350, 0.0399, 0.0287, 0.0632, 0.0268, 0.0371, 0.0263], + device='cuda:0'), in_proj_covar=tensor([0.0135, 0.0158, 0.0181, 0.0152, 0.0157, 0.0136, 0.0137, 0.0174], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 11:25:51,477 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33773.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:25:56,543 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.60 vs. limit=2.0 +2023-03-28 11:26:05,886 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7725, 2.1975, 2.7448, 3.0949, 3.6132, 3.9296, 3.8416, 3.8670], + device='cuda:0'), covar=tensor([0.0975, 0.1909, 0.1340, 0.0654, 0.0396, 0.0223, 0.0281, 0.0346], + device='cuda:0'), in_proj_covar=tensor([0.0152, 0.0171, 0.0173, 0.0143, 0.0125, 0.0116, 0.0109, 0.0106], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 11:26:29,475 INFO [train.py:892] (0/4) Epoch 19, batch 400, loss[loss=0.1813, simple_loss=0.2461, pruned_loss=0.05827, over 19701.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2613, pruned_loss=0.06006, over 3419119.85 frames. 
], batch size: 81, lr: 8.36e-03, grad_scale: 32.0 +2023-03-28 11:27:17,209 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33811.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:27:26,572 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.571e+02 3.967e+02 4.675e+02 5.503e+02 1.191e+03, threshold=9.350e+02, percent-clipped=1.0 +2023-03-28 11:27:27,500 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9643, 3.7909, 3.8377, 3.5915, 3.9544, 2.9251, 3.2540, 1.9885], + device='cuda:0'), covar=tensor([0.0217, 0.0232, 0.0153, 0.0204, 0.0155, 0.0960, 0.0751, 0.1592], + device='cuda:0'), in_proj_covar=tensor([0.0096, 0.0133, 0.0106, 0.0125, 0.0111, 0.0128, 0.0137, 0.0121], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 11:27:37,112 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1578, 4.6906, 4.6742, 5.1132, 4.7574, 5.4153, 5.1885, 5.4275], + device='cuda:0'), covar=tensor([0.0559, 0.0345, 0.0433, 0.0285, 0.0567, 0.0255, 0.0390, 0.0258], + device='cuda:0'), in_proj_covar=tensor([0.0136, 0.0159, 0.0182, 0.0153, 0.0157, 0.0137, 0.0137, 0.0174], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 11:27:37,360 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3097, 3.4522, 4.8843, 3.6178, 3.9585, 3.8824, 2.5029, 2.8176], + device='cuda:0'), covar=tensor([0.0736, 0.2226, 0.0355, 0.0884, 0.1431, 0.0913, 0.2066, 0.2173], + device='cuda:0'), in_proj_covar=tensor([0.0333, 0.0362, 0.0314, 0.0254, 0.0356, 0.0323, 0.0334, 0.0303], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 11:27:39,542 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4919, 4.4994, 2.6768, 4.9023, 5.0396, 2.1192, 4.1392, 3.6280], + device='cuda:0'), covar=tensor([0.0578, 0.0609, 0.2555, 0.0526, 0.0465, 0.2897, 0.0939, 0.0755], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0236, 0.0220, 0.0241, 0.0215, 0.0198, 0.0228, 0.0177], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 11:27:57,224 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33828.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:28:04,234 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.49 vs. limit=5.0 +2023-03-28 11:28:17,493 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=5.08 vs. limit=5.0 +2023-03-28 11:28:24,665 INFO [train.py:892] (0/4) Epoch 19, batch 450, loss[loss=0.1956, simple_loss=0.2607, pruned_loss=0.06527, over 19883.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2616, pruned_loss=0.06037, over 3536959.65 frames. ], batch size: 158, lr: 8.35e-03, grad_scale: 16.0 +2023-03-28 11:29:08,694 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33859.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:30:18,955 INFO [train.py:892] (0/4) Epoch 19, batch 500, loss[loss=0.195, simple_loss=0.2664, pruned_loss=0.0618, over 19739.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2614, pruned_loss=0.06033, over 3628279.70 frames. 
], batch size: 219, lr: 8.35e-03, grad_scale: 16.0 +2023-03-28 11:31:15,654 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.605e+02 4.236e+02 4.841e+02 6.500e+02 9.898e+02, threshold=9.682e+02, percent-clipped=2.0 +2023-03-28 11:31:23,437 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-03-28 11:32:08,561 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33939.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:32:11,679 INFO [train.py:892] (0/4) Epoch 19, batch 550, loss[loss=0.1831, simple_loss=0.2529, pruned_loss=0.05666, over 19740.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2615, pruned_loss=0.0605, over 3699597.56 frames. ], batch size: 80, lr: 8.34e-03, grad_scale: 16.0 +2023-03-28 11:32:43,386 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.58 vs. limit=2.0 +2023-03-28 11:34:04,579 INFO [train.py:892] (0/4) Epoch 19, batch 600, loss[loss=0.172, simple_loss=0.2464, pruned_loss=0.04878, over 19894.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2618, pruned_loss=0.06066, over 3754985.89 frames. ], batch size: 71, lr: 8.33e-03, grad_scale: 16.0 +2023-03-28 11:34:05,631 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8181, 2.1784, 2.4138, 3.1553, 3.5226, 3.6955, 3.6429, 3.6706], + device='cuda:0'), covar=tensor([0.0855, 0.1856, 0.1476, 0.0585, 0.0376, 0.0231, 0.0333, 0.0326], + device='cuda:0'), in_proj_covar=tensor([0.0150, 0.0169, 0.0171, 0.0142, 0.0123, 0.0116, 0.0109, 0.0105], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 11:34:25,599 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-34000.pt +2023-03-28 11:34:30,968 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34000.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:35:01,391 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.382e+02 4.429e+02 5.203e+02 6.509e+02 1.553e+03, threshold=1.041e+03, percent-clipped=5.0 +2023-03-28 11:35:38,203 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.8100, 1.7493, 1.9302, 1.8729, 1.8588, 1.9183, 1.7947, 1.9808], + device='cuda:0'), covar=tensor([0.0270, 0.0343, 0.0279, 0.0198, 0.0328, 0.0253, 0.0396, 0.0206], + device='cuda:0'), in_proj_covar=tensor([0.0064, 0.0060, 0.0065, 0.0056, 0.0070, 0.0065, 0.0082, 0.0057], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:0') +2023-03-28 11:35:42,593 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34033.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 11:36:02,220 INFO [train.py:892] (0/4) Epoch 19, batch 650, loss[loss=0.2402, simple_loss=0.3122, pruned_loss=0.08412, over 19640.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2607, pruned_loss=0.06004, over 3797522.63 frames. 
], batch size: 343, lr: 8.33e-03, grad_scale: 16.0 +2023-03-28 11:36:09,675 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34044.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:36:27,893 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6932, 3.7974, 2.1900, 3.9586, 4.1112, 1.8561, 3.3183, 3.0943], + device='cuda:0'), covar=tensor([0.0678, 0.0718, 0.2688, 0.0688, 0.0439, 0.2743, 0.1049, 0.0769], + device='cuda:0'), in_proj_covar=tensor([0.0215, 0.0237, 0.0220, 0.0242, 0.0215, 0.0198, 0.0228, 0.0178], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 11:37:53,403 INFO [train.py:892] (0/4) Epoch 19, batch 700, loss[loss=0.1764, simple_loss=0.2541, pruned_loss=0.04942, over 19750.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2624, pruned_loss=0.06097, over 3831959.37 frames. ], batch size: 84, lr: 8.32e-03, grad_scale: 16.0 +2023-03-28 11:38:28,798 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34105.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:38:52,051 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.720e+02 4.319e+02 5.100e+02 5.966e+02 1.380e+03, threshold=1.020e+03, percent-clipped=2.0 +2023-03-28 11:39:17,469 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34128.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:39:45,518 INFO [train.py:892] (0/4) Epoch 19, batch 750, loss[loss=0.1837, simple_loss=0.2526, pruned_loss=0.0574, over 19796.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2617, pruned_loss=0.06059, over 3857328.79 frames. ], batch size: 236, lr: 8.32e-03, grad_scale: 16.0 +2023-03-28 11:39:57,310 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34145.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:40:09,955 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. limit=2.0 +2023-03-28 11:41:06,549 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34176.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:41:40,067 INFO [train.py:892] (0/4) Epoch 19, batch 800, loss[loss=0.1949, simple_loss=0.2611, pruned_loss=0.06437, over 19758.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2617, pruned_loss=0.06099, over 3878174.73 frames. 
], batch size: 182, lr: 8.31e-03, grad_scale: 16.0 +2023-03-28 11:42:14,585 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34206.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:42:20,909 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9894, 3.1186, 4.6801, 3.4407, 3.8900, 3.6696, 2.5659, 2.7821], + device='cuda:0'), covar=tensor([0.0997, 0.2979, 0.0417, 0.0906, 0.1460, 0.1209, 0.2178, 0.2340], + device='cuda:0'), in_proj_covar=tensor([0.0334, 0.0364, 0.0316, 0.0254, 0.0357, 0.0327, 0.0335, 0.0304], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 11:42:36,513 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.024e+02 4.346e+02 5.123e+02 5.919e+02 1.290e+03, threshold=1.025e+03, percent-clipped=2.0 +2023-03-28 11:43:11,148 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9165, 4.9500, 5.3510, 5.0807, 5.1496, 4.7666, 5.0447, 4.8979], + device='cuda:0'), covar=tensor([0.1470, 0.1410, 0.0888, 0.1232, 0.0705, 0.0853, 0.1851, 0.2000], + device='cuda:0'), in_proj_covar=tensor([0.0272, 0.0288, 0.0338, 0.0267, 0.0248, 0.0245, 0.0328, 0.0361], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 11:43:36,146 INFO [train.py:892] (0/4) Epoch 19, batch 850, loss[loss=0.1626, simple_loss=0.2379, pruned_loss=0.0437, over 19548.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.262, pruned_loss=0.06114, over 3894771.56 frames. ], batch size: 41, lr: 8.30e-03, grad_scale: 16.0 +2023-03-28 11:44:26,709 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.55 vs. limit=5.0 +2023-03-28 11:45:25,873 INFO [train.py:892] (0/4) Epoch 19, batch 900, loss[loss=0.1599, simple_loss=0.2414, pruned_loss=0.03916, over 19915.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2623, pruned_loss=0.06118, over 3905612.11 frames. ], batch size: 45, lr: 8.30e-03, grad_scale: 8.0 +2023-03-28 11:45:37,839 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34295.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:46:25,647 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.957e+02 4.098e+02 5.258e+02 6.127e+02 1.043e+03, threshold=1.052e+03, percent-clipped=1.0 +2023-03-28 11:47:00,600 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34333.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:47:16,666 INFO [train.py:892] (0/4) Epoch 19, batch 950, loss[loss=0.1683, simple_loss=0.2433, pruned_loss=0.04665, over 19872.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2621, pruned_loss=0.06094, over 3915388.06 frames. ], batch size: 108, lr: 8.29e-03, grad_scale: 8.0 +2023-03-28 11:48:47,835 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34381.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:49:08,919 INFO [train.py:892] (0/4) Epoch 19, batch 1000, loss[loss=0.1885, simple_loss=0.2543, pruned_loss=0.06134, over 19646.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2626, pruned_loss=0.0611, over 3923658.44 frames. ], batch size: 72, lr: 8.29e-03, grad_scale: 8.0 +2023-03-28 11:49:29,376 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34400.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:49:47,516 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. 
limit=2.0 +2023-03-28 11:49:56,479 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.88 vs. limit=5.0 +2023-03-28 11:50:08,244 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.058e+02 4.216e+02 4.828e+02 6.014e+02 1.612e+03, threshold=9.655e+02, percent-clipped=1.0 +2023-03-28 11:51:00,684 INFO [train.py:892] (0/4) Epoch 19, batch 1050, loss[loss=0.2029, simple_loss=0.2703, pruned_loss=0.06769, over 19800.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2621, pruned_loss=0.06095, over 3928853.34 frames. ], batch size: 200, lr: 8.28e-03, grad_scale: 8.0 +2023-03-28 11:51:46,728 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.57 vs. limit=5.0 +2023-03-28 11:52:51,879 INFO [train.py:892] (0/4) Epoch 19, batch 1100, loss[loss=0.1882, simple_loss=0.2632, pruned_loss=0.05663, over 19895.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.261, pruned_loss=0.06027, over 3933855.69 frames. ], batch size: 61, lr: 8.27e-03, grad_scale: 8.0 +2023-03-28 11:52:58,108 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8512, 4.4997, 4.5750, 4.8788, 4.4517, 5.1000, 4.9877, 5.1437], + device='cuda:0'), covar=tensor([0.0669, 0.0311, 0.0411, 0.0289, 0.0715, 0.0297, 0.0363, 0.0265], + device='cuda:0'), in_proj_covar=tensor([0.0138, 0.0160, 0.0184, 0.0154, 0.0159, 0.0139, 0.0139, 0.0177], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 11:53:15,389 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34501.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:53:24,841 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2830, 3.8800, 4.0022, 4.2868, 3.8932, 4.2977, 4.3739, 4.4733], + device='cuda:0'), covar=tensor([0.0619, 0.0431, 0.0549, 0.0317, 0.0775, 0.0507, 0.0465, 0.0342], + device='cuda:0'), in_proj_covar=tensor([0.0138, 0.0160, 0.0184, 0.0154, 0.0159, 0.0140, 0.0140, 0.0177], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 11:53:50,398 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.527e+02 4.200e+02 4.924e+02 5.856e+02 9.802e+02, threshold=9.848e+02, percent-clipped=1.0 +2023-03-28 11:53:54,911 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0989, 2.3359, 3.5408, 3.0627, 3.4645, 3.6449, 3.3666, 3.4342], + device='cuda:0'), covar=tensor([0.0423, 0.0894, 0.0109, 0.0520, 0.0129, 0.0204, 0.0185, 0.0164], + device='cuda:0'), in_proj_covar=tensor([0.0090, 0.0097, 0.0079, 0.0148, 0.0075, 0.0088, 0.0082, 0.0076], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 11:54:11,699 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.09 vs. limit=5.0 +2023-03-28 11:54:41,902 INFO [train.py:892] (0/4) Epoch 19, batch 1150, loss[loss=0.1755, simple_loss=0.2481, pruned_loss=0.05145, over 19888.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2604, pruned_loss=0.06054, over 3938441.80 frames. ], batch size: 176, lr: 8.27e-03, grad_scale: 8.0 +2023-03-28 11:56:35,013 INFO [train.py:892] (0/4) Epoch 19, batch 1200, loss[loss=0.1681, simple_loss=0.2426, pruned_loss=0.04684, over 19826.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2604, pruned_loss=0.0604, over 3941411.89 frames. 
], batch size: 57, lr: 8.26e-03, grad_scale: 8.0 +2023-03-28 11:56:41,373 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34593.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:56:45,556 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34595.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:57:36,125 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.821e+02 4.325e+02 5.082e+02 6.313e+02 1.101e+03, threshold=1.016e+03, percent-clipped=4.0 +2023-03-28 11:57:48,906 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34622.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:57:57,377 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34626.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:58:24,174 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4795, 3.1807, 3.4310, 3.1567, 3.7536, 3.7065, 4.2808, 4.7485], + device='cuda:0'), covar=tensor([0.0502, 0.1485, 0.1414, 0.1995, 0.1721, 0.1236, 0.0516, 0.0520], + device='cuda:0'), in_proj_covar=tensor([0.0230, 0.0229, 0.0252, 0.0243, 0.0279, 0.0243, 0.0207, 0.0225], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 11:58:30,498 INFO [train.py:892] (0/4) Epoch 19, batch 1250, loss[loss=0.2589, simple_loss=0.3474, pruned_loss=0.08516, over 18881.00 frames. ], tot_loss[loss=0.19, simple_loss=0.26, pruned_loss=0.06, over 3942881.34 frames. ], batch size: 514, lr: 8.26e-03, grad_scale: 8.0 +2023-03-28 11:58:35,633 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34643.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:58:59,680 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34654.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 11:59:27,719 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9311, 3.7594, 3.7833, 3.5173, 3.9266, 2.8655, 3.2062, 1.8325], + device='cuda:0'), covar=tensor([0.0232, 0.0241, 0.0155, 0.0214, 0.0143, 0.0960, 0.0698, 0.1573], + device='cuda:0'), in_proj_covar=tensor([0.0097, 0.0134, 0.0106, 0.0127, 0.0112, 0.0128, 0.0139, 0.0122], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 12:00:04,850 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34683.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 12:00:13,825 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34687.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:00:21,329 INFO [train.py:892] (0/4) Epoch 19, batch 1300, loss[loss=0.235, simple_loss=0.2982, pruned_loss=0.08592, over 19760.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2605, pruned_loss=0.06004, over 3945988.80 frames. 
], batch size: 321, lr: 8.25e-03, grad_scale: 8.0 +2023-03-28 12:00:44,727 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34700.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:01:06,102 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34710.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 12:01:21,580 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.806e+02 3.918e+02 4.981e+02 5.928e+02 9.169e+02, threshold=9.963e+02, percent-clipped=0.0 +2023-03-28 12:02:16,235 INFO [train.py:892] (0/4) Epoch 19, batch 1350, loss[loss=0.195, simple_loss=0.2677, pruned_loss=0.06119, over 19843.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2596, pruned_loss=0.05935, over 3947042.42 frames. ], batch size: 58, lr: 8.24e-03, grad_scale: 8.0 +2023-03-28 12:02:32,870 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34748.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:03:20,406 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34771.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 12:04:08,653 INFO [train.py:892] (0/4) Epoch 19, batch 1400, loss[loss=0.1663, simple_loss=0.2416, pruned_loss=0.04552, over 19806.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2593, pruned_loss=0.05921, over 3948635.23 frames. ], batch size: 51, lr: 8.24e-03, grad_scale: 8.0 +2023-03-28 12:04:31,264 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34801.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:05:06,146 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.813e+02 4.248e+02 4.961e+02 6.105e+02 8.838e+02, threshold=9.921e+02, percent-clipped=0.0 +2023-03-28 12:05:56,219 INFO [train.py:892] (0/4) Epoch 19, batch 1450, loss[loss=0.1768, simple_loss=0.2362, pruned_loss=0.05863, over 19745.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2586, pruned_loss=0.05887, over 3949990.61 frames. ], batch size: 139, lr: 8.23e-03, grad_scale: 8.0 +2023-03-28 12:06:16,836 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34849.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:07:51,323 INFO [train.py:892] (0/4) Epoch 19, batch 1500, loss[loss=0.166, simple_loss=0.2432, pruned_loss=0.04436, over 19667.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2593, pruned_loss=0.05918, over 3950202.11 frames. ], batch size: 73, lr: 8.23e-03, grad_scale: 8.0 +2023-03-28 12:08:47,104 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.972e+02 4.472e+02 5.197e+02 6.398e+02 1.021e+03, threshold=1.039e+03, percent-clipped=2.0 +2023-03-28 12:09:34,432 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6720, 2.6836, 2.8549, 2.2182, 2.9429, 2.3681, 2.7551, 3.0207], + device='cuda:0'), covar=tensor([0.0486, 0.0439, 0.0479, 0.0794, 0.0303, 0.0438, 0.0408, 0.0266], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0076, 0.0073, 0.0103, 0.0069, 0.0070, 0.0068, 0.0061], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 12:09:43,743 INFO [train.py:892] (0/4) Epoch 19, batch 1550, loss[loss=0.1989, simple_loss=0.2728, pruned_loss=0.06248, over 19828.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2605, pruned_loss=0.06007, over 3949071.74 frames. 
], batch size: 204, lr: 8.22e-03, grad_scale: 8.0 +2023-03-28 12:10:01,045 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34949.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:11:05,599 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34978.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 12:11:05,828 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4691, 2.7624, 3.8994, 3.0422, 3.3217, 3.1416, 2.2434, 2.3276], + device='cuda:0'), covar=tensor([0.1053, 0.2911, 0.0568, 0.0928, 0.1506, 0.1397, 0.2300, 0.2744], + device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0368, 0.0318, 0.0257, 0.0360, 0.0334, 0.0341, 0.0311], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 12:11:13,777 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34982.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:11:32,654 INFO [train.py:892] (0/4) Epoch 19, batch 1600, loss[loss=0.2545, simple_loss=0.3481, pruned_loss=0.08049, over 18679.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2608, pruned_loss=0.05994, over 3948478.73 frames. ], batch size: 564, lr: 8.22e-03, grad_scale: 8.0 +2023-03-28 12:12:30,718 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 4.214e+02 4.806e+02 5.980e+02 1.376e+03, threshold=9.612e+02, percent-clipped=1.0 +2023-03-28 12:13:27,272 INFO [train.py:892] (0/4) Epoch 19, batch 1650, loss[loss=0.1718, simple_loss=0.2443, pruned_loss=0.04966, over 19754.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.26, pruned_loss=0.05965, over 3949802.50 frames. ], batch size: 110, lr: 8.21e-03, grad_scale: 8.0 +2023-03-28 12:14:22,696 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35066.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 12:14:49,497 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.67 vs. limit=2.0 +2023-03-28 12:15:21,221 INFO [train.py:892] (0/4) Epoch 19, batch 1700, loss[loss=0.1818, simple_loss=0.2412, pruned_loss=0.06118, over 19765.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2606, pruned_loss=0.06004, over 3949573.09 frames. 
], batch size: 152, lr: 8.20e-03, grad_scale: 8.0 +2023-03-28 12:15:24,837 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35092.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:15:45,479 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0321, 2.6895, 3.0268, 3.2832, 3.7993, 4.1660, 4.0608, 4.1696], + device='cuda:0'), covar=tensor([0.0908, 0.1542, 0.1282, 0.0597, 0.0369, 0.0227, 0.0312, 0.0324], + device='cuda:0'), in_proj_covar=tensor([0.0152, 0.0169, 0.0171, 0.0141, 0.0125, 0.0116, 0.0110, 0.0106], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 12:16:04,716 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3143, 5.6058, 5.6507, 5.5769, 5.3657, 5.6375, 5.0848, 5.1420], + device='cuda:0'), covar=tensor([0.0394, 0.0429, 0.0502, 0.0397, 0.0537, 0.0468, 0.0612, 0.0829], + device='cuda:0'), in_proj_covar=tensor([0.0238, 0.0242, 0.0270, 0.0231, 0.0228, 0.0222, 0.0239, 0.0280], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 12:16:20,388 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.976e+02 3.844e+02 4.787e+02 6.177e+02 1.345e+03, threshold=9.574e+02, percent-clipped=3.0 +2023-03-28 12:17:08,469 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-03-28 12:17:09,576 INFO [train.py:892] (0/4) Epoch 19, batch 1750, loss[loss=0.2, simple_loss=0.2701, pruned_loss=0.06498, over 19829.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2598, pruned_loss=0.05959, over 3948988.02 frames. ], batch size: 147, lr: 8.20e-03, grad_scale: 8.0 +2023-03-28 12:17:10,514 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2612, 2.6037, 3.2288, 2.8981, 3.4421, 3.2736, 4.1019, 4.5239], + device='cuda:0'), covar=tensor([0.0579, 0.2079, 0.1625, 0.2283, 0.1609, 0.1688, 0.0634, 0.0569], + device='cuda:0'), in_proj_covar=tensor([0.0231, 0.0228, 0.0251, 0.0241, 0.0276, 0.0243, 0.0209, 0.0225], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 12:17:32,561 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35153.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:18:38,078 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-28 12:18:42,335 INFO [train.py:892] (0/4) Epoch 19, batch 1800, loss[loss=0.1822, simple_loss=0.2643, pruned_loss=0.05009, over 19831.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2588, pruned_loss=0.0589, over 3949456.67 frames. ], batch size: 58, lr: 8.19e-03, grad_scale: 8.0 +2023-03-28 12:19:29,998 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.773e+02 3.767e+02 4.672e+02 5.957e+02 1.054e+03, threshold=9.344e+02, percent-clipped=2.0 +2023-03-28 12:20:12,653 INFO [train.py:892] (0/4) Epoch 19, batch 1850, loss[loss=0.1923, simple_loss=0.2771, pruned_loss=0.05375, over 19842.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2611, pruned_loss=0.05938, over 3948865.96 frames. ], batch size: 58, lr: 8.19e-03, grad_scale: 8.0 +2023-03-28 12:20:20,387 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-19.pt +2023-03-28 12:21:18,362 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. 
limit=2.0 +2023-03-28 12:21:18,792 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-03-28 12:21:19,181 INFO [train.py:892] (0/4) Epoch 20, batch 0, loss[loss=0.2164, simple_loss=0.2866, pruned_loss=0.07306, over 19710.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2866, pruned_loss=0.07306, over 19710.00 frames. ], batch size: 295, lr: 7.98e-03, grad_scale: 8.0 +2023-03-28 12:21:19,182 INFO [train.py:917] (0/4) Computing validation loss +2023-03-28 12:21:49,717 INFO [train.py:926] (0/4) Epoch 20, validation: loss=0.1718, simple_loss=0.2485, pruned_loss=0.04755, over 2883724.00 frames. +2023-03-28 12:21:49,718 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-28 12:21:57,390 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35249.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:22:01,626 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6219, 2.9119, 3.0620, 3.0230, 2.5911, 2.7675, 2.8342, 2.9768], + device='cuda:0'), covar=tensor([0.0274, 0.0291, 0.0216, 0.0177, 0.0368, 0.0301, 0.0352, 0.0320], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0063, 0.0066, 0.0059, 0.0073, 0.0068, 0.0085, 0.0059], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-03-28 12:23:03,191 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35278.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 12:23:11,974 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35282.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:23:42,886 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8123, 4.7959, 5.2989, 4.7528, 4.2885, 5.0508, 4.8671, 5.4579], + device='cuda:0'), covar=tensor([0.1015, 0.0397, 0.0390, 0.0427, 0.0734, 0.0473, 0.0483, 0.0318], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0217, 0.0213, 0.0222, 0.0204, 0.0225, 0.0222, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 12:23:44,082 INFO [train.py:892] (0/4) Epoch 20, batch 50, loss[loss=0.1854, simple_loss=0.2676, pruned_loss=0.05155, over 19793.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2525, pruned_loss=0.05501, over 891136.82 frames. ], batch size: 51, lr: 7.97e-03, grad_scale: 8.0 +2023-03-28 12:23:47,052 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35297.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:24:30,952 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.385e+02 4.131e+02 4.865e+02 6.041e+02 1.249e+03, threshold=9.730e+02, percent-clipped=3.0 +2023-03-28 12:24:53,239 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35326.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:25:03,274 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35330.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:25:03,965 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-28 12:25:26,290 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. 
limit=2.0 +2023-03-28 12:25:38,295 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35345.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 12:25:39,725 INFO [train.py:892] (0/4) Epoch 20, batch 100, loss[loss=0.1953, simple_loss=0.2594, pruned_loss=0.06556, over 19836.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2574, pruned_loss=0.05773, over 1570716.30 frames. ], batch size: 171, lr: 7.96e-03, grad_scale: 8.0 +2023-03-28 12:26:04,124 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1886, 3.3075, 3.5243, 2.7442, 3.7591, 2.9303, 3.0155, 3.7065], + device='cuda:0'), covar=tensor([0.0820, 0.0345, 0.0698, 0.0725, 0.0328, 0.0430, 0.0620, 0.0271], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0075, 0.0073, 0.0102, 0.0068, 0.0069, 0.0068, 0.0060], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 12:26:24,921 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35366.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 12:26:42,292 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35374.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:27:25,801 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.93 vs. limit=5.0 +2023-03-28 12:27:28,761 INFO [train.py:892] (0/4) Epoch 20, batch 150, loss[loss=0.1797, simple_loss=0.2549, pruned_loss=0.05228, over 19842.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2582, pruned_loss=0.05767, over 2098760.43 frames. ], batch size: 49, lr: 7.96e-03, grad_scale: 8.0 +2023-03-28 12:27:54,617 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35406.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 12:28:12,782 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35414.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 12:28:19,114 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.855e+02 4.347e+02 5.138e+02 6.356e+02 9.999e+02, threshold=1.028e+03, percent-clipped=1.0 +2023-03-28 12:28:58,047 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35435.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:29:25,240 INFO [train.py:892] (0/4) Epoch 20, batch 200, loss[loss=0.1652, simple_loss=0.244, pruned_loss=0.0432, over 19814.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2588, pruned_loss=0.05766, over 2509940.12 frames. ], batch size: 103, lr: 7.95e-03, grad_scale: 8.0 +2023-03-28 12:29:30,500 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35448.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:31:20,201 INFO [train.py:892] (0/4) Epoch 20, batch 250, loss[loss=0.1857, simple_loss=0.2538, pruned_loss=0.05878, over 19594.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2603, pruned_loss=0.05929, over 2826946.80 frames. ], batch size: 45, lr: 7.95e-03, grad_scale: 8.0 +2023-03-28 12:32:02,734 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.970e+02 4.157e+02 5.011e+02 5.895e+02 1.207e+03, threshold=1.002e+03, percent-clipped=2.0 +2023-03-28 12:32:35,573 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35530.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:33:07,882 INFO [train.py:892] (0/4) Epoch 20, batch 300, loss[loss=0.3133, simple_loss=0.3691, pruned_loss=0.1287, over 19419.00 frames. 
], tot_loss[loss=0.1925, simple_loss=0.2627, pruned_loss=0.06113, over 3074993.78 frames. ], batch size: 412, lr: 7.94e-03, grad_scale: 8.0 +2023-03-28 12:33:39,243 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.9096, 5.9986, 6.0491, 6.0010, 5.7414, 5.9884, 5.3259, 4.9632], + device='cuda:0'), covar=tensor([0.0633, 0.0830, 0.0764, 0.0703, 0.0983, 0.0881, 0.1327, 0.2565], + device='cuda:0'), in_proj_covar=tensor([0.0241, 0.0246, 0.0272, 0.0236, 0.0236, 0.0225, 0.0244, 0.0286], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 12:34:26,247 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-03-28 12:34:49,215 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35591.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:34:58,976 INFO [train.py:892] (0/4) Epoch 20, batch 350, loss[loss=0.1716, simple_loss=0.2415, pruned_loss=0.05081, over 19877.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2623, pruned_loss=0.06054, over 3268039.10 frames. ], batch size: 84, lr: 7.94e-03, grad_scale: 8.0 +2023-03-28 12:35:45,653 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.727e+02 4.459e+02 5.252e+02 6.207e+02 1.161e+03, threshold=1.050e+03, percent-clipped=2.0 +2023-03-28 12:36:55,520 INFO [train.py:892] (0/4) Epoch 20, batch 400, loss[loss=0.1665, simple_loss=0.2506, pruned_loss=0.04125, over 19889.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2596, pruned_loss=0.05867, over 3419539.33 frames. ], batch size: 52, lr: 7.93e-03, grad_scale: 8.0 +2023-03-28 12:38:46,382 INFO [train.py:892] (0/4) Epoch 20, batch 450, loss[loss=0.1988, simple_loss=0.2673, pruned_loss=0.06518, over 19840.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2603, pruned_loss=0.05876, over 3536258.61 frames. ], batch size: 184, lr: 7.93e-03, grad_scale: 8.0 +2023-03-28 12:38:47,662 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.44 vs. limit=5.0 +2023-03-28 12:38:58,464 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35701.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 12:39:31,593 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.836e+02 4.301e+02 4.934e+02 5.630e+02 1.538e+03, threshold=9.868e+02, percent-clipped=1.0 +2023-03-28 12:39:58,928 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35730.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:40:31,620 INFO [train.py:892] (0/4) Epoch 20, batch 500, loss[loss=0.203, simple_loss=0.274, pruned_loss=0.066, over 19680.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2594, pruned_loss=0.0584, over 3629448.70 frames. ], batch size: 265, lr: 7.92e-03, grad_scale: 8.0 +2023-03-28 12:40:37,899 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35748.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:41:26,147 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35771.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:42:19,102 INFO [train.py:892] (0/4) Epoch 20, batch 550, loss[loss=0.1864, simple_loss=0.2495, pruned_loss=0.06162, over 19773.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2588, pruned_loss=0.05863, over 3701584.62 frames. 
], batch size: 130, lr: 7.92e-03, grad_scale: 8.0 +2023-03-28 12:42:19,788 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35796.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:42:51,754 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35811.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:43:04,498 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.312e+02 4.108e+02 4.834e+02 5.739e+02 9.285e+02, threshold=9.669e+02, percent-clipped=0.0 +2023-03-28 12:43:11,772 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-03-28 12:43:18,543 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-03-28 12:43:35,040 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3391, 3.2742, 1.8990, 4.0879, 3.5542, 4.0276, 4.0793, 3.1381], + device='cuda:0'), covar=tensor([0.0585, 0.0627, 0.1627, 0.0598, 0.0601, 0.0320, 0.0475, 0.0733], + device='cuda:0'), in_proj_covar=tensor([0.0134, 0.0134, 0.0138, 0.0136, 0.0121, 0.0121, 0.0133, 0.0137], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 12:43:39,264 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35832.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:44:08,194 INFO [train.py:892] (0/4) Epoch 20, batch 600, loss[loss=0.2082, simple_loss=0.273, pruned_loss=0.07168, over 19754.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2588, pruned_loss=0.05883, over 3757124.94 frames. ], batch size: 276, lr: 7.91e-03, grad_scale: 8.0 +2023-03-28 12:45:06,720 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35872.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:45:28,040 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4417, 3.3982, 3.7451, 3.4321, 3.2553, 3.6616, 3.5481, 3.8029], + device='cuda:0'), covar=tensor([0.0943, 0.0384, 0.0386, 0.0411, 0.1302, 0.0511, 0.0429, 0.0371], + device='cuda:0'), in_proj_covar=tensor([0.0278, 0.0217, 0.0214, 0.0223, 0.0202, 0.0225, 0.0223, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 12:45:36,125 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35886.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:45:54,694 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7786, 4.7865, 5.1689, 5.0195, 5.0023, 4.5130, 4.9086, 4.7643], + device='cuda:0'), covar=tensor([0.1492, 0.1429, 0.0885, 0.1181, 0.0774, 0.0983, 0.1871, 0.1888], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0291, 0.0337, 0.0272, 0.0253, 0.0248, 0.0326, 0.0359], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 12:45:55,668 INFO [train.py:892] (0/4) Epoch 20, batch 650, loss[loss=0.1906, simple_loss=0.2452, pruned_loss=0.06799, over 19834.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2591, pruned_loss=0.05956, over 3799803.76 frames. ], batch size: 128, lr: 7.90e-03, grad_scale: 8.0 +2023-03-28 12:46:03,252 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.66 vs. 
limit=5.0 +2023-03-28 12:46:10,621 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3019, 4.5180, 2.7262, 4.7448, 4.9959, 2.0829, 4.0567, 3.5170], + device='cuda:0'), covar=tensor([0.0640, 0.0701, 0.2429, 0.0783, 0.0393, 0.2842, 0.1056, 0.0869], + device='cuda:0'), in_proj_covar=tensor([0.0220, 0.0242, 0.0223, 0.0249, 0.0220, 0.0202, 0.0230, 0.0181], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 12:46:29,816 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1257, 5.4451, 5.4708, 5.3620, 5.1271, 5.4755, 4.8806, 5.0515], + device='cuda:0'), covar=tensor([0.0406, 0.0384, 0.0439, 0.0379, 0.0513, 0.0458, 0.0643, 0.0815], + device='cuda:0'), in_proj_covar=tensor([0.0236, 0.0242, 0.0266, 0.0231, 0.0232, 0.0223, 0.0239, 0.0279], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 12:46:42,644 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.720e+02 4.236e+02 5.120e+02 6.447e+02 1.973e+03, threshold=1.024e+03, percent-clipped=4.0 +2023-03-28 12:47:47,275 INFO [train.py:892] (0/4) Epoch 20, batch 700, loss[loss=0.3257, simple_loss=0.3761, pruned_loss=0.1377, over 19419.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2615, pruned_loss=0.06076, over 3832822.34 frames. ], batch size: 412, lr: 7.90e-03, grad_scale: 8.0 +2023-03-28 12:48:02,478 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-03-28 12:49:38,998 INFO [train.py:892] (0/4) Epoch 20, batch 750, loss[loss=0.1906, simple_loss=0.2726, pruned_loss=0.05427, over 19874.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.263, pruned_loss=0.0612, over 3858648.42 frames. 
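
The frame counts attached to tot_loss climb quickly early in the epoch (about 3.07M at batch 300, 3.83M at batch 700 above) and then plateau just below 3.95M. That shape matches an exponentially decayed running sum rather than a plain cumulative average: with a decay of 1 - 1/200 and roughly 19.7k frames per batch, the count saturates near 200 * 19.7k, about 3.94M, which is what the log shows. A sketch of that bookkeeping, with the decay constant assumed from the observed plateau rather than read from the code:

```python
class DecayedLossTracker:
    """Running sum of (loss * frames, frames) with per-batch decay."""

    def __init__(self, reset_interval: int = 200) -> None:  # assumed interval
        self.decay = 1.0 - 1.0 / reset_interval
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss: float, batch_frames: float) -> None:
        self.loss_sum = self.loss_sum * self.decay + batch_loss * batch_frames
        self.frames = self.frames * self.decay + batch_frames

    @property
    def tot_loss(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)

tracker = DecayedLossTracker()
for _ in range(800):
    tracker.update(0.19, 19700.0)  # illustrative per-batch numbers
print(f"{tracker.tot_loss:.4f} over {tracker.frames:.2f} frames")
# frames plateau near reset_interval * 19700, i.e. ~3.94M, as in the log
```
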
], batch size: 64, lr: 7.89e-03, grad_scale: 8.0 +2023-03-28 12:49:46,716 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8850, 3.8352, 4.1592, 4.9858, 3.2166, 3.4357, 3.0911, 2.9192], + device='cuda:0'), covar=tensor([0.0385, 0.2452, 0.0849, 0.0298, 0.2024, 0.0973, 0.1181, 0.1708], + device='cuda:0'), in_proj_covar=tensor([0.0226, 0.0331, 0.0238, 0.0182, 0.0241, 0.0195, 0.0209, 0.0214], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 12:49:48,141 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-36000.pt +2023-03-28 12:49:57,052 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36001.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 12:50:31,042 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.681e+02 4.396e+02 5.340e+02 6.377e+02 1.000e+03, threshold=1.068e+03, percent-clipped=0.0 +2023-03-28 12:51:01,260 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36030.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:51:29,402 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6113, 3.6336, 2.0564, 4.3369, 3.8721, 4.2350, 4.2265, 3.3694], + device='cuda:0'), covar=tensor([0.0511, 0.0476, 0.1566, 0.0515, 0.0519, 0.0369, 0.0739, 0.0670], + device='cuda:0'), in_proj_covar=tensor([0.0135, 0.0134, 0.0139, 0.0137, 0.0121, 0.0122, 0.0134, 0.0138], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 12:51:36,006 INFO [train.py:892] (0/4) Epoch 20, batch 800, loss[loss=0.1826, simple_loss=0.2533, pruned_loss=0.05602, over 19828.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2635, pruned_loss=0.06142, over 3878467.58 frames. ], batch size: 121, lr: 7.89e-03, grad_scale: 8.0 +2023-03-28 12:51:42,515 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36049.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 12:52:48,749 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36078.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:53:26,896 INFO [train.py:892] (0/4) Epoch 20, batch 850, loss[loss=0.1814, simple_loss=0.2411, pruned_loss=0.0609, over 19814.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2618, pruned_loss=0.06045, over 3895039.07 frames. ], batch size: 148, lr: 7.88e-03, grad_scale: 8.0 +2023-03-28 12:53:36,872 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7095, 2.6444, 4.2767, 3.1330, 3.4256, 3.2826, 2.2347, 2.3261], + device='cuda:0'), covar=tensor([0.1054, 0.3350, 0.0518, 0.0987, 0.1832, 0.1358, 0.2457, 0.3004], + device='cuda:0'), in_proj_covar=tensor([0.0335, 0.0367, 0.0320, 0.0259, 0.0360, 0.0334, 0.0342, 0.0311], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 12:54:15,634 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.913e+02 4.436e+02 5.161e+02 6.355e+02 1.299e+03, threshold=1.032e+03, percent-clipped=1.0 +2023-03-28 12:54:36,269 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36127.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:55:19,436 INFO [train.py:892] (0/4) Epoch 20, batch 900, loss[loss=0.1962, simple_loss=0.266, pruned_loss=0.06318, over 19693.00 frames. 
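
The "Saving checkpoint to ... checkpoint-36000.pt" entry above, together with checkpoint-38000.pt later in this log and the per-epoch epoch-20.pt / epoch-21.pt files, points to two checkpointing rules: one at every epoch boundary and one every 2000 training batches (the gap between the two numbered checkpoints). A hedged sketch of the batch-count rule; the function name, arguments, and file layout are illustrative, not the actual checkpoint.py code:

```python
from pathlib import Path

import torch

def maybe_save_checkpoint(model, exp_dir: Path, batch_idx_train: int,
                          save_every_n: int = 2000) -> None:
    # save_every_n = 2000 is assumed from the gap between the
    # checkpoint-36000.pt and checkpoint-38000.pt files in this log.
    if batch_idx_train == 0 or batch_idx_train % save_every_n != 0:
        return
    ckpt = exp_dir / f"checkpoint-{batch_idx_train}.pt"
    torch.save({"model": model.state_dict(),
                "batch_idx_train": batch_idx_train}, ckpt)
```
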
], tot_loss[loss=0.1906, simple_loss=0.2613, pruned_loss=0.05995, over 3907316.88 frames. ], batch size: 75, lr: 7.88e-03, grad_scale: 8.0 +2023-03-28 12:55:33,675 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3876, 4.5245, 2.6628, 4.7347, 4.9490, 2.0897, 4.1675, 3.6704], + device='cuda:0'), covar=tensor([0.0637, 0.0687, 0.2530, 0.0665, 0.0456, 0.2716, 0.0850, 0.0728], + device='cuda:0'), in_proj_covar=tensor([0.0219, 0.0241, 0.0223, 0.0249, 0.0221, 0.0200, 0.0228, 0.0180], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 12:56:08,431 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36167.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:56:39,319 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36181.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:56:51,677 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36186.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:57:11,792 INFO [train.py:892] (0/4) Epoch 20, batch 950, loss[loss=0.2126, simple_loss=0.2838, pruned_loss=0.07067, over 19707.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2606, pruned_loss=0.05955, over 3917522.63 frames. ], batch size: 305, lr: 7.87e-03, grad_scale: 8.0 +2023-03-28 12:57:58,659 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.907e+02 4.513e+02 5.155e+02 5.945e+02 1.102e+03, threshold=1.031e+03, percent-clipped=1.0 +2023-03-28 12:58:39,632 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36234.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:58:57,619 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36242.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 12:59:05,358 INFO [train.py:892] (0/4) Epoch 20, batch 1000, loss[loss=0.1827, simple_loss=0.2542, pruned_loss=0.05566, over 19778.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2606, pruned_loss=0.05959, over 3924078.64 frames. ], batch size: 70, lr: 7.87e-03, grad_scale: 8.0 +2023-03-28 12:59:37,031 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-03-28 13:01:03,796 INFO [train.py:892] (0/4) Epoch 20, batch 1050, loss[loss=0.2751, simple_loss=0.3735, pruned_loss=0.08831, over 17802.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2612, pruned_loss=0.05963, over 3926823.95 frames. ], batch size: 633, lr: 7.86e-03, grad_scale: 16.0 +2023-03-28 13:01:49,681 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.691e+02 4.131e+02 5.046e+02 5.738e+02 1.025e+03, threshold=1.009e+03, percent-clipped=0.0 +2023-03-28 13:02:56,403 INFO [train.py:892] (0/4) Epoch 20, batch 1100, loss[loss=0.1696, simple_loss=0.2425, pruned_loss=0.04835, over 19804.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2608, pruned_loss=0.05923, over 3931757.88 frames. ], batch size: 47, lr: 7.86e-03, grad_scale: 16.0 +2023-03-28 13:04:48,935 INFO [train.py:892] (0/4) Epoch 20, batch 1150, loss[loss=0.1944, simple_loss=0.2634, pruned_loss=0.06266, over 19756.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2604, pruned_loss=0.05892, over 3934724.47 frames. ], batch size: 139, lr: 7.85e-03, grad_scale: 16.0 +2023-03-28 13:04:50,260 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. 
limit=2.0 +2023-03-28 13:05:23,543 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9240, 2.2826, 3.5833, 3.1790, 3.6287, 3.7069, 3.4949, 3.4608], + device='cuda:0'), covar=tensor([0.0458, 0.0891, 0.0114, 0.0482, 0.0110, 0.0219, 0.0183, 0.0176], + device='cuda:0'), in_proj_covar=tensor([0.0089, 0.0097, 0.0080, 0.0148, 0.0075, 0.0089, 0.0083, 0.0076], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 13:05:36,782 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.688e+02 4.392e+02 5.039e+02 5.876e+02 1.119e+03, threshold=1.008e+03, percent-clipped=1.0 +2023-03-28 13:06:00,984 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36427.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:06:43,082 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9812, 3.0350, 4.3743, 3.3414, 3.5709, 3.5260, 2.3531, 2.5864], + device='cuda:0'), covar=tensor([0.0945, 0.3280, 0.0516, 0.0945, 0.1682, 0.1318, 0.2360, 0.2641], + device='cuda:0'), in_proj_covar=tensor([0.0333, 0.0367, 0.0321, 0.0258, 0.0359, 0.0333, 0.0340, 0.0309], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 13:06:43,829 INFO [train.py:892] (0/4) Epoch 20, batch 1200, loss[loss=0.1839, simple_loss=0.2539, pruned_loss=0.05691, over 19739.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2595, pruned_loss=0.05851, over 3938965.05 frames. ], batch size: 92, lr: 7.85e-03, grad_scale: 16.0 +2023-03-28 13:07:15,482 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1794, 4.8802, 4.8708, 5.0881, 4.7402, 5.3901, 5.3023, 5.5046], + device='cuda:0'), covar=tensor([0.0640, 0.0240, 0.0365, 0.0225, 0.0578, 0.0282, 0.0318, 0.0231], + device='cuda:0'), in_proj_covar=tensor([0.0142, 0.0162, 0.0185, 0.0160, 0.0159, 0.0144, 0.0140, 0.0182], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 13:07:31,445 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36467.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:07:49,866 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36475.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:08:37,660 INFO [train.py:892] (0/4) Epoch 20, batch 1250, loss[loss=0.1664, simple_loss=0.2378, pruned_loss=0.0475, over 19832.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2593, pruned_loss=0.05821, over 3940598.98 frames. ], batch size: 76, lr: 7.84e-03, grad_scale: 16.0 +2023-03-28 13:09:22,371 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36515.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:09:27,477 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.037e+02 4.208e+02 4.891e+02 6.117e+02 9.708e+02, threshold=9.782e+02, percent-clipped=0.0 +2023-03-28 13:09:50,148 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. 
limit=2.0 +2023-03-28 13:10:11,419 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36537.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:10:28,744 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4205, 2.7567, 2.5247, 1.9958, 2.4831, 2.7491, 2.6465, 2.7622], + device='cuda:0'), covar=tensor([0.0281, 0.0275, 0.0269, 0.0509, 0.0354, 0.0286, 0.0268, 0.0191], + device='cuda:0'), in_proj_covar=tensor([0.0085, 0.0079, 0.0087, 0.0090, 0.0093, 0.0069, 0.0068, 0.0070], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 13:10:29,801 INFO [train.py:892] (0/4) Epoch 20, batch 1300, loss[loss=0.1832, simple_loss=0.2593, pruned_loss=0.05352, over 19808.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2596, pruned_loss=0.05828, over 3942975.70 frames. ], batch size: 103, lr: 7.84e-03, grad_scale: 16.0 +2023-03-28 13:11:25,637 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9496, 4.0294, 2.3382, 4.2900, 4.4180, 1.9276, 3.6976, 3.3277], + device='cuda:0'), covar=tensor([0.0664, 0.0829, 0.2604, 0.0702, 0.0433, 0.2919, 0.0958, 0.0765], + device='cuda:0'), in_proj_covar=tensor([0.0218, 0.0241, 0.0222, 0.0250, 0.0221, 0.0200, 0.0230, 0.0181], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 13:12:24,050 INFO [train.py:892] (0/4) Epoch 20, batch 1350, loss[loss=0.1746, simple_loss=0.235, pruned_loss=0.05705, over 19866.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2599, pruned_loss=0.05853, over 3944822.10 frames. ], batch size: 129, lr: 7.83e-03, grad_scale: 16.0 +2023-03-28 13:12:45,718 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36605.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:12:51,660 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36608.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:13:00,683 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36612.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:13:12,351 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.844e+02 4.275e+02 5.272e+02 6.307e+02 1.102e+03, threshold=1.054e+03, percent-clipped=1.0 +2023-03-28 13:14:16,020 INFO [train.py:892] (0/4) Epoch 20, batch 1400, loss[loss=0.1706, simple_loss=0.2435, pruned_loss=0.04883, over 19860.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2593, pruned_loss=0.05806, over 3944495.47 frames. 
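
In every optim.py entry here, the reported threshold is exactly Clipping_scale (2.0) times the middle of the five grad-norm statistics, e.g. threshold=9.782e+02 = 2 * 4.891e+02 just above. So the five values evidently run min, 25%, 50%, 75%, max over a window of recent gradient norms, and clipping triggers relative to the running median. A sketch of how such statistics could be maintained; the window length and exact bookkeeping are assumptions:

```python
from collections import deque

import numpy as np

class GradNormStats:
    """Track recent gradient norms and derive a clipping threshold."""

    def __init__(self, clipping_scale: float = 2.0, window: int = 400) -> None:
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)  # assumed window size
        self.clipped = 0
        self.total = 0

    def update(self, grad_norm: float) -> float:
        self.norms.append(grad_norm)
        q = np.percentile(self.norms, [0, 25, 50, 75, 100])
        threshold = self.clipping_scale * q[2]  # scale * running median
        self.total += 1
        if grad_norm > threshold:
            self.clipped += 1
        return threshold

    def percent_clipped(self) -> float:
        # Analogous to the "percent-clipped" field in the log entries.
        return 100.0 * self.clipped / max(self.total, 1)
```
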
], batch size: 104, lr: 7.82e-03, grad_scale: 16.0 +2023-03-28 13:14:21,283 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8080, 2.6792, 4.3394, 3.7932, 4.2252, 4.3514, 4.2396, 4.1078], + device='cuda:0'), covar=tensor([0.0325, 0.0883, 0.0106, 0.0799, 0.0116, 0.0207, 0.0149, 0.0159], + device='cuda:0'), in_proj_covar=tensor([0.0090, 0.0097, 0.0080, 0.0149, 0.0075, 0.0089, 0.0083, 0.0077], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 13:14:27,225 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7627, 2.1286, 2.7518, 3.1162, 3.5220, 3.7853, 3.7798, 3.7954], + device='cuda:0'), covar=tensor([0.0947, 0.1862, 0.1266, 0.0654, 0.0414, 0.0254, 0.0284, 0.0413], + device='cuda:0'), in_proj_covar=tensor([0.0151, 0.0167, 0.0170, 0.0142, 0.0125, 0.0118, 0.0110, 0.0106], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 13:15:02,102 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36666.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:15:09,233 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36669.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:15:16,921 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36673.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:16:06,769 INFO [train.py:892] (0/4) Epoch 20, batch 1450, loss[loss=0.1827, simple_loss=0.2577, pruned_loss=0.05387, over 19616.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.258, pruned_loss=0.05731, over 3947390.80 frames. ], batch size: 51, lr: 7.82e-03, grad_scale: 16.0 +2023-03-28 13:16:17,527 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36700.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 13:16:54,914 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.742e+02 4.075e+02 4.928e+02 6.205e+02 1.251e+03, threshold=9.857e+02, percent-clipped=1.0 +2023-03-28 13:17:56,086 INFO [train.py:892] (0/4) Epoch 20, batch 1500, loss[loss=0.1698, simple_loss=0.2385, pruned_loss=0.05056, over 19771.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2586, pruned_loss=0.05763, over 3948008.04 frames. ], batch size: 70, lr: 7.81e-03, grad_scale: 16.0 +2023-03-28 13:18:05,680 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2673, 3.9156, 4.0802, 4.2447, 3.9886, 4.2932, 4.4030, 4.5515], + device='cuda:0'), covar=tensor([0.0622, 0.0384, 0.0541, 0.0339, 0.0595, 0.0494, 0.0387, 0.0302], + device='cuda:0'), in_proj_covar=tensor([0.0140, 0.0161, 0.0184, 0.0159, 0.0158, 0.0143, 0.0141, 0.0180], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 13:18:30,375 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36761.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 13:18:40,218 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-03-28 13:19:49,260 INFO [train.py:892] (0/4) Epoch 20, batch 1550, loss[loss=0.2197, simple_loss=0.2885, pruned_loss=0.07546, over 19648.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2597, pruned_loss=0.05845, over 3947642.79 frames. 
], batch size: 330, lr: 7.81e-03, grad_scale: 16.0 +2023-03-28 13:20:07,262 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6371, 3.1027, 2.7063, 2.2096, 2.8244, 3.0441, 2.9802, 3.1023], + device='cuda:0'), covar=tensor([0.0276, 0.0245, 0.0235, 0.0492, 0.0300, 0.0239, 0.0217, 0.0170], + device='cuda:0'), in_proj_covar=tensor([0.0084, 0.0079, 0.0086, 0.0089, 0.0093, 0.0069, 0.0068, 0.0069], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 13:20:09,765 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36805.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 13:20:34,701 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6047, 4.6989, 5.0046, 4.8506, 4.8341, 4.4342, 4.7475, 4.5906], + device='cuda:0'), covar=tensor([0.1439, 0.1372, 0.0922, 0.1309, 0.0884, 0.0981, 0.1966, 0.2015], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0293, 0.0341, 0.0272, 0.0256, 0.0253, 0.0328, 0.0362], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 13:20:35,959 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.615e+02 4.163e+02 4.935e+02 6.337e+02 1.233e+03, threshold=9.870e+02, percent-clipped=4.0 +2023-03-28 13:21:24,644 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36837.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:21:43,508 INFO [train.py:892] (0/4) Epoch 20, batch 1600, loss[loss=0.1699, simple_loss=0.2373, pruned_loss=0.05126, over 19699.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2597, pruned_loss=0.05836, over 3947835.19 frames. ], batch size: 101, lr: 7.80e-03, grad_scale: 16.0 +2023-03-28 13:21:53,685 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9035, 5.2478, 5.2913, 5.1770, 4.9239, 5.2909, 4.7102, 4.7637], + device='cuda:0'), covar=tensor([0.0451, 0.0436, 0.0454, 0.0461, 0.0557, 0.0456, 0.0689, 0.0955], + device='cuda:0'), in_proj_covar=tensor([0.0240, 0.0250, 0.0270, 0.0236, 0.0237, 0.0226, 0.0243, 0.0286], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 13:22:33,869 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36866.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 13:23:15,622 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36885.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:23:20,379 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3878, 2.9954, 4.8388, 3.9491, 4.4739, 4.7323, 4.6502, 4.4892], + device='cuda:0'), covar=tensor([0.0266, 0.0820, 0.0079, 0.1038, 0.0108, 0.0172, 0.0123, 0.0121], + device='cuda:0'), in_proj_covar=tensor([0.0090, 0.0098, 0.0081, 0.0150, 0.0076, 0.0089, 0.0084, 0.0078], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 13:23:38,878 INFO [train.py:892] (0/4) Epoch 20, batch 1650, loss[loss=0.1661, simple_loss=0.229, pruned_loss=0.05159, over 19781.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2577, pruned_loss=0.0572, over 3948969.99 frames. 
], batch size: 163, lr: 7.80e-03, grad_scale: 16.0 +2023-03-28 13:23:57,912 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9927, 2.4740, 3.0965, 3.3168, 3.7026, 4.1253, 3.9865, 4.1839], + device='cuda:0'), covar=tensor([0.0864, 0.1633, 0.1168, 0.0586, 0.0384, 0.0210, 0.0321, 0.0288], + device='cuda:0'), in_proj_covar=tensor([0.0153, 0.0168, 0.0173, 0.0144, 0.0126, 0.0120, 0.0111, 0.0109], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 13:24:05,928 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2193, 3.4330, 3.6328, 4.3025, 2.7775, 3.2396, 2.8852, 2.5836], + device='cuda:0'), covar=tensor([0.0485, 0.2188, 0.0945, 0.0332, 0.2093, 0.0863, 0.1237, 0.1762], + device='cuda:0'), in_proj_covar=tensor([0.0226, 0.0332, 0.0236, 0.0180, 0.0239, 0.0195, 0.0207, 0.0212], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 13:24:13,751 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36911.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:24:26,329 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.686e+02 3.920e+02 4.713e+02 5.573e+02 1.416e+03, threshold=9.427e+02, percent-clipped=1.0 +2023-03-28 13:25:31,479 INFO [train.py:892] (0/4) Epoch 20, batch 1700, loss[loss=0.1859, simple_loss=0.269, pruned_loss=0.05143, over 19726.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2581, pruned_loss=0.05729, over 3948704.76 frames. ], batch size: 61, lr: 7.79e-03, grad_scale: 16.0 +2023-03-28 13:25:34,518 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7380, 2.5447, 2.7924, 2.5922, 3.0400, 2.9198, 3.6590, 3.8945], + device='cuda:0'), covar=tensor([0.0623, 0.1775, 0.1609, 0.2112, 0.1657, 0.1608, 0.0529, 0.0514], + device='cuda:0'), in_proj_covar=tensor([0.0236, 0.0233, 0.0254, 0.0246, 0.0281, 0.0246, 0.0214, 0.0231], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 13:25:58,359 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36958.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:26:04,456 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36961.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:26:10,694 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36964.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:26:21,005 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36968.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:26:29,981 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36972.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:27:17,388 INFO [train.py:892] (0/4) Epoch 20, batch 1750, loss[loss=0.2007, simple_loss=0.2667, pruned_loss=0.06735, over 19770.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2587, pruned_loss=0.05768, over 3948025.47 frames. 
], batch size: 233, lr: 7.79e-03, grad_scale: 16.0 +2023-03-28 13:27:35,547 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37004.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:28:00,090 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.609e+02 4.040e+02 4.922e+02 6.023e+02 1.252e+03, threshold=9.844e+02, percent-clipped=2.0 +2023-03-28 13:28:04,724 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37019.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:28:12,876 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=5.09 vs. limit=5.0 +2023-03-28 13:28:46,430 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-03-28 13:28:55,525 INFO [train.py:892] (0/4) Epoch 20, batch 1800, loss[loss=0.1855, simple_loss=0.2541, pruned_loss=0.05847, over 19732.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2596, pruned_loss=0.05863, over 3946597.93 frames. ], batch size: 63, lr: 7.78e-03, grad_scale: 16.0 +2023-03-28 13:29:13,038 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37056.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 13:29:29,124 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37065.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:30:21,864 INFO [train.py:892] (0/4) Epoch 20, batch 1850, loss[loss=0.1882, simple_loss=0.2693, pruned_loss=0.05355, over 19808.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.261, pruned_loss=0.0583, over 3946266.80 frames. ], batch size: 57, lr: 7.78e-03, grad_scale: 16.0 +2023-03-28 13:30:29,750 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-20.pt +2023-03-28 13:31:23,900 INFO [train.py:892] (0/4) Epoch 21, batch 0, loss[loss=0.1631, simple_loss=0.2285, pruned_loss=0.04882, over 19836.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2285, pruned_loss=0.04882, over 19836.00 frames. ], batch size: 171, lr: 7.59e-03, grad_scale: 16.0 +2023-03-28 13:31:23,901 INFO [train.py:917] (0/4) Computing validation loss +2023-03-28 13:31:56,332 INFO [train.py:926] (0/4) Epoch 21, validation: loss=0.1717, simple_loss=0.248, pruned_loss=0.04765, over 2883724.00 frames. +2023-03-28 13:31:56,333 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-28 13:32:31,700 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.932e+02 4.162e+02 4.969e+02 6.097e+02 9.968e+02, threshold=9.939e+02, percent-clipped=2.0 +2023-03-28 13:33:09,034 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-03-28 13:33:23,710 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2873, 3.2901, 1.9910, 4.1052, 3.6100, 4.0139, 4.0437, 3.0954], + device='cuda:0'), covar=tensor([0.0569, 0.0599, 0.1540, 0.0559, 0.0524, 0.0320, 0.0535, 0.0755], + device='cuda:0'), in_proj_covar=tensor([0.0136, 0.0135, 0.0140, 0.0139, 0.0121, 0.0121, 0.0135, 0.0137], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 13:33:48,513 INFO [train.py:892] (0/4) Epoch 21, batch 50, loss[loss=0.1963, simple_loss=0.2792, pruned_loss=0.05676, over 19671.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2479, pruned_loss=0.052, over 890905.59 frames. 
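
The scaling.py Whitening entries (such as "metric=5.09 vs. limit=5.0" above) compare some whiteness statistic of the activations against a limit. The actual formula is not visible in the log; one standard statistic with the right behaviour, shown purely as an illustration, is d * tr(C^2) / tr(C)^2 for a d-channel covariance C, which equals 1 when the covariance is a multiple of the identity and grows with the eigenvalue spread:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int = 1) -> float:
    # x: (num_frames, num_channels). Split channels into groups and
    # average the per-group statistic d * tr(C^2) / tr(C)^2, which is
    # 1.0 for perfectly white features and larger otherwise.
    num_frames, num_channels = x.shape
    x = x.reshape(num_frames, num_groups, num_channels // num_groups)
    x = x - x.mean(dim=0, keepdim=True)
    metrics = []
    for g in range(num_groups):
        xg = x[:, g, :]                   # (num_frames, d)
        d = xg.shape[1]
        cov = (xg.t() @ xg) / num_frames  # (d, d) covariance
        metrics.append(d * (cov @ cov).trace() / cov.trace() ** 2)
    return float(torch.stack(metrics).mean())

torch.manual_seed(0)
# ~1.4 for this finite sample; tends to 1.0 as num_frames grows
# when the input really is white noise.
print(whitening_metric(torch.randn(1000, 384)))
```
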
], batch size: 52, lr: 7.58e-03, grad_scale: 16.0 +2023-03-28 13:34:10,789 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37161.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 13:34:13,438 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-28 13:35:40,044 INFO [train.py:892] (0/4) Epoch 21, batch 100, loss[loss=0.164, simple_loss=0.2307, pruned_loss=0.04864, over 19888.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2503, pruned_loss=0.05443, over 1569858.40 frames. ], batch size: 176, lr: 7.58e-03, grad_scale: 16.0 +2023-03-28 13:36:14,434 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.842e+02 4.106e+02 4.671e+02 5.605e+02 1.435e+03, threshold=9.342e+02, percent-clipped=1.0 +2023-03-28 13:37:33,192 INFO [train.py:892] (0/4) Epoch 21, batch 150, loss[loss=0.1588, simple_loss=0.2392, pruned_loss=0.0392, over 19806.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2524, pruned_loss=0.05496, over 2097024.10 frames. ], batch size: 86, lr: 7.57e-03, grad_scale: 16.0 +2023-03-28 13:37:34,052 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8837, 3.9238, 4.2689, 4.0356, 4.2093, 3.7228, 4.0143, 3.7658], + device='cuda:0'), covar=tensor([0.1577, 0.1607, 0.0991, 0.1365, 0.1066, 0.1111, 0.1839, 0.2331], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0293, 0.0339, 0.0272, 0.0256, 0.0252, 0.0330, 0.0364], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 13:37:57,641 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37261.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:38:04,079 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37264.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:38:06,399 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.8793, 2.2400, 2.0478, 1.4494, 2.0879, 2.2043, 2.0949, 2.1194], + device='cuda:0'), covar=tensor([0.0344, 0.0266, 0.0291, 0.0582, 0.0349, 0.0248, 0.0228, 0.0232], + device='cuda:0'), in_proj_covar=tensor([0.0085, 0.0079, 0.0086, 0.0090, 0.0092, 0.0070, 0.0068, 0.0070], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 13:38:10,100 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37267.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:38:13,986 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37268.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:38:14,076 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37268.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:39:25,426 INFO [train.py:892] (0/4) Epoch 21, batch 200, loss[loss=0.1696, simple_loss=0.2336, pruned_loss=0.05281, over 19744.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2555, pruned_loss=0.05645, over 2507906.40 frames. ], batch size: 140, lr: 7.57e-03, grad_scale: 16.0 +2023-03-28 13:39:32,918 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. 
limit=2.0 +2023-03-28 13:39:44,947 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37309.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:39:51,123 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37312.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:39:55,312 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37314.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:40:00,050 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37316.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:40:02,144 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.481e+02 4.044e+02 5.052e+02 5.778e+02 1.190e+03, threshold=1.010e+03, percent-clipped=3.0 +2023-03-28 13:40:11,731 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9179, 2.3675, 3.7493, 3.2804, 3.5685, 3.7829, 3.6270, 3.4780], + device='cuda:0'), covar=tensor([0.0514, 0.0928, 0.0118, 0.0597, 0.0165, 0.0213, 0.0194, 0.0200], + device='cuda:0'), in_proj_covar=tensor([0.0090, 0.0097, 0.0080, 0.0148, 0.0076, 0.0089, 0.0084, 0.0078], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 13:40:32,418 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37329.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:41:19,123 INFO [train.py:892] (0/4) Epoch 21, batch 250, loss[loss=0.1777, simple_loss=0.245, pruned_loss=0.05519, over 19840.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2559, pruned_loss=0.05686, over 2828099.46 frames. ], batch size: 190, lr: 7.56e-03, grad_scale: 16.0 +2023-03-28 13:41:30,916 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37356.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 13:41:39,260 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37360.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:41:47,542 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37364.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:42:45,527 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-03-28 13:43:12,252 INFO [train.py:892] (0/4) Epoch 21, batch 300, loss[loss=0.1903, simple_loss=0.2583, pruned_loss=0.06108, over 19819.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2572, pruned_loss=0.0577, over 3077153.74 frames. ], batch size: 128, lr: 7.56e-03, grad_scale: 16.0 +2023-03-28 13:43:21,045 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37404.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 13:43:49,251 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.947e+02 4.191e+02 4.785e+02 5.606e+02 9.286e+02, threshold=9.571e+02, percent-clipped=0.0 +2023-03-28 13:44:06,952 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37425.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:44:26,683 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-03-28 13:45:03,286 INFO [train.py:892] (0/4) Epoch 21, batch 350, loss[loss=0.1794, simple_loss=0.2516, pruned_loss=0.05364, over 19699.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2563, pruned_loss=0.05682, over 3270990.64 frames. 
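
The zipformer.py:625 entries record stochastic layer skipping: each one names a warmup window (warmup_begin/warmup_end, all at most 4000 batches), the current batch_count, and which layers were dropped. At batch counts near 37300 num_to_drop is usually 0 with an occasional 1, so some small drop probability evidently survives long after the warmup windows end. A sketch of that pattern; the probabilities here are invented for illustration, not taken from the code:

```python
import random

def pick_layers_to_drop(num_layers: int, batch_count: float,
                        warmup_begin: float, warmup_end: float) -> set:
    # Higher drop probability while this encoder stack is still inside
    # its warmup window, and a small residual probability afterwards.
    if batch_count < warmup_end:
        p = 0.5 if batch_count >= warmup_begin else 0.0  # assumed rates
    else:
        p = 0.075                                        # assumed rate
    return {i for i in range(num_layers) if random.random() < p}

random.seed(0)
# Values in the style of the surrounding log entries.
print(pick_layers_to_drop(4, batch_count=37314.0,
                          warmup_begin=1333.3, warmup_end=2000.0))
```
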
], batch size: 101, lr: 7.55e-03, grad_scale: 16.0 +2023-03-28 13:45:26,033 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37461.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 13:46:57,109 INFO [train.py:892] (0/4) Epoch 21, batch 400, loss[loss=0.209, simple_loss=0.2815, pruned_loss=0.06827, over 19652.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.257, pruned_loss=0.05702, over 3420657.06 frames. ], batch size: 330, lr: 7.55e-03, grad_scale: 16.0 +2023-03-28 13:47:14,055 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37509.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 13:47:33,496 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.804e+02 3.892e+02 4.730e+02 5.808e+02 1.803e+03, threshold=9.460e+02, percent-clipped=3.0 +2023-03-28 13:48:49,650 INFO [train.py:892] (0/4) Epoch 21, batch 450, loss[loss=0.1729, simple_loss=0.2459, pruned_loss=0.04994, over 19772.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2579, pruned_loss=0.05727, over 3538396.88 frames. ], batch size: 108, lr: 7.54e-03, grad_scale: 16.0 +2023-03-28 13:49:28,576 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37567.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:50:43,322 INFO [train.py:892] (0/4) Epoch 21, batch 500, loss[loss=0.1734, simple_loss=0.2497, pruned_loss=0.04852, over 19710.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2581, pruned_loss=0.05744, over 3629445.01 frames. ], batch size: 60, lr: 7.54e-03, grad_scale: 16.0 +2023-03-28 13:51:16,453 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37614.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:51:18,800 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37615.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:51:22,191 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.832e+02 3.951e+02 4.577e+02 5.485e+02 9.092e+02, threshold=9.154e+02, percent-clipped=0.0 +2023-03-28 13:51:37,806 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37624.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:52:39,184 INFO [train.py:892] (0/4) Epoch 21, batch 550, loss[loss=0.192, simple_loss=0.2774, pruned_loss=0.05328, over 19677.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2583, pruned_loss=0.05747, over 3700841.79 frames. ], batch size: 49, lr: 7.53e-03, grad_scale: 16.0 +2023-03-28 13:52:59,487 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37660.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:53:03,155 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37662.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:53:32,257 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-28 13:54:32,552 INFO [train.py:892] (0/4) Epoch 21, batch 600, loss[loss=0.1878, simple_loss=0.2508, pruned_loss=0.06235, over 19850.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2593, pruned_loss=0.05814, over 3755097.97 frames. 
], batch size: 137, lr: 7.53e-03, grad_scale: 16.0 +2023-03-28 13:54:49,017 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37708.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:55:09,545 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.449e+02 4.150e+02 4.838e+02 5.835e+02 9.838e+02, threshold=9.675e+02, percent-clipped=3.0 +2023-03-28 13:55:17,226 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37720.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 13:55:32,742 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-03-28 13:56:25,829 INFO [train.py:892] (0/4) Epoch 21, batch 650, loss[loss=0.1689, simple_loss=0.2465, pruned_loss=0.04567, over 19866.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2586, pruned_loss=0.05759, over 3797718.71 frames. ], batch size: 104, lr: 7.52e-03, grad_scale: 16.0 +2023-03-28 13:58:19,151 INFO [train.py:892] (0/4) Epoch 21, batch 700, loss[loss=0.1404, simple_loss=0.2137, pruned_loss=0.03353, over 19699.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2581, pruned_loss=0.05734, over 3831242.20 frames. ], batch size: 46, lr: 7.52e-03, grad_scale: 16.0 +2023-03-28 13:58:55,423 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.050e+02 4.218e+02 4.924e+02 5.585e+02 8.798e+02, threshold=9.849e+02, percent-clipped=0.0 +2023-03-28 14:00:09,464 INFO [train.py:892] (0/4) Epoch 21, batch 750, loss[loss=0.2243, simple_loss=0.2992, pruned_loss=0.0747, over 19809.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2575, pruned_loss=0.05685, over 3857525.32 frames. ], batch size: 57, lr: 7.51e-03, grad_scale: 16.0 +2023-03-28 14:02:04,456 INFO [train.py:892] (0/4) Epoch 21, batch 800, loss[loss=0.1634, simple_loss=0.2443, pruned_loss=0.04129, over 19763.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2586, pruned_loss=0.05713, over 3876939.11 frames. ], batch size: 100, lr: 7.51e-03, grad_scale: 16.0 +2023-03-28 14:02:39,802 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.002e+02 4.314e+02 5.039e+02 6.116e+02 1.150e+03, threshold=1.008e+03, percent-clipped=3.0 +2023-03-28 14:02:56,804 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37924.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:03:23,580 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7059, 3.3478, 3.4490, 3.6857, 3.4832, 3.6694, 3.7844, 3.9456], + device='cuda:0'), covar=tensor([0.0683, 0.0503, 0.0562, 0.0395, 0.0745, 0.0578, 0.0427, 0.0365], + device='cuda:0'), in_proj_covar=tensor([0.0144, 0.0166, 0.0190, 0.0162, 0.0164, 0.0147, 0.0144, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 14:03:57,498 INFO [train.py:892] (0/4) Epoch 21, batch 850, loss[loss=0.18, simple_loss=0.2589, pruned_loss=0.0506, over 19732.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2574, pruned_loss=0.0566, over 3892321.06 frames. 
], batch size: 63, lr: 7.50e-03, grad_scale: 16.0 +2023-03-28 14:04:07,808 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9703, 3.8065, 3.8743, 3.6501, 4.0285, 2.8850, 3.2630, 1.9410], + device='cuda:0'), covar=tensor([0.0281, 0.0290, 0.0211, 0.0242, 0.0217, 0.1082, 0.0939, 0.1838], + device='cuda:0'), in_proj_covar=tensor([0.0098, 0.0137, 0.0109, 0.0129, 0.0114, 0.0128, 0.0140, 0.0122], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 14:04:45,521 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37972.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:05:01,987 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37979.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:05:45,694 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-38000.pt +2023-03-28 14:05:53,010 INFO [train.py:892] (0/4) Epoch 21, batch 900, loss[loss=0.1762, simple_loss=0.248, pruned_loss=0.05218, over 19771.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.257, pruned_loss=0.05622, over 3905960.58 frames. ], batch size: 198, lr: 7.50e-03, grad_scale: 16.0 +2023-03-28 14:06:31,293 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.842e+02 4.121e+02 4.904e+02 6.115e+02 1.131e+03, threshold=9.807e+02, percent-clipped=1.0 +2023-03-28 14:06:38,654 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38020.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:07:21,111 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38040.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:07:28,531 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7077, 3.0445, 3.1739, 3.6188, 2.5252, 3.0318, 2.5203, 2.3045], + device='cuda:0'), covar=tensor([0.0594, 0.1884, 0.0979, 0.0406, 0.2056, 0.0777, 0.1248, 0.1751], + device='cuda:0'), in_proj_covar=tensor([0.0229, 0.0332, 0.0239, 0.0183, 0.0240, 0.0196, 0.0208, 0.0213], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 14:07:46,391 INFO [train.py:892] (0/4) Epoch 21, batch 950, loss[loss=0.179, simple_loss=0.2592, pruned_loss=0.04939, over 19889.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2571, pruned_loss=0.05651, over 3915995.20 frames. 
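
The leading tensor in each attn_weights_entropy record reads as a per-head diagnostic: values near log(src_len) mean nearly uniform attention, values near 0 mean sharply peaked attention. A plausible way to compute such a vector, shown as a sketch rather than the actual zipformer.py code (the accompanying covar tensors are the usual running covariance diagnostics and are omitted here):

```python
import torch

def attn_weights_entropy(attn: torch.Tensor) -> torch.Tensor:
    # attn: (num_heads, tgt_len, src_len), rows already softmax-normalized.
    # Shannon entropy of each row, averaged over target positions.
    ent = -(attn * (attn + 1e-20).log()).sum(dim=-1)  # (num_heads, tgt_len)
    return ent.mean(dim=-1)                           # (num_heads,)

torch.manual_seed(0)
attn = torch.softmax(torch.randn(8, 50, 50), dim=-1)
print(attn_weights_entropy(attn))  # upper bound here is log(50) ~ 3.91
```
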
], batch size: 61, lr: 7.49e-03, grad_scale: 16.0 +2023-03-28 14:08:23,010 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=38068.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:09:05,326 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9519, 3.8548, 4.2584, 3.8587, 3.6564, 4.1358, 3.9451, 4.3118], + device='cuda:0'), covar=tensor([0.0846, 0.0367, 0.0371, 0.0385, 0.1036, 0.0470, 0.0427, 0.0331], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0216, 0.0213, 0.0224, 0.0204, 0.0223, 0.0224, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 14:09:20,009 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3968, 5.6856, 5.7052, 5.5961, 5.3611, 5.6776, 5.1279, 5.1816], + device='cuda:0'), covar=tensor([0.0388, 0.0371, 0.0495, 0.0392, 0.0596, 0.0524, 0.0609, 0.0889], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0255, 0.0276, 0.0239, 0.0240, 0.0232, 0.0247, 0.0289], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 14:09:34,129 INFO [train.py:892] (0/4) Epoch 21, batch 1000, loss[loss=0.1713, simple_loss=0.2367, pruned_loss=0.05298, over 19760.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2573, pruned_loss=0.05704, over 3923162.52 frames. ], batch size: 125, lr: 7.49e-03, grad_scale: 16.0 +2023-03-28 14:10:09,400 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.902e+02 4.380e+02 5.195e+02 6.424e+02 1.121e+03, threshold=1.039e+03, percent-clipped=5.0 +2023-03-28 14:10:55,274 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.9556, 2.3164, 2.1034, 1.4515, 2.1007, 2.2921, 2.1558, 2.2960], + device='cuda:0'), covar=tensor([0.0392, 0.0265, 0.0310, 0.0634, 0.0375, 0.0251, 0.0263, 0.0213], + device='cuda:0'), in_proj_covar=tensor([0.0087, 0.0081, 0.0088, 0.0092, 0.0095, 0.0071, 0.0069, 0.0071], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 14:11:25,974 INFO [train.py:892] (0/4) Epoch 21, batch 1050, loss[loss=0.1876, simple_loss=0.2588, pruned_loss=0.05817, over 19742.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2579, pruned_loss=0.05726, over 3928133.07 frames. ], batch size: 106, lr: 7.48e-03, grad_scale: 16.0 +2023-03-28 14:12:51,878 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.5465, 5.8876, 6.0945, 5.9630, 5.6939, 5.6723, 5.7139, 5.6718], + device='cuda:0'), covar=tensor([0.1285, 0.1055, 0.0642, 0.1045, 0.0598, 0.0651, 0.1602, 0.1685], + device='cuda:0'), in_proj_covar=tensor([0.0271, 0.0290, 0.0336, 0.0268, 0.0249, 0.0253, 0.0328, 0.0356], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 14:13:22,207 INFO [train.py:892] (0/4) Epoch 21, batch 1100, loss[loss=0.1769, simple_loss=0.2491, pruned_loss=0.05232, over 19778.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2568, pruned_loss=0.05661, over 3933750.09 frames. ], batch size: 226, lr: 7.48e-03, grad_scale: 16.0 +2023-03-28 14:13:55,404 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.714e+02 3.883e+02 4.794e+02 6.298e+02 1.135e+03, threshold=9.588e+02, percent-clipped=2.0 +2023-03-28 14:15:12,024 INFO [train.py:892] (0/4) Epoch 21, batch 1150, loss[loss=0.2118, simple_loss=0.2883, pruned_loss=0.06762, over 19774.00 frames. 
], tot_loss[loss=0.1855, simple_loss=0.2569, pruned_loss=0.05709, over 3936974.66 frames. ], batch size: 273, lr: 7.47e-03, grad_scale: 16.0 +2023-03-28 14:17:02,220 INFO [train.py:892] (0/4) Epoch 21, batch 1200, loss[loss=0.1832, simple_loss=0.2594, pruned_loss=0.05351, over 19610.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.257, pruned_loss=0.05725, over 3939650.10 frames. ], batch size: 46, lr: 7.47e-03, grad_scale: 32.0 +2023-03-28 14:17:37,168 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.715e+02 4.012e+02 4.628e+02 5.373e+02 1.374e+03, threshold=9.255e+02, percent-clipped=1.0 +2023-03-28 14:18:01,748 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7773, 3.0850, 3.1683, 3.7037, 2.5423, 3.1173, 2.5607, 2.3691], + device='cuda:0'), covar=tensor([0.0509, 0.1822, 0.1002, 0.0398, 0.2101, 0.0782, 0.1238, 0.1678], + device='cuda:0'), in_proj_covar=tensor([0.0227, 0.0330, 0.0239, 0.0183, 0.0240, 0.0195, 0.0208, 0.0212], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 14:18:18,013 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38335.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:18:53,057 INFO [train.py:892] (0/4) Epoch 21, batch 1250, loss[loss=0.1782, simple_loss=0.2441, pruned_loss=0.05613, over 19845.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2563, pruned_loss=0.05718, over 3942628.93 frames. ], batch size: 137, lr: 7.46e-03, grad_scale: 32.0 +2023-03-28 14:19:12,813 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38360.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:20:46,457 INFO [train.py:892] (0/4) Epoch 21, batch 1300, loss[loss=0.1633, simple_loss=0.2306, pruned_loss=0.04799, over 19838.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2551, pruned_loss=0.05649, over 3944340.75 frames. ], batch size: 43, lr: 7.46e-03, grad_scale: 32.0 +2023-03-28 14:21:18,889 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-03-28 14:21:20,980 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.582e+02 4.064e+02 4.748e+02 5.705e+02 1.088e+03, threshold=9.497e+02, percent-clipped=1.0 +2023-03-28 14:21:31,799 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38421.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:22:38,960 INFO [train.py:892] (0/4) Epoch 21, batch 1350, loss[loss=0.1698, simple_loss=0.2475, pruned_loss=0.04603, over 19668.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2549, pruned_loss=0.05635, over 3945899.96 frames. ], batch size: 55, lr: 7.45e-03, grad_scale: 32.0 +2023-03-28 14:22:51,732 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-03-28 14:23:53,619 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8955, 4.7472, 5.2946, 4.8215, 4.2553, 5.0207, 4.9349, 5.4356], + device='cuda:0'), covar=tensor([0.0812, 0.0369, 0.0300, 0.0342, 0.0793, 0.0416, 0.0364, 0.0264], + device='cuda:0'), in_proj_covar=tensor([0.0272, 0.0214, 0.0211, 0.0222, 0.0201, 0.0220, 0.0221, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 14:24:34,315 INFO [train.py:892] (0/4) Epoch 21, batch 1400, loss[loss=0.2048, simple_loss=0.2842, pruned_loss=0.06272, over 19896.00 frames. 
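
grad_scale climbs from 8.0 early in this section to 16.0 around epoch 20, batch 1050, and to 32.0 by epoch 21, batch 1200. That staircase is the signature of mixed-precision loss scaling: the scale doubles after a run of overflow-free steps and is cut back when gradients overflow. A generic torch.cuda.amp sketch; the initial scale and growth interval below are placeholders, not the values used for this run:

```python
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=8.0,       # placeholder
                                   growth_factor=2.0,
                                   backoff_factor=0.5,
                                   growth_interval=2000)  # placeholder

def train_step(model, batch, optimizer, compute_loss):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = compute_loss(model, batch)
    scaler.scale(loss).backward()
    scaler.step(optimizer)  # skipped internally if grads overflowed
    scaler.update()         # doubles the scale after growth_interval
                            # consecutive non-overflowing steps
    return loss.detach(), scaler.get_scale()
```
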
], tot_loss[loss=0.1833, simple_loss=0.2545, pruned_loss=0.05607, over 3949003.38 frames. ], batch size: 71, lr: 7.45e-03, grad_scale: 32.0 +2023-03-28 14:25:09,935 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.194e+02 4.264e+02 4.951e+02 5.984e+02 1.387e+03, threshold=9.901e+02, percent-clipped=2.0 +2023-03-28 14:26:26,395 INFO [train.py:892] (0/4) Epoch 21, batch 1450, loss[loss=0.1949, simple_loss=0.2651, pruned_loss=0.0624, over 19728.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2559, pruned_loss=0.05635, over 3948442.75 frames. ], batch size: 269, lr: 7.45e-03, grad_scale: 32.0 +2023-03-28 14:28:22,047 INFO [train.py:892] (0/4) Epoch 21, batch 1500, loss[loss=0.2381, simple_loss=0.2955, pruned_loss=0.09039, over 19713.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2568, pruned_loss=0.05648, over 3948842.74 frames. ], batch size: 310, lr: 7.44e-03, grad_scale: 32.0 +2023-03-28 14:28:58,334 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.556e+02 4.180e+02 4.793e+02 6.063e+02 1.172e+03, threshold=9.587e+02, percent-clipped=1.0 +2023-03-28 14:29:40,602 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38635.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:30:15,089 INFO [train.py:892] (0/4) Epoch 21, batch 1550, loss[loss=0.2052, simple_loss=0.2727, pruned_loss=0.06881, over 19758.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2563, pruned_loss=0.0562, over 3948584.52 frames. ], batch size: 256, lr: 7.44e-03, grad_scale: 32.0 +2023-03-28 14:31:30,390 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=38683.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:32:09,954 INFO [train.py:892] (0/4) Epoch 21, batch 1600, loss[loss=0.2957, simple_loss=0.3532, pruned_loss=0.1191, over 19415.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2563, pruned_loss=0.05609, over 3949882.16 frames. ], batch size: 431, lr: 7.43e-03, grad_scale: 32.0 +2023-03-28 14:32:46,933 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38716.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:32:48,613 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.983e+02 3.919e+02 4.752e+02 6.022e+02 1.186e+03, threshold=9.505e+02, percent-clipped=3.0 +2023-03-28 14:34:02,101 INFO [train.py:892] (0/4) Epoch 21, batch 1650, loss[loss=0.2081, simple_loss=0.2835, pruned_loss=0.06631, over 19719.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2555, pruned_loss=0.05564, over 3950404.06 frames. ], batch size: 310, lr: 7.43e-03, grad_scale: 32.0 +2023-03-28 14:34:03,305 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-03-28 14:35:51,444 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5249, 3.7022, 2.0374, 4.3413, 3.8917, 4.3701, 4.3484, 3.3683], + device='cuda:0'), covar=tensor([0.0574, 0.0471, 0.1580, 0.0505, 0.0583, 0.0358, 0.0569, 0.0694], + device='cuda:0'), in_proj_covar=tensor([0.0137, 0.0134, 0.0140, 0.0140, 0.0122, 0.0125, 0.0136, 0.0139], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 14:35:57,105 INFO [train.py:892] (0/4) Epoch 21, batch 1700, loss[loss=0.1817, simple_loss=0.2451, pruned_loss=0.05912, over 19865.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2552, pruned_loss=0.05514, over 3948361.71 frames. 
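
The learning rate decays smoothly within each epoch (7.45e-03 down to 7.42e-03 across a few hundred batches here) and takes a larger step down at each epoch boundary (7.59e-03 at the start of epoch 21, 7.23e-03 at epoch 22). That is the shape of a dual batch/epoch decay such as icefall's Eden schedule; a sketch with placeholder constants, chosen only to show the two behaviours rather than to reproduce this run's settings:

```python
def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float, lr_epochs: float) -> float:
    # Smooth decay in both the global batch count and the epoch count;
    # roughly flat early on, approaching t^-0.25 decay in each factor.
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# Placeholder constants for illustration.
for batch, epoch in [(38300, 20), (38700, 20), (39000, 21)]:
    lr = eden_lr(0.05, batch, epoch, lr_batches=5000.0, lr_epochs=3.5)
    print(f"epoch {epoch} batch {batch}: lr={lr:.2e}")
```

Within an epoch only the slowly moving batch factor changes, giving the small per-interval drops; at the epoch boundary the epoch factor steps down as well, giving the larger jump seen in the log.
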
], batch size: 122, lr: 7.42e-03, grad_scale: 32.0 +2023-03-28 14:36:32,829 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.773e+02 3.919e+02 4.516e+02 5.441e+02 1.197e+03, threshold=9.032e+02, percent-clipped=2.0 +2023-03-28 14:36:48,272 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38823.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:37:21,993 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2678, 4.8022, 4.9473, 4.6740, 5.1891, 3.1355, 3.9175, 2.5388], + device='cuda:0'), covar=tensor([0.0254, 0.0236, 0.0201, 0.0237, 0.0254, 0.1055, 0.1214, 0.1813], + device='cuda:0'), in_proj_covar=tensor([0.0098, 0.0137, 0.0109, 0.0130, 0.0115, 0.0129, 0.0138, 0.0123], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 14:37:39,908 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5035, 4.2028, 4.2361, 4.5042, 4.1349, 4.5588, 4.6254, 4.7858], + device='cuda:0'), covar=tensor([0.0617, 0.0404, 0.0502, 0.0363, 0.0717, 0.0449, 0.0390, 0.0312], + device='cuda:0'), in_proj_covar=tensor([0.0141, 0.0162, 0.0188, 0.0161, 0.0160, 0.0145, 0.0141, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 14:37:43,277 INFO [train.py:892] (0/4) Epoch 21, batch 1750, loss[loss=0.193, simple_loss=0.2608, pruned_loss=0.06256, over 19740.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2558, pruned_loss=0.05551, over 3947367.29 frames. ], batch size: 209, lr: 7.42e-03, grad_scale: 32.0 +2023-03-28 14:38:35,261 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7787, 2.9040, 4.1726, 3.1711, 3.5585, 3.4460, 2.3844, 2.5338], + device='cuda:0'), covar=tensor([0.0966, 0.3127, 0.0546, 0.1021, 0.1668, 0.1329, 0.2285, 0.2531], + device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0369, 0.0324, 0.0261, 0.0361, 0.0339, 0.0344, 0.0313], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 14:38:47,343 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38884.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:39:18,396 INFO [train.py:892] (0/4) Epoch 21, batch 1800, loss[loss=0.19, simple_loss=0.269, pruned_loss=0.05545, over 19820.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2553, pruned_loss=0.05533, over 3949342.26 frames. ], batch size: 57, lr: 7.41e-03, grad_scale: 32.0 +2023-03-28 14:39:48,597 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.425e+02 4.042e+02 5.128e+02 6.021e+02 1.055e+03, threshold=1.026e+03, percent-clipped=2.0 +2023-03-28 14:40:47,454 INFO [train.py:892] (0/4) Epoch 21, batch 1850, loss[loss=0.1823, simple_loss=0.2753, pruned_loss=0.04465, over 19577.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2566, pruned_loss=0.0551, over 3949688.97 frames. ], batch size: 53, lr: 7.41e-03, grad_scale: 32.0 +2023-03-28 14:40:55,111 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-21.pt +2023-03-28 14:41:51,760 INFO [train.py:892] (0/4) Epoch 22, batch 0, loss[loss=0.1743, simple_loss=0.2435, pruned_loss=0.05255, over 19791.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2435, pruned_loss=0.05255, over 19791.00 frames. 
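
The entries just after this point repeat the per-epoch pattern seen at the epoch 20/21 boundary: the epoch checkpoint is saved, then batch 0 of the new epoch triggers "Computing validation loss", a validation result over the fixed 2883724.00-frame dev set, and a report of peak CUDA memory. A minimal sketch of such a validation hook; the compute_loss helper and its return convention are hypothetical:

```python
import torch

def compute_validation_loss(model, valid_loader, compute_loss, device) -> float:
    model.eval()
    loss_sum, frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            # compute_loss is a hypothetical helper returning the batch
            # loss (per frame) and the number of frames in the batch.
            loss, num_frames = compute_loss(model, batch)
            loss_sum += float(loss) * num_frames
            frames += num_frames
    model.train()
    max_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"validation: loss={loss_sum / frames:.4f} over {frames:.2f} frames")
    print(f"Maximum memory allocated so far is {max_mb}MB")
    return loss_sum / frames
```
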
], batch size: 162, lr: 7.23e-03, grad_scale: 32.0 +2023-03-28 14:41:51,762 INFO [train.py:917] (0/4) Computing validation loss +2023-03-28 14:42:30,100 INFO [train.py:926] (0/4) Epoch 22, validation: loss=0.1727, simple_loss=0.2482, pruned_loss=0.04859, over 2883724.00 frames. +2023-03-28 14:42:30,102 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-28 14:42:36,023 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.5142, 1.6075, 1.4589, 0.8306, 1.4819, 1.5263, 1.4622, 1.5066], + device='cuda:0'), covar=tensor([0.0312, 0.0264, 0.0326, 0.0575, 0.0454, 0.0275, 0.0264, 0.0261], + device='cuda:0'), in_proj_covar=tensor([0.0085, 0.0081, 0.0087, 0.0091, 0.0094, 0.0071, 0.0069, 0.0071], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 14:44:26,118 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1488, 3.1507, 2.9922, 2.6332, 3.0443, 2.5978, 2.3878, 1.7424], + device='cuda:0'), covar=tensor([0.0287, 0.0279, 0.0197, 0.0271, 0.0187, 0.0738, 0.0751, 0.1601], + device='cuda:0'), in_proj_covar=tensor([0.0099, 0.0138, 0.0110, 0.0130, 0.0115, 0.0129, 0.0140, 0.0123], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 14:44:29,522 INFO [train.py:892] (0/4) Epoch 22, batch 50, loss[loss=0.1694, simple_loss=0.2537, pruned_loss=0.04258, over 19941.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2534, pruned_loss=0.05484, over 888888.09 frames. ], batch size: 52, lr: 7.23e-03, grad_scale: 32.0 +2023-03-28 14:44:52,586 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39016.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:44:53,815 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.550e+02 3.535e+02 4.354e+02 5.558e+02 1.145e+03, threshold=8.708e+02, percent-clipped=3.0 +2023-03-28 14:46:17,749 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5094, 2.8969, 2.9764, 3.4462, 2.3794, 3.0536, 2.3284, 2.2162], + device='cuda:0'), covar=tensor([0.0536, 0.1640, 0.1055, 0.0422, 0.2147, 0.0761, 0.1322, 0.1693], + device='cuda:0'), in_proj_covar=tensor([0.0231, 0.0334, 0.0242, 0.0187, 0.0242, 0.0198, 0.0210, 0.0214], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 14:46:23,790 INFO [train.py:892] (0/4) Epoch 22, batch 100, loss[loss=0.1967, simple_loss=0.2669, pruned_loss=0.0633, over 19780.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.25, pruned_loss=0.05274, over 1569172.83 frames. ], batch size: 236, lr: 7.22e-03, grad_scale: 32.0 +2023-03-28 14:46:42,967 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39064.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:46:50,950 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. 
limit=2.0 +2023-03-28 14:47:22,984 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3936, 4.5220, 2.7182, 4.7227, 4.9142, 2.0930, 4.1829, 3.5965], + device='cuda:0'), covar=tensor([0.0640, 0.0701, 0.2586, 0.0899, 0.0633, 0.2808, 0.0990, 0.0771], + device='cuda:0'), in_proj_covar=tensor([0.0223, 0.0247, 0.0226, 0.0257, 0.0230, 0.0202, 0.0233, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 14:48:19,883 INFO [train.py:892] (0/4) Epoch 22, batch 150, loss[loss=0.168, simple_loss=0.249, pruned_loss=0.04354, over 19716.00 frames. ], tot_loss[loss=0.181, simple_loss=0.253, pruned_loss=0.05446, over 2096246.43 frames. ], batch size: 81, lr: 7.22e-03, grad_scale: 32.0 +2023-03-28 14:48:48,470 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.956e+02 3.952e+02 4.652e+02 5.506e+02 8.622e+02, threshold=9.304e+02, percent-clipped=0.0 +2023-03-28 14:50:18,184 INFO [train.py:892] (0/4) Epoch 22, batch 200, loss[loss=0.1835, simple_loss=0.2453, pruned_loss=0.06089, over 19845.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2544, pruned_loss=0.05487, over 2507358.49 frames. ], batch size: 124, lr: 7.22e-03, grad_scale: 32.0 +2023-03-28 14:51:07,052 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39179.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:51:21,859 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39185.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:52:08,845 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2878, 3.2805, 5.0197, 4.2735, 4.7932, 4.9427, 4.8832, 4.5790], + device='cuda:0'), covar=tensor([0.0308, 0.0690, 0.0081, 0.0830, 0.0101, 0.0150, 0.0104, 0.0107], + device='cuda:0'), in_proj_covar=tensor([0.0090, 0.0096, 0.0081, 0.0147, 0.0076, 0.0090, 0.0084, 0.0077], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 14:52:10,112 INFO [train.py:892] (0/4) Epoch 22, batch 250, loss[loss=0.1611, simple_loss=0.2315, pruned_loss=0.04531, over 19782.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2528, pruned_loss=0.05431, over 2827789.83 frames. ], batch size: 46, lr: 7.21e-03, grad_scale: 32.0 +2023-03-28 14:52:34,873 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.591e+02 3.931e+02 4.730e+02 5.720e+02 9.531e+02, threshold=9.460e+02, percent-clipped=1.0 +2023-03-28 14:53:42,608 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39246.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:54:03,335 INFO [train.py:892] (0/4) Epoch 22, batch 300, loss[loss=0.1511, simple_loss=0.2286, pruned_loss=0.03686, over 19888.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2531, pruned_loss=0.05388, over 3077515.44 frames. 
], batch size: 47, lr: 7.21e-03, grad_scale: 32.0 +2023-03-28 14:54:45,497 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5605, 4.1983, 4.2802, 4.5221, 4.2546, 4.6025, 4.6660, 4.7986], + device='cuda:0'), covar=tensor([0.0544, 0.0358, 0.0463, 0.0341, 0.0580, 0.0403, 0.0384, 0.0284], + device='cuda:0'), in_proj_covar=tensor([0.0144, 0.0166, 0.0192, 0.0165, 0.0164, 0.0149, 0.0145, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 14:54:45,575 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4078, 2.8621, 4.3328, 3.8422, 4.1474, 4.3156, 4.2679, 4.0826], + device='cuda:0'), covar=tensor([0.0410, 0.0754, 0.0095, 0.0568, 0.0133, 0.0181, 0.0134, 0.0132], + device='cuda:0'), in_proj_covar=tensor([0.0091, 0.0097, 0.0081, 0.0148, 0.0076, 0.0090, 0.0085, 0.0078], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 14:55:07,882 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39284.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:55:22,905 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4837, 3.3947, 3.8364, 2.6160, 3.9638, 3.0406, 3.2469, 4.0007], + device='cuda:0'), covar=tensor([0.0635, 0.0404, 0.0507, 0.0834, 0.0299, 0.0393, 0.0513, 0.0260], + device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0077, 0.0075, 0.0105, 0.0071, 0.0072, 0.0069, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 14:55:28,943 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6400, 2.8423, 3.4993, 3.1390, 3.8023, 3.7353, 4.4951, 4.9695], + device='cuda:0'), covar=tensor([0.0528, 0.1832, 0.1440, 0.1968, 0.1711, 0.1370, 0.0521, 0.0388], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0236, 0.0257, 0.0247, 0.0285, 0.0248, 0.0217, 0.0238], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 14:55:59,301 INFO [train.py:892] (0/4) Epoch 22, batch 350, loss[loss=0.1775, simple_loss=0.2547, pruned_loss=0.05018, over 19733.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2539, pruned_loss=0.0543, over 3271562.05 frames. ], batch size: 99, lr: 7.20e-03, grad_scale: 32.0 +2023-03-28 14:56:21,664 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.483e+02 3.997e+02 4.728e+02 5.910e+02 1.079e+03, threshold=9.457e+02, percent-clipped=2.0 +2023-03-28 14:57:27,917 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39345.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 14:57:49,321 INFO [train.py:892] (0/4) Epoch 22, batch 400, loss[loss=0.1749, simple_loss=0.2594, pruned_loss=0.04522, over 19802.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2558, pruned_loss=0.0553, over 3420990.16 frames. 
], batch size: 51, lr: 7.20e-03, grad_scale: 32.0 +2023-03-28 14:58:12,317 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4899, 4.5207, 4.8904, 4.6911, 4.7927, 4.3502, 4.5775, 4.4297], + device='cuda:0'), covar=tensor([0.1466, 0.1377, 0.0899, 0.1234, 0.0816, 0.0855, 0.1976, 0.2058], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0299, 0.0343, 0.0273, 0.0256, 0.0258, 0.0336, 0.0361], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 14:59:22,598 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-28 14:59:41,761 INFO [train.py:892] (0/4) Epoch 22, batch 450, loss[loss=0.1704, simple_loss=0.2472, pruned_loss=0.04683, over 19870.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2565, pruned_loss=0.05585, over 3538033.04 frames. ], batch size: 77, lr: 7.19e-03, grad_scale: 32.0 +2023-03-28 15:00:08,522 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.964e+02 3.895e+02 4.549e+02 5.283e+02 8.975e+02, threshold=9.097e+02, percent-clipped=0.0 +2023-03-28 15:00:11,676 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9576, 5.0992, 5.4073, 5.1891, 5.2019, 4.7676, 5.0621, 4.9072], + device='cuda:0'), covar=tensor([0.1441, 0.1604, 0.0997, 0.1280, 0.0760, 0.0957, 0.2024, 0.2234], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0303, 0.0348, 0.0277, 0.0260, 0.0262, 0.0341, 0.0365], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 15:00:30,069 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.83 vs. limit=5.0 +2023-03-28 15:01:13,141 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2976, 3.2866, 3.1792, 2.9650, 3.2736, 2.6486, 2.6120, 1.5695], + device='cuda:0'), covar=tensor([0.0276, 0.0264, 0.0194, 0.0224, 0.0195, 0.1320, 0.0706, 0.1866], + device='cuda:0'), in_proj_covar=tensor([0.0099, 0.0138, 0.0110, 0.0129, 0.0114, 0.0128, 0.0139, 0.0122], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 15:01:37,220 INFO [train.py:892] (0/4) Epoch 22, batch 500, loss[loss=0.1666, simple_loss=0.2466, pruned_loss=0.04329, over 19855.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2581, pruned_loss=0.05723, over 3628729.56 frames. ], batch size: 49, lr: 7.19e-03, grad_scale: 32.0 +2023-03-28 15:01:51,182 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.7967, 6.0388, 6.0668, 5.9595, 5.7531, 6.0178, 5.3509, 5.4766], + device='cuda:0'), covar=tensor([0.0371, 0.0411, 0.0509, 0.0384, 0.0524, 0.0546, 0.0694, 0.0977], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0255, 0.0276, 0.0240, 0.0238, 0.0229, 0.0246, 0.0291], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 15:01:53,177 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39463.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:02:31,879 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39479.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:03:33,874 INFO [train.py:892] (0/4) Epoch 22, batch 550, loss[loss=0.2625, simple_loss=0.3348, pruned_loss=0.09515, over 19469.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2574, pruned_loss=0.05721, over 3699637.89 frames. 
], batch size: 396, lr: 7.18e-03, grad_scale: 32.0 +2023-03-28 15:03:58,571 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.760e+02 4.104e+02 4.987e+02 6.113e+02 1.669e+03, threshold=9.973e+02, percent-clipped=4.0 +2023-03-28 15:04:16,100 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39524.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:04:22,044 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39527.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:04:51,760 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39541.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:05:26,083 INFO [train.py:892] (0/4) Epoch 22, batch 600, loss[loss=0.1561, simple_loss=0.2269, pruned_loss=0.04266, over 19847.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2563, pruned_loss=0.05608, over 3754962.12 frames. ], batch size: 115, lr: 7.18e-03, grad_scale: 32.0 +2023-03-28 15:06:13,398 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9877, 5.1543, 5.4594, 5.1623, 5.2372, 4.9233, 5.1045, 4.8510], + device='cuda:0'), covar=tensor([0.1418, 0.1434, 0.0902, 0.1225, 0.0699, 0.0837, 0.2098, 0.2049], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0303, 0.0348, 0.0277, 0.0259, 0.0261, 0.0339, 0.0364], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 15:07:20,941 INFO [train.py:892] (0/4) Epoch 22, batch 650, loss[loss=0.1909, simple_loss=0.2672, pruned_loss=0.05729, over 19699.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2553, pruned_loss=0.05561, over 3798590.75 frames. ], batch size: 265, lr: 7.17e-03, grad_scale: 32.0 +2023-03-28 15:07:45,280 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.674e+02 3.926e+02 4.968e+02 5.717e+02 7.985e+02, threshold=9.935e+02, percent-clipped=0.0 +2023-03-28 15:07:53,531 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4198, 4.5685, 2.5953, 4.8430, 5.0935, 2.1794, 4.2890, 3.5342], + device='cuda:0'), covar=tensor([0.0648, 0.0578, 0.2669, 0.0640, 0.0354, 0.2738, 0.0801, 0.0784], + device='cuda:0'), in_proj_covar=tensor([0.0222, 0.0246, 0.0224, 0.0257, 0.0230, 0.0199, 0.0232, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 15:08:09,472 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0095, 2.5918, 2.9269, 3.2208, 3.7681, 4.3116, 4.0906, 4.3129], + device='cuda:0'), covar=tensor([0.0920, 0.1690, 0.1351, 0.0646, 0.0322, 0.0183, 0.0288, 0.0356], + device='cuda:0'), in_proj_covar=tensor([0.0153, 0.0168, 0.0170, 0.0141, 0.0124, 0.0119, 0.0113, 0.0107], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 15:08:31,206 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.45 vs. limit=2.0 +2023-03-28 15:08:37,475 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39639.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 15:08:39,390 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39640.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:09:12,692 INFO [train.py:892] (0/4) Epoch 22, batch 700, loss[loss=0.1867, simple_loss=0.2662, pruned_loss=0.05362, over 19802.00 frames. 
], tot_loss[loss=0.1836, simple_loss=0.2562, pruned_loss=0.05547, over 3831902.24 frames. ], batch size: 51, lr: 7.17e-03, grad_scale: 32.0 +2023-03-28 15:10:53,868 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39700.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 15:11:02,018 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9196, 3.5944, 3.7420, 3.9391, 3.6752, 3.8733, 4.0209, 4.1594], + device='cuda:0'), covar=tensor([0.0666, 0.0448, 0.0527, 0.0361, 0.0664, 0.0575, 0.0417, 0.0321], + device='cuda:0'), in_proj_covar=tensor([0.0143, 0.0164, 0.0189, 0.0162, 0.0162, 0.0146, 0.0141, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 15:11:05,442 INFO [train.py:892] (0/4) Epoch 22, batch 750, loss[loss=0.1734, simple_loss=0.244, pruned_loss=0.05138, over 19824.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2557, pruned_loss=0.05513, over 3859209.08 frames. ], batch size: 147, lr: 7.17e-03, grad_scale: 16.0 +2023-03-28 15:11:32,103 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.919e+02 4.119e+02 5.270e+02 6.689e+02 1.138e+03, threshold=1.054e+03, percent-clipped=1.0 +2023-03-28 15:13:01,461 INFO [train.py:892] (0/4) Epoch 22, batch 800, loss[loss=0.1671, simple_loss=0.2398, pruned_loss=0.04725, over 19712.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2562, pruned_loss=0.05522, over 3877720.54 frames. ], batch size: 85, lr: 7.16e-03, grad_scale: 16.0 +2023-03-28 15:13:16,351 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6646, 2.1738, 2.5531, 2.9448, 3.2864, 3.4422, 3.3824, 3.4671], + device='cuda:0'), covar=tensor([0.0889, 0.1594, 0.1131, 0.0600, 0.0415, 0.0302, 0.0396, 0.0345], + device='cuda:0'), in_proj_covar=tensor([0.0150, 0.0166, 0.0167, 0.0139, 0.0122, 0.0118, 0.0112, 0.0106], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 15:13:24,572 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39767.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:13:34,919 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1144, 2.9932, 3.2687, 2.4515, 3.3739, 2.7900, 3.1679, 3.1684], + device='cuda:0'), covar=tensor([0.0491, 0.0433, 0.0413, 0.0791, 0.0303, 0.0409, 0.0364, 0.0406], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0079, 0.0075, 0.0106, 0.0072, 0.0072, 0.0070, 0.0064], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 15:13:50,064 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.2338, 2.2563, 2.3826, 2.2253, 2.2866, 2.3728, 2.2963, 2.3904], + device='cuda:0'), covar=tensor([0.0266, 0.0312, 0.0280, 0.0253, 0.0350, 0.0263, 0.0366, 0.0270], + device='cuda:0'), in_proj_covar=tensor([0.0071, 0.0065, 0.0069, 0.0062, 0.0076, 0.0070, 0.0087, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-03-28 15:14:52,000 INFO [train.py:892] (0/4) Epoch 22, batch 850, loss[loss=0.1807, simple_loss=0.2488, pruned_loss=0.05632, over 19761.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2558, pruned_loss=0.05503, over 3894440.68 frames. 
], batch size: 188, lr: 7.16e-03, grad_scale: 16.0 +2023-03-28 15:15:12,697 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39815.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:15:18,343 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.581e+02 3.870e+02 4.647e+02 5.944e+02 1.250e+03, threshold=9.293e+02, percent-clipped=1.0 +2023-03-28 15:15:21,070 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39819.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:15:42,406 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39828.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:16:09,903 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39841.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:16:42,173 INFO [train.py:892] (0/4) Epoch 22, batch 900, loss[loss=0.2121, simple_loss=0.2853, pruned_loss=0.06943, over 19639.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2545, pruned_loss=0.05428, over 3907259.48 frames. ], batch size: 68, lr: 7.15e-03, grad_scale: 16.0 +2023-03-28 15:17:15,903 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.9225, 2.1510, 1.9622, 1.2939, 1.9633, 2.0799, 1.9662, 2.0702], + device='cuda:0'), covar=tensor([0.0332, 0.0269, 0.0285, 0.0581, 0.0386, 0.0272, 0.0263, 0.0245], + device='cuda:0'), in_proj_covar=tensor([0.0088, 0.0083, 0.0090, 0.0094, 0.0097, 0.0073, 0.0073, 0.0074], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 15:17:30,770 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39876.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:18:01,227 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39889.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:18:37,562 INFO [train.py:892] (0/4) Epoch 22, batch 950, loss[loss=0.1653, simple_loss=0.2462, pruned_loss=0.04219, over 19626.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2552, pruned_loss=0.05409, over 3914559.60 frames. ], batch size: 52, lr: 7.15e-03, grad_scale: 16.0 +2023-03-28 15:19:06,558 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.773e+02 4.111e+02 4.981e+02 6.379e+02 1.110e+03, threshold=9.962e+02, percent-clipped=1.0 +2023-03-28 15:19:56,036 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39940.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:20:28,356 INFO [train.py:892] (0/4) Epoch 22, batch 1000, loss[loss=0.2235, simple_loss=0.2913, pruned_loss=0.07785, over 19715.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2568, pruned_loss=0.05502, over 3921299.67 frames. ], batch size: 305, lr: 7.14e-03, grad_scale: 16.0 +2023-03-28 15:20:44,319 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.81 vs. 
limit=5.0 +2023-03-28 15:21:15,476 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0836, 2.9446, 4.7043, 3.1895, 3.7481, 3.5504, 2.4275, 2.7055], + device='cuda:0'), covar=tensor([0.1108, 0.3534, 0.0582, 0.1229, 0.2104, 0.1558, 0.2765, 0.2676], + device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0368, 0.0326, 0.0262, 0.0363, 0.0344, 0.0347, 0.0317], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 15:21:42,493 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39988.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:21:59,189 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39995.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 15:22:08,859 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-40000.pt +2023-03-28 15:22:27,073 INFO [train.py:892] (0/4) Epoch 22, batch 1050, loss[loss=0.1715, simple_loss=0.2617, pruned_loss=0.04063, over 19771.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.256, pruned_loss=0.05466, over 3927813.68 frames. ], batch size: 53, lr: 7.14e-03, grad_scale: 16.0 +2023-03-28 15:22:53,659 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.755e+02 4.195e+02 4.862e+02 6.171e+02 1.256e+03, threshold=9.724e+02, percent-clipped=2.0 +2023-03-28 15:23:13,102 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3578, 5.7063, 5.9530, 5.7576, 5.5740, 5.4115, 5.6093, 5.4826], + device='cuda:0'), covar=tensor([0.1308, 0.1093, 0.0804, 0.1149, 0.0661, 0.0793, 0.1848, 0.1980], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0301, 0.0345, 0.0277, 0.0258, 0.0259, 0.0337, 0.0363], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 15:23:51,114 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4851, 2.4854, 2.6537, 2.0951, 2.7183, 2.2669, 2.6537, 2.6410], + device='cuda:0'), covar=tensor([0.0418, 0.0513, 0.0415, 0.0825, 0.0364, 0.0444, 0.0461, 0.0344], + device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0078, 0.0075, 0.0104, 0.0071, 0.0072, 0.0070, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 15:24:16,345 INFO [train.py:892] (0/4) Epoch 22, batch 1100, loss[loss=0.1849, simple_loss=0.2608, pruned_loss=0.05454, over 19765.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2557, pruned_loss=0.05465, over 3932868.75 frames. ], batch size: 217, lr: 7.13e-03, grad_scale: 16.0 +2023-03-28 15:26:09,784 INFO [train.py:892] (0/4) Epoch 22, batch 1150, loss[loss=0.1958, simple_loss=0.2541, pruned_loss=0.06871, over 19834.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.256, pruned_loss=0.0553, over 3937109.16 frames. ], batch size: 128, lr: 7.13e-03, grad_scale: 16.0 +2023-03-28 15:26:31,144 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. 
limit=2.0 +2023-03-28 15:26:33,617 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.627e+02 4.168e+02 4.947e+02 6.030e+02 1.277e+03, threshold=9.894e+02, percent-clipped=1.0 +2023-03-28 15:26:37,661 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40119.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:26:46,578 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40123.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:28:02,030 INFO [train.py:892] (0/4) Epoch 22, batch 1200, loss[loss=0.1817, simple_loss=0.252, pruned_loss=0.05569, over 19893.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2551, pruned_loss=0.05465, over 3939436.44 frames. ], batch size: 94, lr: 7.13e-03, grad_scale: 16.0 +2023-03-28 15:28:12,553 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9327, 2.8083, 3.1492, 2.3843, 3.1023, 2.6366, 2.9705, 3.2300], + device='cuda:0'), covar=tensor([0.0453, 0.0479, 0.0481, 0.0757, 0.0367, 0.0406, 0.0399, 0.0256], + device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0078, 0.0074, 0.0103, 0.0071, 0.0072, 0.0069, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 15:28:26,257 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40167.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:28:34,878 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40171.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:29:31,888 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40195.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:29:54,916 INFO [train.py:892] (0/4) Epoch 22, batch 1250, loss[loss=0.1775, simple_loss=0.256, pruned_loss=0.04946, over 19674.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2549, pruned_loss=0.05476, over 3941127.49 frames. ], batch size: 52, lr: 7.12e-03, grad_scale: 16.0 +2023-03-28 15:30:21,939 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.511e+02 3.858e+02 4.574e+02 5.518e+02 1.243e+03, threshold=9.148e+02, percent-clipped=1.0 +2023-03-28 15:30:22,996 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40218.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:31:49,167 INFO [train.py:892] (0/4) Epoch 22, batch 1300, loss[loss=0.1741, simple_loss=0.2519, pruned_loss=0.04815, over 19785.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2546, pruned_loss=0.05487, over 3943626.74 frames. 
], batch size: 73, lr: 7.12e-03, grad_scale: 16.0 +2023-03-28 15:31:50,149 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40256.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:32:25,766 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40272.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:32:41,610 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40279.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:33:20,512 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40295.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 15:33:30,601 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5859, 2.5573, 2.8018, 2.6609, 2.5703, 2.7162, 2.5455, 2.7162], + device='cuda:0'), covar=tensor([0.0265, 0.0322, 0.0250, 0.0265, 0.0363, 0.0254, 0.0368, 0.0283], + device='cuda:0'), in_proj_covar=tensor([0.0071, 0.0065, 0.0069, 0.0062, 0.0075, 0.0069, 0.0087, 0.0061], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-03-28 15:33:43,542 INFO [train.py:892] (0/4) Epoch 22, batch 1350, loss[loss=0.1632, simple_loss=0.2301, pruned_loss=0.04819, over 19768.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2546, pruned_loss=0.05447, over 3946162.49 frames. ], batch size: 125, lr: 7.11e-03, grad_scale: 16.0 +2023-03-28 15:34:11,586 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.667e+02 4.340e+02 4.978e+02 5.730e+02 1.244e+03, threshold=9.955e+02, percent-clipped=1.0 +2023-03-28 15:34:39,492 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40330.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:34:46,373 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40333.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:34:51,471 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-28 15:34:56,294 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-28 15:35:09,332 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40343.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 15:35:35,539 INFO [train.py:892] (0/4) Epoch 22, batch 1400, loss[loss=0.169, simple_loss=0.2323, pruned_loss=0.0529, over 19656.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2547, pruned_loss=0.05461, over 3947351.12 frames. ], batch size: 43, lr: 7.11e-03, grad_scale: 16.0 +2023-03-28 15:36:58,838 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40391.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:37:29,880 INFO [train.py:892] (0/4) Epoch 22, batch 1450, loss[loss=0.1804, simple_loss=0.2545, pruned_loss=0.05314, over 19931.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2561, pruned_loss=0.05527, over 3946519.34 frames. ], batch size: 49, lr: 7.10e-03, grad_scale: 16.0 +2023-03-28 15:37:57,982 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.955e+02 4.197e+02 4.850e+02 6.324e+02 1.307e+03, threshold=9.700e+02, percent-clipped=2.0 +2023-03-28 15:38:11,674 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40423.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:38:53,899 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.73 vs. 
limit=5.0 +2023-03-28 15:39:20,787 INFO [train.py:892] (0/4) Epoch 22, batch 1500, loss[loss=0.17, simple_loss=0.2527, pruned_loss=0.04361, over 19793.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2549, pruned_loss=0.05474, over 3947491.48 frames. ], batch size: 83, lr: 7.10e-03, grad_scale: 16.0 +2023-03-28 15:39:34,425 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0866, 2.4317, 3.0070, 3.2413, 3.6157, 4.1179, 3.9148, 4.0439], + device='cuda:0'), covar=tensor([0.0824, 0.1806, 0.1247, 0.0640, 0.0392, 0.0198, 0.0325, 0.0381], + device='cuda:0'), in_proj_covar=tensor([0.0155, 0.0170, 0.0173, 0.0144, 0.0126, 0.0122, 0.0115, 0.0110], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 15:39:49,632 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.99 vs. limit=5.0 +2023-03-28 15:39:55,559 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40471.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:39:55,674 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40471.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:40:41,422 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40492.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:40:58,327 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-03-28 15:41:12,875 INFO [train.py:892] (0/4) Epoch 22, batch 1550, loss[loss=0.1744, simple_loss=0.2553, pruned_loss=0.04678, over 19669.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2546, pruned_loss=0.05431, over 3948865.06 frames. ], batch size: 73, lr: 7.10e-03, grad_scale: 16.0 +2023-03-28 15:41:32,456 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.53 vs. limit=5.0 +2023-03-28 15:41:39,827 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.411e+02 3.822e+02 4.612e+02 5.584e+02 1.112e+03, threshold=9.223e+02, percent-clipped=2.0 +2023-03-28 15:41:43,036 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40519.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:42:25,313 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5481, 4.4068, 4.4365, 4.7448, 4.5511, 5.1148, 4.7720, 4.9041], + device='cuda:0'), covar=tensor([0.0957, 0.0528, 0.0676, 0.0454, 0.0807, 0.0398, 0.0604, 0.0643], + device='cuda:0'), in_proj_covar=tensor([0.0144, 0.0165, 0.0189, 0.0164, 0.0163, 0.0147, 0.0142, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 15:42:54,680 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40551.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:42:59,528 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40553.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:43:05,443 INFO [train.py:892] (0/4) Epoch 22, batch 1600, loss[loss=0.1843, simple_loss=0.2537, pruned_loss=0.05739, over 19753.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2542, pruned_loss=0.05415, over 3950594.18 frames. 
], batch size: 213, lr: 7.09e-03, grad_scale: 16.0 +2023-03-28 15:43:46,440 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40574.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:44:55,487 INFO [train.py:892] (0/4) Epoch 22, batch 1650, loss[loss=0.1719, simple_loss=0.2431, pruned_loss=0.0504, over 19787.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2539, pruned_loss=0.05376, over 3950622.68 frames. ], batch size: 178, lr: 7.09e-03, grad_scale: 16.0 +2023-03-28 15:45:24,172 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.699e+02 4.293e+02 4.863e+02 5.850e+02 9.887e+02, threshold=9.726e+02, percent-clipped=2.0 +2023-03-28 15:45:46,585 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40628.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:46:34,576 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40651.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:46:46,988 INFO [train.py:892] (0/4) Epoch 22, batch 1700, loss[loss=0.1757, simple_loss=0.2488, pruned_loss=0.0513, over 19833.00 frames. ], tot_loss[loss=0.181, simple_loss=0.254, pruned_loss=0.054, over 3950369.41 frames. ], batch size: 184, lr: 7.08e-03, grad_scale: 16.0 +2023-03-28 15:47:52,434 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40686.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:48:31,904 INFO [train.py:892] (0/4) Epoch 22, batch 1750, loss[loss=0.1629, simple_loss=0.2467, pruned_loss=0.03955, over 19781.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2541, pruned_loss=0.05419, over 3950441.51 frames. ], batch size: 52, lr: 7.08e-03, grad_scale: 16.0 +2023-03-28 15:48:44,930 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40712.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:48:56,707 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.471e+02 3.805e+02 4.645e+02 5.444e+02 1.360e+03, threshold=9.290e+02, percent-clipped=2.0 +2023-03-28 15:50:07,466 INFO [train.py:892] (0/4) Epoch 22, batch 1800, loss[loss=0.16, simple_loss=0.2322, pruned_loss=0.04393, over 19786.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2544, pruned_loss=0.05415, over 3948423.85 frames. ], batch size: 178, lr: 7.07e-03, grad_scale: 16.0 +2023-03-28 15:50:48,753 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-03-28 15:51:37,484 INFO [train.py:892] (0/4) Epoch 22, batch 1850, loss[loss=0.2183, simple_loss=0.3024, pruned_loss=0.06707, over 19819.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2562, pruned_loss=0.05411, over 3949113.89 frames. ], batch size: 57, lr: 7.07e-03, grad_scale: 16.0 +2023-03-28 15:51:45,026 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-22.pt +2023-03-28 15:52:42,421 INFO [train.py:892] (0/4) Epoch 23, batch 0, loss[loss=0.1733, simple_loss=0.2411, pruned_loss=0.05271, over 19792.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2411, pruned_loss=0.05271, over 19792.00 frames. 
], batch size: 178, lr: 6.91e-03, grad_scale: 16.0 +2023-03-28 15:52:42,422 INFO [train.py:917] (0/4) Computing validation loss +2023-03-28 15:53:13,005 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8665, 3.7147, 3.7277, 3.9567, 3.8040, 3.6895, 3.9745, 4.1494], + device='cuda:0'), covar=tensor([0.0605, 0.0391, 0.0454, 0.0306, 0.0552, 0.0593, 0.0403, 0.0260], + device='cuda:0'), in_proj_covar=tensor([0.0144, 0.0166, 0.0190, 0.0164, 0.0163, 0.0148, 0.0143, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 15:53:16,258 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0548, 2.7901, 3.4646, 3.3535, 3.7864, 4.1839, 4.1638, 4.1138], + device='cuda:0'), covar=tensor([0.0755, 0.1493, 0.0926, 0.0583, 0.0309, 0.0204, 0.0260, 0.0540], + device='cuda:0'), in_proj_covar=tensor([0.0155, 0.0168, 0.0173, 0.0144, 0.0125, 0.0121, 0.0115, 0.0109], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 15:53:21,303 INFO [train.py:926] (0/4) Epoch 23, validation: loss=0.1723, simple_loss=0.2475, pruned_loss=0.04853, over 2883724.00 frames. +2023-03-28 15:53:21,306 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-28 15:53:38,437 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.877e+02 3.829e+02 4.262e+02 4.921e+02 1.071e+03, threshold=8.525e+02, percent-clipped=1.0 +2023-03-28 15:53:57,086 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.65 vs. limit=5.0 +2023-03-28 15:54:45,655 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-03-28 15:54:47,557 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40848.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:54:52,261 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7744, 3.4046, 3.6178, 3.3672, 3.9021, 3.8792, 4.6139, 5.0927], + device='cuda:0'), covar=tensor([0.0382, 0.1439, 0.1335, 0.1872, 0.1554, 0.1261, 0.0529, 0.0345], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0237, 0.0260, 0.0249, 0.0290, 0.0250, 0.0218, 0.0242], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 15:54:56,679 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40851.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:55:18,633 INFO [train.py:892] (0/4) Epoch 23, batch 50, loss[loss=0.161, simple_loss=0.2436, pruned_loss=0.03922, over 19726.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2477, pruned_loss=0.05067, over 891466.99 frames. 
], batch size: 51, lr: 6.91e-03, grad_scale: 16.0 +2023-03-28 15:55:28,475 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40865.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:55:48,654 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40874.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:56:49,582 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40899.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:56:51,915 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40900.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:57:16,968 INFO [train.py:892] (0/4) Epoch 23, batch 100, loss[loss=0.2136, simple_loss=0.2905, pruned_loss=0.06838, over 19645.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2509, pruned_loss=0.05291, over 1569887.96 frames. ], batch size: 343, lr: 6.90e-03, grad_scale: 16.0 +2023-03-28 15:57:32,516 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.530e+02 4.075e+02 4.882e+02 5.777e+02 1.089e+03, threshold=9.764e+02, percent-clipped=3.0 +2023-03-28 15:57:44,336 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40922.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:57:53,720 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40926.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:57:58,238 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40928.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:59:13,802 INFO [train.py:892] (0/4) Epoch 23, batch 150, loss[loss=0.1595, simple_loss=0.2348, pruned_loss=0.04209, over 19595.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2492, pruned_loss=0.05128, over 2097549.88 frames. ], batch size: 45, lr: 6.90e-03, grad_scale: 16.0 +2023-03-28 15:59:14,831 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40961.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 15:59:44,675 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0392, 4.7173, 4.9663, 4.5999, 4.3827, 4.9400, 4.8046, 5.2447], + device='cuda:0'), covar=tensor([0.1729, 0.0446, 0.0710, 0.0448, 0.0693, 0.0539, 0.0541, 0.0549], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0214, 0.0213, 0.0223, 0.0201, 0.0226, 0.0222, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 15:59:46,823 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40976.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 15:59:47,408 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.03 vs. limit=5.0 +2023-03-28 16:00:11,379 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40986.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:00:14,742 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.78 vs. limit=5.0 +2023-03-28 16:00:40,823 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40997.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:01:01,971 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41007.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:01:09,790 INFO [train.py:892] (0/4) Epoch 23, batch 200, loss[loss=0.1698, simple_loss=0.2386, pruned_loss=0.05046, over 19827.00 frames. 
], tot_loss[loss=0.1769, simple_loss=0.2503, pruned_loss=0.05178, over 2509195.93 frames. ], batch size: 177, lr: 6.89e-03, grad_scale: 16.0 +2023-03-28 16:01:24,979 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.565e+02 4.412e+02 5.143e+02 6.492e+02 1.088e+03, threshold=1.029e+03, percent-clipped=2.0 +2023-03-28 16:01:35,067 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5805, 2.5685, 2.8633, 2.6065, 2.9692, 2.9211, 3.4091, 3.6936], + device='cuda:0'), covar=tensor([0.0676, 0.1709, 0.1625, 0.2019, 0.1561, 0.1511, 0.0664, 0.0638], + device='cuda:0'), in_proj_covar=tensor([0.0240, 0.0234, 0.0257, 0.0247, 0.0286, 0.0249, 0.0216, 0.0239], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 16:02:08,279 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41034.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:02:30,259 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41045.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:03:00,045 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41058.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:03:05,853 INFO [train.py:892] (0/4) Epoch 23, batch 250, loss[loss=0.1821, simple_loss=0.2634, pruned_loss=0.05043, over 19584.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2514, pruned_loss=0.05249, over 2827873.02 frames. ], batch size: 49, lr: 6.89e-03, grad_scale: 16.0 +2023-03-28 16:04:44,488 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41102.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:04:55,242 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41106.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:05:07,152 INFO [train.py:892] (0/4) Epoch 23, batch 300, loss[loss=0.1718, simple_loss=0.2533, pruned_loss=0.04512, over 19813.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2529, pruned_loss=0.05333, over 3076722.26 frames. ], batch size: 128, lr: 6.89e-03, grad_scale: 16.0 +2023-03-28 16:05:08,093 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2756, 5.5239, 5.8068, 5.5668, 5.4866, 5.2219, 5.4775, 5.3085], + device='cuda:0'), covar=tensor([0.1343, 0.1217, 0.0833, 0.1073, 0.0711, 0.0954, 0.1780, 0.1795], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0305, 0.0353, 0.0284, 0.0262, 0.0262, 0.0340, 0.0367], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 16:05:23,590 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.930e+02 3.981e+02 4.919e+02 5.993e+02 1.063e+03, threshold=9.839e+02, percent-clipped=1.0 +2023-03-28 16:06:36,969 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41148.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:07:05,699 INFO [train.py:892] (0/4) Epoch 23, batch 350, loss[loss=0.1741, simple_loss=0.2584, pruned_loss=0.04492, over 19961.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2516, pruned_loss=0.0529, over 3271021.27 frames. 
], batch size: 53, lr: 6.88e-03, grad_scale: 16.0 +2023-03-28 16:07:10,774 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41163.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:08:27,230 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41196.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:09:03,396 INFO [train.py:892] (0/4) Epoch 23, batch 400, loss[loss=0.208, simple_loss=0.2876, pruned_loss=0.06424, over 19719.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2514, pruned_loss=0.0528, over 3422625.46 frames. ], batch size: 295, lr: 6.88e-03, grad_scale: 16.0 +2023-03-28 16:09:21,752 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2338, 4.9549, 4.9456, 5.2586, 4.9210, 5.4879, 5.4345, 5.5808], + device='cuda:0'), covar=tensor([0.0588, 0.0279, 0.0390, 0.0282, 0.0590, 0.0314, 0.0340, 0.0263], + device='cuda:0'), in_proj_covar=tensor([0.0143, 0.0165, 0.0188, 0.0163, 0.0162, 0.0147, 0.0141, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 16:09:22,959 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.551e+02 4.181e+02 5.035e+02 6.093e+02 9.382e+02, threshold=1.007e+03, percent-clipped=0.0 +2023-03-28 16:09:31,466 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41221.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:09:31,658 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8509, 2.9689, 3.1119, 3.0690, 2.7142, 2.9898, 2.8587, 3.0895], + device='cuda:0'), covar=tensor([0.0302, 0.0324, 0.0271, 0.0226, 0.0353, 0.0257, 0.0402, 0.0328], + device='cuda:0'), in_proj_covar=tensor([0.0073, 0.0068, 0.0071, 0.0065, 0.0077, 0.0072, 0.0090, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-03-28 16:10:58,717 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41256.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 16:11:08,148 INFO [train.py:892] (0/4) Epoch 23, batch 450, loss[loss=0.1461, simple_loss=0.2194, pruned_loss=0.03644, over 19790.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2513, pruned_loss=0.05234, over 3538876.26 frames. ], batch size: 94, lr: 6.87e-03, grad_scale: 16.0 +2023-03-28 16:12:57,483 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41307.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:13:05,170 INFO [train.py:892] (0/4) Epoch 23, batch 500, loss[loss=0.1684, simple_loss=0.2454, pruned_loss=0.04567, over 19785.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2531, pruned_loss=0.05327, over 3629335.84 frames. ], batch size: 91, lr: 6.87e-03, grad_scale: 16.0 +2023-03-28 16:13:24,678 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.561e+02 3.916e+02 4.619e+02 5.416e+02 1.072e+03, threshold=9.239e+02, percent-clipped=2.0 +2023-03-28 16:13:59,230 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=5.02 vs. 
limit=5.0 +2023-03-28 16:14:51,373 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41353.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:14:56,197 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41355.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:15:11,437 INFO [train.py:892] (0/4) Epoch 23, batch 550, loss[loss=0.1633, simple_loss=0.2437, pruned_loss=0.04142, over 19576.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2524, pruned_loss=0.05317, over 3701069.39 frames. ], batch size: 53, lr: 6.87e-03, grad_scale: 16.0 +2023-03-28 16:15:41,882 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5631, 2.5788, 4.0724, 2.8974, 3.2100, 3.0932, 2.1823, 2.3963], + device='cuda:0'), covar=tensor([0.1234, 0.3710, 0.0598, 0.1208, 0.2221, 0.1805, 0.2738, 0.2848], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0370, 0.0329, 0.0265, 0.0362, 0.0347, 0.0350, 0.0316], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 16:16:06,593 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-28 16:16:51,479 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41401.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:17:14,940 INFO [train.py:892] (0/4) Epoch 23, batch 600, loss[loss=0.1894, simple_loss=0.2589, pruned_loss=0.05993, over 19759.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2521, pruned_loss=0.05324, over 3756815.29 frames. ], batch size: 213, lr: 6.86e-03, grad_scale: 16.0 +2023-03-28 16:17:30,480 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.341e+02 3.739e+02 4.752e+02 5.799e+02 9.691e+02, threshold=9.504e+02, percent-clipped=1.0 +2023-03-28 16:19:02,354 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41458.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:19:09,203 INFO [train.py:892] (0/4) Epoch 23, batch 650, loss[loss=0.2265, simple_loss=0.2747, pruned_loss=0.08911, over 19763.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.253, pruned_loss=0.05322, over 3797815.96 frames. ], batch size: 205, lr: 6.86e-03, grad_scale: 16.0 +2023-03-28 16:20:51,750 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-28 16:21:07,515 INFO [train.py:892] (0/4) Epoch 23, batch 700, loss[loss=0.202, simple_loss=0.2814, pruned_loss=0.06127, over 19652.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2532, pruned_loss=0.05331, over 3832277.65 frames. ], batch size: 66, lr: 6.85e-03, grad_scale: 16.0 +2023-03-28 16:21:25,088 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.559e+02 3.930e+02 4.652e+02 5.850e+02 9.862e+02, threshold=9.304e+02, percent-clipped=2.0 +2023-03-28 16:21:31,950 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41521.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:22:57,143 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41556.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 16:23:06,381 INFO [train.py:892] (0/4) Epoch 23, batch 750, loss[loss=0.1856, simple_loss=0.2421, pruned_loss=0.06452, over 19737.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2523, pruned_loss=0.05312, over 3858907.91 frames. 
], batch size: 140, lr: 6.85e-03, grad_scale: 16.0 +2023-03-28 16:23:24,375 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41569.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:23:31,046 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.47 vs. limit=5.0 +2023-03-28 16:24:46,090 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-03-28 16:24:47,236 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41604.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:25:00,930 INFO [train.py:892] (0/4) Epoch 23, batch 800, loss[loss=0.1615, simple_loss=0.2283, pruned_loss=0.04733, over 19757.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2526, pruned_loss=0.05288, over 3879344.69 frames. ], batch size: 125, lr: 6.85e-03, grad_scale: 16.0 +2023-03-28 16:25:18,846 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.557e+02 3.999e+02 4.823e+02 6.128e+02 1.113e+03, threshold=9.646e+02, percent-clipped=2.0 +2023-03-28 16:26:41,184 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41653.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:26:45,642 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41654.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:27:02,024 INFO [train.py:892] (0/4) Epoch 23, batch 850, loss[loss=0.1791, simple_loss=0.245, pruned_loss=0.05658, over 19781.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2529, pruned_loss=0.05306, over 3894338.38 frames. ], batch size: 131, lr: 6.84e-03, grad_scale: 16.0 +2023-03-28 16:28:35,205 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41701.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:28:35,290 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41701.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:28:59,723 INFO [train.py:892] (0/4) Epoch 23, batch 900, loss[loss=0.2192, simple_loss=0.2752, pruned_loss=0.08158, over 19764.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2527, pruned_loss=0.05349, over 3905873.31 frames. ], batch size: 244, lr: 6.84e-03, grad_scale: 16.0 +2023-03-28 16:29:10,377 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41715.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 16:29:18,869 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.536e+02 3.998e+02 4.733e+02 5.736e+02 9.757e+02, threshold=9.466e+02, percent-clipped=2.0 +2023-03-28 16:30:08,595 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-03-28 16:30:31,279 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41749.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:30:53,912 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41758.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:31:00,097 INFO [train.py:892] (0/4) Epoch 23, batch 950, loss[loss=0.1912, simple_loss=0.2648, pruned_loss=0.05883, over 19695.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2538, pruned_loss=0.05358, over 3914731.90 frames. ], batch size: 46, lr: 6.83e-03, grad_scale: 16.0 +2023-03-28 16:32:13,211 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. 
limit=2.0 +2023-03-28 16:32:53,013 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41806.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:33:02,659 INFO [train.py:892] (0/4) Epoch 23, batch 1000, loss[loss=0.1875, simple_loss=0.2474, pruned_loss=0.06379, over 19758.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2519, pruned_loss=0.05317, over 3923346.33 frames. ], batch size: 213, lr: 6.83e-03, grad_scale: 16.0 +2023-03-28 16:33:19,749 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.497e+02 4.077e+02 4.736e+02 5.828e+02 1.008e+03, threshold=9.473e+02, percent-clipped=1.0 +2023-03-28 16:33:43,693 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1386, 3.9524, 3.9708, 3.7753, 4.1302, 2.9134, 3.4830, 2.0303], + device='cuda:0'), covar=tensor([0.0184, 0.0208, 0.0132, 0.0185, 0.0129, 0.1005, 0.0629, 0.1479], + device='cuda:0'), in_proj_covar=tensor([0.0099, 0.0137, 0.0108, 0.0128, 0.0112, 0.0128, 0.0138, 0.0122], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 16:34:37,205 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7215, 3.9271, 4.1543, 4.9255, 3.0764, 3.5939, 3.1196, 2.9575], + device='cuda:0'), covar=tensor([0.0449, 0.1955, 0.0775, 0.0264, 0.2083, 0.0960, 0.1183, 0.1580], + device='cuda:0'), in_proj_covar=tensor([0.0234, 0.0334, 0.0242, 0.0187, 0.0244, 0.0200, 0.0211, 0.0212], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 16:35:00,306 INFO [train.py:892] (0/4) Epoch 23, batch 1050, loss[loss=0.1518, simple_loss=0.2287, pruned_loss=0.03747, over 19780.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2523, pruned_loss=0.05294, over 3930292.26 frames. ], batch size: 91, lr: 6.83e-03, grad_scale: 16.0 +2023-03-28 16:36:34,695 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-03-28 16:36:49,467 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41906.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:37:01,120 INFO [train.py:892] (0/4) Epoch 23, batch 1100, loss[loss=0.1455, simple_loss=0.2257, pruned_loss=0.03271, over 19769.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2525, pruned_loss=0.05304, over 3933257.63 frames. 
], batch size: 113, lr: 6.82e-03, grad_scale: 16.0 +2023-03-28 16:37:20,546 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.656e+02 4.250e+02 5.076e+02 6.187e+02 1.225e+03, threshold=1.015e+03, percent-clipped=1.0 +2023-03-28 16:37:30,733 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2883, 4.1894, 4.6742, 4.4964, 4.5700, 4.0652, 4.4098, 4.2256], + device='cuda:0'), covar=tensor([0.1553, 0.1765, 0.1034, 0.1325, 0.0911, 0.1147, 0.1828, 0.2113], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0304, 0.0351, 0.0282, 0.0262, 0.0262, 0.0336, 0.0364], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 16:37:53,544 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.3875, 2.4590, 2.5765, 2.4783, 2.5150, 2.6078, 2.5011, 2.5955], + device='cuda:0'), covar=tensor([0.0327, 0.0298, 0.0329, 0.0251, 0.0344, 0.0251, 0.0369, 0.0371], + device='cuda:0'), in_proj_covar=tensor([0.0073, 0.0067, 0.0070, 0.0064, 0.0076, 0.0071, 0.0088, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-03-28 16:38:31,130 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41948.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 16:38:58,874 INFO [train.py:892] (0/4) Epoch 23, batch 1150, loss[loss=0.1755, simple_loss=0.2651, pruned_loss=0.04291, over 19780.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2528, pruned_loss=0.05283, over 3935863.90 frames. ], batch size: 53, lr: 6.82e-03, grad_scale: 16.0 +2023-03-28 16:39:13,514 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41967.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:40:32,919 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-42000.pt +2023-03-28 16:40:58,803 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42009.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 16:41:01,025 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42010.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 16:41:03,083 INFO [train.py:892] (0/4) Epoch 23, batch 1200, loss[loss=0.1697, simple_loss=0.2391, pruned_loss=0.05013, over 19650.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2527, pruned_loss=0.05278, over 3939193.27 frames. 
], batch size: 47, lr: 6.81e-03, grad_scale: 16.0 +2023-03-28 16:41:23,893 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.867e+02 3.967e+02 4.687e+02 5.246e+02 9.664e+02, threshold=9.374e+02, percent-clipped=0.0 +2023-03-28 16:41:33,516 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5538, 3.4160, 3.8697, 2.9473, 3.9648, 3.1982, 3.3736, 4.0552], + device='cuda:0'), covar=tensor([0.0612, 0.0361, 0.0418, 0.0750, 0.0366, 0.0387, 0.0415, 0.0216], + device='cuda:0'), in_proj_covar=tensor([0.0071, 0.0079, 0.0077, 0.0106, 0.0073, 0.0074, 0.0071, 0.0064], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 16:42:20,123 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3865, 3.5662, 3.7750, 4.4226, 3.0898, 3.3119, 2.5833, 2.5659], + device='cuda:0'), covar=tensor([0.0501, 0.2442, 0.0958, 0.0329, 0.1977, 0.0965, 0.1478, 0.1860], + device='cuda:0'), in_proj_covar=tensor([0.0232, 0.0329, 0.0241, 0.0185, 0.0241, 0.0198, 0.0211, 0.0210], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 16:43:04,814 INFO [train.py:892] (0/4) Epoch 23, batch 1250, loss[loss=0.1369, simple_loss=0.2168, pruned_loss=0.02848, over 19796.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2515, pruned_loss=0.05214, over 3940212.06 frames. ], batch size: 111, lr: 6.81e-03, grad_scale: 16.0 +2023-03-28 16:44:59,356 INFO [train.py:892] (0/4) Epoch 23, batch 1300, loss[loss=0.1743, simple_loss=0.239, pruned_loss=0.05478, over 19800.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2517, pruned_loss=0.05221, over 3941803.00 frames. ], batch size: 150, lr: 6.81e-03, grad_scale: 16.0 +2023-03-28 16:45:16,104 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.506e+02 3.507e+02 4.427e+02 5.595e+02 1.023e+03, threshold=8.855e+02, percent-clipped=1.0 +2023-03-28 16:46:59,030 INFO [train.py:892] (0/4) Epoch 23, batch 1350, loss[loss=0.2276, simple_loss=0.2966, pruned_loss=0.07928, over 19614.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2513, pruned_loss=0.05205, over 3943603.75 frames. ], batch size: 367, lr: 6.80e-03, grad_scale: 16.0 +2023-03-28 16:47:02,266 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0933, 3.3186, 2.8435, 2.3929, 2.8511, 3.2879, 3.1087, 3.2376], + device='cuda:0'), covar=tensor([0.0237, 0.0259, 0.0272, 0.0506, 0.0372, 0.0243, 0.0219, 0.0187], + device='cuda:0'), in_proj_covar=tensor([0.0091, 0.0085, 0.0091, 0.0095, 0.0098, 0.0075, 0.0074, 0.0075], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 16:47:58,699 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3002, 5.7194, 5.8549, 5.6389, 5.5469, 5.4438, 5.5177, 5.3922], + device='cuda:0'), covar=tensor([0.1416, 0.1481, 0.0962, 0.1204, 0.0691, 0.0741, 0.1888, 0.1926], + device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0301, 0.0348, 0.0279, 0.0259, 0.0260, 0.0334, 0.0362], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 16:48:55,512 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-03-28 16:48:58,470 INFO [train.py:892] (0/4) Epoch 23, batch 1400, loss[loss=0.1665, simple_loss=0.2374, pruned_loss=0.04779, over 19881.00 frames. 
], tot_loss[loss=0.1767, simple_loss=0.2502, pruned_loss=0.05156, over 3946083.12 frames. ], batch size: 157, lr: 6.80e-03, grad_scale: 16.0 +2023-03-28 16:49:18,204 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.596e+02 4.046e+02 4.840e+02 5.531e+02 1.167e+03, threshold=9.681e+02, percent-clipped=2.0 +2023-03-28 16:50:41,082 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-03-28 16:50:55,946 INFO [train.py:892] (0/4) Epoch 23, batch 1450, loss[loss=0.1827, simple_loss=0.2521, pruned_loss=0.05664, over 19741.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.25, pruned_loss=0.05139, over 3948474.46 frames. ], batch size: 221, lr: 6.79e-03, grad_scale: 16.0 +2023-03-28 16:50:58,801 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42262.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:51:16,315 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6826, 2.8312, 4.0071, 3.0656, 3.4078, 3.2276, 2.2641, 2.4443], + device='cuda:0'), covar=tensor([0.0925, 0.2832, 0.0531, 0.0927, 0.1488, 0.1338, 0.2305, 0.2641], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0371, 0.0329, 0.0266, 0.0363, 0.0350, 0.0351, 0.0320], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 16:52:39,466 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42304.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 16:52:55,006 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42310.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 16:52:56,157 INFO [train.py:892] (0/4) Epoch 23, batch 1500, loss[loss=0.1846, simple_loss=0.2655, pruned_loss=0.05185, over 19719.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2504, pruned_loss=0.05173, over 3948833.13 frames. ], batch size: 54, lr: 6.79e-03, grad_scale: 16.0 +2023-03-28 16:53:09,475 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-28 16:53:12,434 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.820e+02 3.834e+02 4.491e+02 5.475e+02 9.229e+02, threshold=8.983e+02, percent-clipped=0.0 +2023-03-28 16:53:19,954 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2122, 4.0812, 4.5474, 4.1466, 3.8299, 4.3689, 4.2409, 4.6282], + device='cuda:0'), covar=tensor([0.0860, 0.0367, 0.0368, 0.0386, 0.1088, 0.0552, 0.0484, 0.0343], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0215, 0.0213, 0.0224, 0.0201, 0.0228, 0.0225, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 16:53:28,471 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3923, 4.4998, 2.6470, 4.7546, 4.9324, 2.1306, 4.1385, 3.5726], + device='cuda:0'), covar=tensor([0.0605, 0.0714, 0.2590, 0.0591, 0.0400, 0.2769, 0.0873, 0.0812], + device='cuda:0'), in_proj_covar=tensor([0.0226, 0.0247, 0.0227, 0.0259, 0.0236, 0.0200, 0.0233, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 16:54:46,261 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42358.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:54:51,543 INFO [train.py:892] (0/4) Epoch 23, batch 1550, loss[loss=0.1449, simple_loss=0.2209, pruned_loss=0.03445, over 19756.00 frames. 
], tot_loss[loss=0.1771, simple_loss=0.2507, pruned_loss=0.05175, over 3949564.67 frames. ], batch size: 110, lr: 6.79e-03, grad_scale: 16.0 +2023-03-28 16:55:37,928 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42379.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:56:13,445 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2707, 4.3331, 4.6318, 4.4693, 4.5340, 4.1413, 4.3177, 4.2487], + device='cuda:0'), covar=tensor([0.1424, 0.1588, 0.0958, 0.1258, 0.0829, 0.0997, 0.1962, 0.1971], + device='cuda:0'), in_proj_covar=tensor([0.0278, 0.0305, 0.0351, 0.0282, 0.0261, 0.0260, 0.0337, 0.0367], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 16:56:13,503 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4513, 4.7205, 4.7480, 4.6540, 4.4716, 4.7175, 4.2351, 4.2865], + device='cuda:0'), covar=tensor([0.0459, 0.0469, 0.0521, 0.0424, 0.0553, 0.0518, 0.0635, 0.0911], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0255, 0.0274, 0.0236, 0.0238, 0.0225, 0.0246, 0.0289], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 16:56:55,952 INFO [train.py:892] (0/4) Epoch 23, batch 1600, loss[loss=0.1812, simple_loss=0.2579, pruned_loss=0.05224, over 19756.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.251, pruned_loss=0.05142, over 3949735.55 frames. ], batch size: 256, lr: 6.78e-03, grad_scale: 16.0 +2023-03-28 16:56:57,753 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.88 vs. limit=5.0 +2023-03-28 16:57:13,626 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.225e+02 3.903e+02 4.499e+02 5.665e+02 1.044e+03, threshold=8.998e+02, percent-clipped=1.0 +2023-03-28 16:58:05,824 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42440.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 16:58:53,570 INFO [train.py:892] (0/4) Epoch 23, batch 1650, loss[loss=0.1519, simple_loss=0.2192, pruned_loss=0.04236, over 19800.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2517, pruned_loss=0.05189, over 3951073.02 frames. ], batch size: 151, lr: 6.78e-03, grad_scale: 16.0 +2023-03-28 17:00:22,666 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7516, 2.8224, 4.5015, 3.9240, 4.3908, 4.5127, 4.3130, 4.1939], + device='cuda:0'), covar=tensor([0.0414, 0.0882, 0.0108, 0.0823, 0.0117, 0.0168, 0.0155, 0.0147], + device='cuda:0'), in_proj_covar=tensor([0.0093, 0.0100, 0.0082, 0.0149, 0.0079, 0.0093, 0.0086, 0.0081], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 17:00:37,813 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42506.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:00:47,775 INFO [train.py:892] (0/4) Epoch 23, batch 1700, loss[loss=0.1743, simple_loss=0.2387, pruned_loss=0.05491, over 19805.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2508, pruned_loss=0.05157, over 3951882.73 frames. 
], batch size: 47, lr: 6.77e-03, grad_scale: 16.0 +2023-03-28 17:01:09,673 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.489e+02 4.052e+02 4.477e+02 5.328e+02 1.019e+03, threshold=8.953e+02, percent-clipped=3.0 +2023-03-28 17:01:24,336 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0140, 4.5925, 4.6269, 4.9455, 4.6601, 5.1834, 5.0848, 5.3045], + device='cuda:0'), covar=tensor([0.0596, 0.0355, 0.0403, 0.0301, 0.0619, 0.0351, 0.0394, 0.0290], + device='cuda:0'), in_proj_covar=tensor([0.0144, 0.0169, 0.0192, 0.0165, 0.0166, 0.0148, 0.0144, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 17:02:39,531 INFO [train.py:892] (0/4) Epoch 23, batch 1750, loss[loss=0.1584, simple_loss=0.2227, pruned_loss=0.0471, over 19875.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2511, pruned_loss=0.05193, over 3951627.78 frames. ], batch size: 159, lr: 6.77e-03, grad_scale: 16.0 +2023-03-28 17:02:43,418 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42562.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:02:43,602 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2456, 3.2547, 2.1410, 3.9075, 3.5571, 3.9409, 3.9043, 3.0545], + device='cuda:0'), covar=tensor([0.0617, 0.0616, 0.1487, 0.0591, 0.0647, 0.0375, 0.0597, 0.0782], + device='cuda:0'), in_proj_covar=tensor([0.0137, 0.0135, 0.0140, 0.0141, 0.0125, 0.0124, 0.0138, 0.0139], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 17:02:52,544 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42567.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:04:07,763 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42604.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 17:04:19,807 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42610.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:04:21,415 INFO [train.py:892] (0/4) Epoch 23, batch 1800, loss[loss=0.2108, simple_loss=0.2803, pruned_loss=0.07069, over 19704.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2515, pruned_loss=0.05195, over 3951514.90 frames. ], batch size: 265, lr: 6.77e-03, grad_scale: 16.0 +2023-03-28 17:04:37,538 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.634e+02 3.841e+02 4.565e+02 5.704e+02 1.447e+03, threshold=9.129e+02, percent-clipped=3.0 +2023-03-28 17:05:31,781 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42649.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:05:37,461 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42652.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 17:05:54,185 INFO [train.py:892] (0/4) Epoch 23, batch 1850, loss[loss=0.1958, simple_loss=0.2751, pruned_loss=0.05822, over 19831.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2524, pruned_loss=0.05131, over 3950991.87 frames. ], batch size: 57, lr: 6.76e-03, grad_scale: 16.0 +2023-03-28 17:06:01,686 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-23.pt +2023-03-28 17:06:55,096 INFO [train.py:892] (0/4) Epoch 24, batch 0, loss[loss=0.1477, simple_loss=0.2185, pruned_loss=0.03842, over 19793.00 frames. ], tot_loss[loss=0.1477, simple_loss=0.2185, pruned_loss=0.03842, over 19793.00 frames. 
], batch size: 105, lr: 6.62e-03, grad_scale: 16.0 +2023-03-28 17:06:55,097 INFO [train.py:917] (0/4) Computing validation loss +2023-03-28 17:07:24,173 INFO [train.py:926] (0/4) Epoch 24, validation: loss=0.1738, simple_loss=0.2478, pruned_loss=0.0499, over 2883724.00 frames. +2023-03-28 17:07:24,175 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-28 17:07:25,251 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6925, 4.6247, 5.1441, 4.6868, 4.1793, 4.9106, 4.8138, 5.3081], + device='cuda:0'), covar=tensor([0.0974, 0.0362, 0.0374, 0.0365, 0.0880, 0.0507, 0.0418, 0.0321], + device='cuda:0'), in_proj_covar=tensor([0.0275, 0.0215, 0.0214, 0.0224, 0.0201, 0.0229, 0.0224, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 17:09:11,830 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42710.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:09:24,496 INFO [train.py:892] (0/4) Epoch 24, batch 50, loss[loss=0.2719, simple_loss=0.3406, pruned_loss=0.1016, over 19492.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2533, pruned_loss=0.05508, over 890716.93 frames. ], batch size: 396, lr: 6.61e-03, grad_scale: 16.0 +2023-03-28 17:09:30,669 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.706e+02 3.984e+02 4.809e+02 5.646e+02 9.126e+02, threshold=9.617e+02, percent-clipped=0.0 +2023-03-28 17:10:09,725 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42735.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:11:17,041 INFO [train.py:892] (0/4) Epoch 24, batch 100, loss[loss=0.1637, simple_loss=0.2411, pruned_loss=0.04313, over 19723.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2492, pruned_loss=0.05112, over 1570381.03 frames. ], batch size: 104, lr: 6.61e-03, grad_scale: 16.0 +2023-03-28 17:11:32,404 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7733, 4.0524, 4.1884, 4.9822, 3.1758, 3.6528, 3.0860, 3.0490], + device='cuda:0'), covar=tensor([0.0460, 0.1907, 0.0804, 0.0305, 0.2089, 0.0948, 0.1208, 0.1565], + device='cuda:0'), in_proj_covar=tensor([0.0233, 0.0332, 0.0242, 0.0189, 0.0243, 0.0199, 0.0212, 0.0210], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 17:12:45,600 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0960, 4.1170, 2.3919, 4.3486, 4.4638, 1.8917, 3.7002, 3.3582], + device='cuda:0'), covar=tensor([0.0694, 0.0870, 0.2827, 0.0847, 0.0599, 0.3049, 0.1092, 0.0863], + device='cuda:0'), in_proj_covar=tensor([0.0224, 0.0248, 0.0225, 0.0258, 0.0236, 0.0199, 0.0233, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 17:13:12,218 INFO [train.py:892] (0/4) Epoch 24, batch 150, loss[loss=0.1888, simple_loss=0.2774, pruned_loss=0.05007, over 19647.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2515, pruned_loss=0.05193, over 2097376.17 frames. 
], batch size: 55, lr: 6.60e-03, grad_scale: 16.0 +2023-03-28 17:13:19,958 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.303e+02 3.674e+02 4.384e+02 5.229e+02 7.320e+02, threshold=8.767e+02, percent-clipped=0.0 +2023-03-28 17:14:17,351 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9956, 3.3431, 2.7898, 2.4920, 2.9410, 3.2241, 3.1859, 3.1724], + device='cuda:0'), covar=tensor([0.0240, 0.0272, 0.0270, 0.0448, 0.0298, 0.0213, 0.0172, 0.0210], + device='cuda:0'), in_proj_covar=tensor([0.0090, 0.0083, 0.0089, 0.0093, 0.0096, 0.0074, 0.0073, 0.0074], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 17:14:33,634 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1038, 3.0714, 2.0007, 3.7079, 3.4132, 3.7138, 3.7799, 2.8678], + device='cuda:0'), covar=tensor([0.0609, 0.0620, 0.1625, 0.0541, 0.0506, 0.0358, 0.0495, 0.0799], + device='cuda:0'), in_proj_covar=tensor([0.0138, 0.0136, 0.0140, 0.0142, 0.0125, 0.0124, 0.0138, 0.0140], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 17:14:40,583 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4546, 4.3250, 4.7930, 4.3659, 4.0531, 4.6414, 4.5077, 4.9315], + device='cuda:0'), covar=tensor([0.0934, 0.0421, 0.0414, 0.0430, 0.0925, 0.0481, 0.0439, 0.0343], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0215, 0.0214, 0.0224, 0.0202, 0.0228, 0.0223, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 17:14:53,478 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4413, 4.7712, 4.7930, 4.6841, 4.5218, 4.7704, 4.3057, 4.3378], + device='cuda:0'), covar=tensor([0.0495, 0.0432, 0.0447, 0.0422, 0.0528, 0.0478, 0.0646, 0.0927], + device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0261, 0.0280, 0.0240, 0.0244, 0.0232, 0.0250, 0.0297], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 17:14:53,552 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42859.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:15:01,889 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42862.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:15:09,287 INFO [train.py:892] (0/4) Epoch 24, batch 200, loss[loss=0.1564, simple_loss=0.2217, pruned_loss=0.04552, over 19742.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2541, pruned_loss=0.05273, over 2505608.13 frames. 
], batch size: 140, lr: 6.60e-03, grad_scale: 16.0 +2023-03-28 17:15:39,847 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42879.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:15:50,946 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7954, 4.3919, 4.4971, 4.7461, 4.3312, 4.9655, 4.8978, 5.1332], + device='cuda:0'), covar=tensor([0.0659, 0.0374, 0.0406, 0.0302, 0.0725, 0.0340, 0.0360, 0.0246], + device='cuda:0'), in_proj_covar=tensor([0.0145, 0.0170, 0.0193, 0.0166, 0.0166, 0.0149, 0.0144, 0.0190], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 17:16:57,417 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.2765, 2.2280, 2.4735, 2.3577, 2.3357, 2.5111, 2.2762, 2.4193], + device='cuda:0'), covar=tensor([0.0315, 0.0393, 0.0296, 0.0264, 0.0396, 0.0263, 0.0439, 0.0301], + device='cuda:0'), in_proj_covar=tensor([0.0075, 0.0068, 0.0072, 0.0065, 0.0078, 0.0073, 0.0090, 0.0064], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-03-28 17:17:07,504 INFO [train.py:892] (0/4) Epoch 24, batch 250, loss[loss=0.183, simple_loss=0.261, pruned_loss=0.05253, over 19747.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2525, pruned_loss=0.05187, over 2825729.58 frames. ], batch size: 291, lr: 6.60e-03, grad_scale: 16.0 +2023-03-28 17:17:15,130 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.878e+02 4.086e+02 4.768e+02 5.713e+02 1.218e+03, threshold=9.536e+02, percent-clipped=1.0 +2023-03-28 17:17:18,017 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42920.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:18:07,851 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42940.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:18:11,183 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-03-28 17:18:35,972 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2305, 2.8767, 3.2672, 3.0141, 3.4069, 3.3116, 4.0201, 4.4303], + device='cuda:0'), covar=tensor([0.0613, 0.1670, 0.1464, 0.2044, 0.1657, 0.1514, 0.0602, 0.0570], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0234, 0.0258, 0.0247, 0.0287, 0.0249, 0.0218, 0.0241], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 17:19:11,299 INFO [train.py:892] (0/4) Epoch 24, batch 300, loss[loss=0.1918, simple_loss=0.2815, pruned_loss=0.0511, over 19816.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2529, pruned_loss=0.05176, over 3074655.38 frames. 
], batch size: 50, lr: 6.59e-03, grad_scale: 16.0 +2023-03-28 17:19:48,662 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7406, 3.5830, 3.5785, 3.3661, 3.6888, 2.7793, 3.0147, 1.7386], + device='cuda:0'), covar=tensor([0.0218, 0.0266, 0.0163, 0.0225, 0.0160, 0.1075, 0.0726, 0.1763], + device='cuda:0'), in_proj_covar=tensor([0.0101, 0.0141, 0.0110, 0.0130, 0.0115, 0.0131, 0.0141, 0.0124], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 17:20:47,156 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43005.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:21:09,374 INFO [train.py:892] (0/4) Epoch 24, batch 350, loss[loss=0.1666, simple_loss=0.2387, pruned_loss=0.04726, over 19622.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.253, pruned_loss=0.05164, over 3267182.37 frames. ], batch size: 65, lr: 6.59e-03, grad_scale: 16.0 +2023-03-28 17:21:15,714 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.627e+02 4.222e+02 4.669e+02 5.474e+02 9.136e+02, threshold=9.338e+02, percent-clipped=0.0 +2023-03-28 17:21:27,666 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-28 17:21:58,390 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43035.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:23:09,381 INFO [train.py:892] (0/4) Epoch 24, batch 400, loss[loss=0.1585, simple_loss=0.2269, pruned_loss=0.04508, over 19846.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2528, pruned_loss=0.05157, over 3418337.80 frames. ], batch size: 143, lr: 6.59e-03, grad_scale: 16.0 +2023-03-28 17:23:55,357 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43083.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:25:13,665 INFO [train.py:892] (0/4) Epoch 24, batch 450, loss[loss=0.1454, simple_loss=0.2196, pruned_loss=0.03557, over 19848.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2526, pruned_loss=0.05156, over 3537301.05 frames. 
], batch size: 115, lr: 6.58e-03, grad_scale: 16.0 +2023-03-28 17:25:20,844 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.554e+02 4.148e+02 4.850e+02 5.814e+02 7.870e+02, threshold=9.701e+02, percent-clipped=0.0 +2023-03-28 17:25:21,812 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3308, 3.6876, 3.6922, 4.4631, 2.9428, 3.4810, 2.7589, 2.6519], + device='cuda:0'), covar=tensor([0.0524, 0.2211, 0.1030, 0.0359, 0.2070, 0.0866, 0.1306, 0.1778], + device='cuda:0'), in_proj_covar=tensor([0.0231, 0.0329, 0.0239, 0.0188, 0.0240, 0.0196, 0.0208, 0.0209], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 17:25:28,058 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3210, 5.6399, 5.6464, 5.5800, 5.3106, 5.6202, 5.0524, 5.1117], + device='cuda:0'), covar=tensor([0.0442, 0.0374, 0.0482, 0.0376, 0.0568, 0.0534, 0.0665, 0.0898], + device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0261, 0.0278, 0.0238, 0.0243, 0.0232, 0.0249, 0.0296], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 17:27:01,390 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43162.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:27:08,687 INFO [train.py:892] (0/4) Epoch 24, batch 500, loss[loss=0.2279, simple_loss=0.2961, pruned_loss=0.07986, over 19636.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2517, pruned_loss=0.05114, over 3629514.56 frames. ], batch size: 330, lr: 6.58e-03, grad_scale: 16.0 +2023-03-28 17:28:56,654 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43210.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:29:09,113 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43215.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:29:10,718 INFO [train.py:892] (0/4) Epoch 24, batch 550, loss[loss=0.1769, simple_loss=0.254, pruned_loss=0.04987, over 19707.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2506, pruned_loss=0.05077, over 3700924.47 frames. ], batch size: 60, lr: 6.57e-03, grad_scale: 16.0 +2023-03-28 17:29:18,521 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.622e+02 3.772e+02 4.624e+02 5.545e+02 8.960e+02, threshold=9.249e+02, percent-clipped=0.0 +2023-03-28 17:29:59,303 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43235.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:30:14,351 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.66 vs. limit=5.0 +2023-03-28 17:31:16,247 INFO [train.py:892] (0/4) Epoch 24, batch 600, loss[loss=0.1808, simple_loss=0.2629, pruned_loss=0.04941, over 19802.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2511, pruned_loss=0.05093, over 3756696.08 frames. ], batch size: 51, lr: 6.57e-03, grad_scale: 16.0 +2023-03-28 17:32:49,704 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43305.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:33:14,112 INFO [train.py:892] (0/4) Epoch 24, batch 650, loss[loss=0.1696, simple_loss=0.2531, pruned_loss=0.04307, over 19605.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2503, pruned_loss=0.05073, over 3799942.14 frames. 
], batch size: 48, lr: 6.57e-03, grad_scale: 16.0 +2023-03-28 17:33:20,460 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.750e+02 3.990e+02 4.608e+02 6.131e+02 1.046e+03, threshold=9.216e+02, percent-clipped=2.0 +2023-03-28 17:34:34,615 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43353.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:35:00,969 INFO [train.py:892] (0/4) Epoch 24, batch 700, loss[loss=0.1571, simple_loss=0.2451, pruned_loss=0.03457, over 19784.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2513, pruned_loss=0.05143, over 3833206.47 frames. ], batch size: 52, lr: 6.56e-03, grad_scale: 16.0 +2023-03-28 17:37:05,640 INFO [train.py:892] (0/4) Epoch 24, batch 750, loss[loss=0.1682, simple_loss=0.238, pruned_loss=0.04923, over 19763.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2502, pruned_loss=0.05058, over 3858564.71 frames. ], batch size: 217, lr: 6.56e-03, grad_scale: 16.0 +2023-03-28 17:37:13,193 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.432e+02 3.941e+02 4.702e+02 5.595e+02 1.048e+03, threshold=9.403e+02, percent-clipped=2.0 +2023-03-28 17:37:45,896 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2265, 3.5266, 3.7668, 4.2670, 2.9617, 3.3815, 2.7096, 2.7127], + device='cuda:0'), covar=tensor([0.0480, 0.1875, 0.0802, 0.0353, 0.1859, 0.0851, 0.1268, 0.1560], + device='cuda:0'), in_proj_covar=tensor([0.0233, 0.0330, 0.0242, 0.0189, 0.0242, 0.0199, 0.0211, 0.0212], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 17:39:06,680 INFO [train.py:892] (0/4) Epoch 24, batch 800, loss[loss=0.1844, simple_loss=0.2425, pruned_loss=0.06311, over 19875.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2502, pruned_loss=0.05067, over 3879096.42 frames. ], batch size: 157, lr: 6.56e-03, grad_scale: 16.0 +2023-03-28 17:39:31,693 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7020, 2.8562, 2.9506, 2.8916, 2.6850, 2.9681, 2.6872, 2.9879], + device='cuda:0'), covar=tensor([0.0263, 0.0255, 0.0282, 0.0236, 0.0349, 0.0242, 0.0362, 0.0277], + device='cuda:0'), in_proj_covar=tensor([0.0075, 0.0069, 0.0072, 0.0066, 0.0079, 0.0074, 0.0090, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-03-28 17:41:03,983 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43514.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:41:05,895 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43515.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:41:06,978 INFO [train.py:892] (0/4) Epoch 24, batch 850, loss[loss=0.1929, simple_loss=0.2713, pruned_loss=0.05722, over 19800.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2505, pruned_loss=0.05093, over 3895628.25 frames. ], batch size: 67, lr: 6.55e-03, grad_scale: 16.0 +2023-03-28 17:41:14,147 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.358e+02 3.878e+02 4.715e+02 5.531e+02 7.871e+02, threshold=9.429e+02, percent-clipped=0.0 +2023-03-28 17:41:29,727 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. 
limit=2.0 +2023-03-28 17:41:51,970 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43535.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:42:23,687 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3006, 2.5529, 3.5191, 2.9187, 3.0329, 2.9693, 2.0714, 2.2275], + device='cuda:0'), covar=tensor([0.1043, 0.2635, 0.0668, 0.0977, 0.1633, 0.1311, 0.2341, 0.2462], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0371, 0.0331, 0.0266, 0.0362, 0.0350, 0.0351, 0.0320], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 17:43:00,544 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43563.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:43:06,593 INFO [train.py:892] (0/4) Epoch 24, batch 900, loss[loss=0.1781, simple_loss=0.2498, pruned_loss=0.05324, over 19798.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2501, pruned_loss=0.05077, over 3906901.88 frames. ], batch size: 67, lr: 6.55e-03, grad_scale: 16.0 +2023-03-28 17:43:25,793 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43575.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:43:30,103 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-28 17:43:45,725 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43583.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:45:04,116 INFO [train.py:892] (0/4) Epoch 24, batch 950, loss[loss=0.1516, simple_loss=0.2286, pruned_loss=0.03729, over 19793.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2515, pruned_loss=0.05131, over 3916767.51 frames. ], batch size: 83, lr: 6.54e-03, grad_scale: 16.0 +2023-03-28 17:45:11,470 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.960e+02 4.203e+02 4.948e+02 5.601e+02 1.021e+03, threshold=9.897e+02, percent-clipped=1.0 +2023-03-28 17:45:19,735 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-28 17:47:04,240 INFO [train.py:892] (0/4) Epoch 24, batch 1000, loss[loss=0.1724, simple_loss=0.2448, pruned_loss=0.04998, over 19755.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2505, pruned_loss=0.05079, over 3924758.85 frames. ], batch size: 259, lr: 6.54e-03, grad_scale: 16.0 +2023-03-28 17:47:07,357 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0714, 4.0228, 2.4563, 4.2776, 4.4409, 1.9594, 3.6500, 3.3390], + device='cuda:0'), covar=tensor([0.0669, 0.0909, 0.2790, 0.0765, 0.0623, 0.2774, 0.1092, 0.0811], + device='cuda:0'), in_proj_covar=tensor([0.0226, 0.0250, 0.0227, 0.0261, 0.0241, 0.0201, 0.0235, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 17:47:52,741 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4074, 3.1702, 3.3713, 2.9739, 3.6412, 3.6548, 4.2592, 4.7205], + device='cuda:0'), covar=tensor([0.0489, 0.1529, 0.1400, 0.2260, 0.1566, 0.1266, 0.0558, 0.0398], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0238, 0.0261, 0.0249, 0.0291, 0.0251, 0.0220, 0.0243], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 17:49:06,746 INFO [train.py:892] (0/4) Epoch 24, batch 1050, loss[loss=0.1742, simple_loss=0.2414, pruned_loss=0.05351, over 19842.00 frames. 
], tot_loss[loss=0.1773, simple_loss=0.2513, pruned_loss=0.0517, over 3929811.53 frames. ], batch size: 145, lr: 6.54e-03, grad_scale: 32.0 +2023-03-28 17:49:14,073 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.754e+02 4.065e+02 4.703e+02 5.528e+02 1.039e+03, threshold=9.406e+02, percent-clipped=2.0 +2023-03-28 17:50:18,726 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-28 17:50:46,222 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4617, 4.4430, 4.8390, 4.4260, 4.0220, 4.6506, 4.4700, 4.9204], + device='cuda:0'), covar=tensor([0.0862, 0.0338, 0.0367, 0.0389, 0.0920, 0.0488, 0.0471, 0.0326], + device='cuda:0'), in_proj_covar=tensor([0.0273, 0.0213, 0.0214, 0.0224, 0.0201, 0.0227, 0.0222, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 17:50:48,421 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8853, 2.4204, 2.9800, 3.1600, 3.6364, 3.9716, 3.8765, 4.0586], + device='cuda:0'), covar=tensor([0.0919, 0.1573, 0.1168, 0.0687, 0.0370, 0.0264, 0.0393, 0.0298], + device='cuda:0'), in_proj_covar=tensor([0.0154, 0.0167, 0.0172, 0.0146, 0.0127, 0.0123, 0.0117, 0.0109], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 17:51:07,532 INFO [train.py:892] (0/4) Epoch 24, batch 1100, loss[loss=0.1853, simple_loss=0.2581, pruned_loss=0.05625, over 19653.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2521, pruned_loss=0.05162, over 3933937.96 frames. ], batch size: 47, lr: 6.53e-03, grad_scale: 16.0 +2023-03-28 17:51:11,065 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2850, 4.0789, 4.1466, 3.9367, 4.2807, 3.0669, 3.6501, 2.1411], + device='cuda:0'), covar=tensor([0.0201, 0.0213, 0.0141, 0.0177, 0.0132, 0.0910, 0.0652, 0.1470], + device='cuda:0'), in_proj_covar=tensor([0.0102, 0.0142, 0.0112, 0.0131, 0.0116, 0.0132, 0.0142, 0.0125], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 17:51:56,870 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43787.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:52:07,255 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. 
limit=2.0 +2023-03-28 17:52:11,060 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2320, 2.5278, 3.4749, 2.7839, 3.0066, 2.9220, 2.0697, 2.1946], + device='cuda:0'), covar=tensor([0.1083, 0.2722, 0.0670, 0.1037, 0.1625, 0.1372, 0.2378, 0.2558], + device='cuda:0'), in_proj_covar=tensor([0.0344, 0.0376, 0.0334, 0.0270, 0.0367, 0.0354, 0.0356, 0.0323], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 17:52:15,550 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4484, 4.4728, 4.8110, 4.6106, 4.7483, 4.2976, 4.5297, 4.3192], + device='cuda:0'), covar=tensor([0.1505, 0.1467, 0.0924, 0.1255, 0.0839, 0.0902, 0.1830, 0.2066], + device='cuda:0'), in_proj_covar=tensor([0.0278, 0.0305, 0.0350, 0.0278, 0.0259, 0.0256, 0.0332, 0.0361], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 17:52:15,738 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6693, 2.8109, 2.9679, 2.2844, 2.9537, 2.5711, 2.8334, 2.8834], + device='cuda:0'), covar=tensor([0.0542, 0.0451, 0.0430, 0.0830, 0.0469, 0.0470, 0.0465, 0.0385], + device='cuda:0'), in_proj_covar=tensor([0.0072, 0.0080, 0.0077, 0.0107, 0.0074, 0.0075, 0.0073, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 17:52:18,343 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.21 vs. limit=5.0 +2023-03-28 17:52:39,282 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-03-28 17:53:09,064 INFO [train.py:892] (0/4) Epoch 24, batch 1150, loss[loss=0.1456, simple_loss=0.2238, pruned_loss=0.03367, over 19955.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2518, pruned_loss=0.05123, over 3936936.78 frames. ], batch size: 53, lr: 6.53e-03, grad_scale: 16.0 +2023-03-28 17:53:19,345 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.514e+02 3.854e+02 4.753e+02 5.981e+02 1.175e+03, threshold=9.505e+02, percent-clipped=4.0 +2023-03-28 17:53:22,174 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9515, 4.5434, 4.6347, 4.3806, 4.8650, 3.2350, 3.9193, 2.4260], + device='cuda:0'), covar=tensor([0.0149, 0.0199, 0.0137, 0.0187, 0.0138, 0.0879, 0.0855, 0.1567], + device='cuda:0'), in_proj_covar=tensor([0.0101, 0.0141, 0.0111, 0.0130, 0.0115, 0.0131, 0.0141, 0.0125], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 17:53:24,290 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0845, 4.1107, 2.5554, 4.4053, 4.5709, 1.9622, 3.7337, 3.3407], + device='cuda:0'), covar=tensor([0.0717, 0.0923, 0.2792, 0.0807, 0.0600, 0.3009, 0.1199, 0.0964], + device='cuda:0'), in_proj_covar=tensor([0.0227, 0.0251, 0.0228, 0.0262, 0.0241, 0.0202, 0.0235, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 17:54:26,788 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43848.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:54:50,034 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. 
limit=2.0 +2023-03-28 17:55:06,382 INFO [train.py:892] (0/4) Epoch 24, batch 1200, loss[loss=0.1516, simple_loss=0.2301, pruned_loss=0.0365, over 19607.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2512, pruned_loss=0.05067, over 3938816.42 frames. ], batch size: 48, lr: 6.53e-03, grad_scale: 16.0 +2023-03-28 17:55:15,504 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43870.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:55:24,024 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1707, 2.9733, 3.2678, 2.9243, 3.4298, 3.3987, 4.0567, 4.4449], + device='cuda:0'), covar=tensor([0.0544, 0.1564, 0.1420, 0.2057, 0.1606, 0.1351, 0.0549, 0.0485], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0236, 0.0259, 0.0248, 0.0288, 0.0250, 0.0219, 0.0242], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 17:55:53,554 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7262, 3.3394, 3.5035, 3.7065, 3.4984, 3.6612, 3.8295, 3.9724], + device='cuda:0'), covar=tensor([0.0678, 0.0490, 0.0523, 0.0400, 0.0741, 0.0544, 0.0436, 0.0331], + device='cuda:0'), in_proj_covar=tensor([0.0145, 0.0170, 0.0193, 0.0166, 0.0165, 0.0147, 0.0143, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 17:56:08,529 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1815, 5.4894, 5.4991, 5.3822, 5.1648, 5.4507, 4.8867, 4.9738], + device='cuda:0'), covar=tensor([0.0383, 0.0400, 0.0446, 0.0408, 0.0467, 0.0512, 0.0674, 0.0929], + device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0262, 0.0280, 0.0243, 0.0244, 0.0234, 0.0251, 0.0296], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 17:56:47,376 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43909.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:56:49,468 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9659, 3.3801, 3.5064, 3.9251, 2.5982, 3.2504, 2.5668, 2.5649], + device='cuda:0'), covar=tensor([0.0512, 0.1429, 0.0838, 0.0375, 0.1904, 0.0768, 0.1159, 0.1507], + device='cuda:0'), in_proj_covar=tensor([0.0233, 0.0329, 0.0242, 0.0190, 0.0241, 0.0199, 0.0210, 0.0211], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 17:56:57,932 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8336, 4.0530, 4.2929, 4.9406, 3.0957, 3.4111, 3.2841, 3.0391], + device='cuda:0'), covar=tensor([0.0399, 0.2004, 0.0750, 0.0277, 0.1973, 0.1019, 0.1031, 0.1521], + device='cuda:0'), in_proj_covar=tensor([0.0233, 0.0329, 0.0242, 0.0190, 0.0241, 0.0199, 0.0210, 0.0211], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 17:57:04,534 INFO [train.py:892] (0/4) Epoch 24, batch 1250, loss[loss=0.1646, simple_loss=0.2361, pruned_loss=0.04655, over 19817.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2492, pruned_loss=0.04985, over 3942708.26 frames. 
], batch size: 123, lr: 6.52e-03, grad_scale: 8.0 +2023-03-28 17:57:16,405 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.338e+02 3.868e+02 4.714e+02 5.719e+02 9.399e+02, threshold=9.429e+02, percent-clipped=0.0 +2023-03-28 17:58:46,036 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43959.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 17:59:01,801 INFO [train.py:892] (0/4) Epoch 24, batch 1300, loss[loss=0.1725, simple_loss=0.2453, pruned_loss=0.04986, over 19838.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2489, pruned_loss=0.04941, over 3944649.94 frames. ], batch size: 166, lr: 6.52e-03, grad_scale: 8.0 +2023-03-28 17:59:13,344 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43970.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:00:18,546 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-44000.pt +2023-03-28 18:00:59,572 INFO [train.py:892] (0/4) Epoch 24, batch 1350, loss[loss=0.1772, simple_loss=0.2472, pruned_loss=0.05354, over 19888.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.249, pruned_loss=0.04931, over 3946262.65 frames. ], batch size: 176, lr: 6.52e-03, grad_scale: 8.0 +2023-03-28 18:01:06,697 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1119, 3.2146, 2.0597, 3.2671, 3.3472, 1.6446, 2.7868, 2.5518], + device='cuda:0'), covar=tensor([0.0838, 0.0869, 0.2585, 0.0833, 0.0615, 0.2548, 0.1168, 0.0931], + device='cuda:0'), in_proj_covar=tensor([0.0224, 0.0247, 0.0225, 0.0259, 0.0237, 0.0199, 0.0232, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 18:01:08,626 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44020.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:01:09,754 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.475e+02 3.867e+02 4.545e+02 5.331e+02 8.729e+02, threshold=9.091e+02, percent-clipped=0.0 +2023-03-28 18:02:45,804 INFO [train.py:892] (0/4) Epoch 24, batch 1400, loss[loss=0.1915, simple_loss=0.2603, pruned_loss=0.06138, over 19751.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2483, pruned_loss=0.04912, over 3947403.95 frames. ], batch size: 209, lr: 6.51e-03, grad_scale: 8.0 +2023-03-28 18:04:35,907 INFO [train.py:892] (0/4) Epoch 24, batch 1450, loss[loss=0.1653, simple_loss=0.2465, pruned_loss=0.04199, over 19869.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2498, pruned_loss=0.04967, over 3947411.76 frames. ], batch size: 89, lr: 6.51e-03, grad_scale: 8.0 +2023-03-28 18:04:46,672 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.585e+02 3.832e+02 4.632e+02 5.399e+02 1.168e+03, threshold=9.265e+02, percent-clipped=4.0 +2023-03-28 18:05:40,653 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44143.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:06:31,953 INFO [train.py:892] (0/4) Epoch 24, batch 1500, loss[loss=0.2116, simple_loss=0.2853, pruned_loss=0.06894, over 19628.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2508, pruned_loss=0.05017, over 3945971.81 frames. 
], batch size: 343, lr: 6.50e-03, grad_scale: 8.0 +2023-03-28 18:06:40,674 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44170.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:06:48,776 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1269, 4.1380, 2.5059, 4.4769, 4.6227, 2.0690, 3.7050, 3.3749], + device='cuda:0'), covar=tensor([0.0657, 0.0896, 0.2693, 0.0722, 0.0570, 0.2727, 0.1164, 0.0839], + device='cuda:0'), in_proj_covar=tensor([0.0225, 0.0250, 0.0227, 0.0262, 0.0239, 0.0200, 0.0234, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 18:07:35,908 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0140, 2.7539, 3.0832, 2.8289, 3.2790, 3.2010, 3.8545, 4.1913], + device='cuda:0'), covar=tensor([0.0543, 0.1839, 0.1612, 0.2153, 0.1583, 0.1486, 0.0631, 0.0716], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0238, 0.0261, 0.0250, 0.0289, 0.0252, 0.0223, 0.0245], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 18:07:54,108 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3467, 5.6530, 5.6694, 5.5685, 5.2671, 5.6253, 5.0438, 5.1052], + device='cuda:0'), covar=tensor([0.0362, 0.0370, 0.0437, 0.0387, 0.0574, 0.0494, 0.0699, 0.0866], + device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0260, 0.0280, 0.0241, 0.0245, 0.0235, 0.0251, 0.0297], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 18:08:30,428 INFO [train.py:892] (0/4) Epoch 24, batch 1550, loss[loss=0.1685, simple_loss=0.2444, pruned_loss=0.04633, over 19853.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2509, pruned_loss=0.04997, over 3945306.37 frames. ], batch size: 81, lr: 6.50e-03, grad_scale: 8.0 +2023-03-28 18:08:35,649 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44218.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:08:41,917 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.584e+02 4.103e+02 5.047e+02 5.818e+02 1.108e+03, threshold=1.009e+03, percent-clipped=1.0 +2023-03-28 18:10:28,858 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44265.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:10:30,408 INFO [train.py:892] (0/4) Epoch 24, batch 1600, loss[loss=0.1678, simple_loss=0.2373, pruned_loss=0.0491, over 19805.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2507, pruned_loss=0.04985, over 3947216.07 frames. 
], batch size: 174, lr: 6.50e-03, grad_scale: 8.0 +2023-03-28 18:11:31,813 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0585, 2.4091, 3.8394, 3.3420, 3.8339, 3.8691, 3.7192, 3.6783], + device='cuda:0'), covar=tensor([0.0488, 0.0942, 0.0113, 0.0569, 0.0129, 0.0220, 0.0190, 0.0166], + device='cuda:0'), in_proj_covar=tensor([0.0095, 0.0101, 0.0082, 0.0151, 0.0080, 0.0094, 0.0087, 0.0082], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 18:11:56,315 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2085, 4.2580, 2.5520, 4.5117, 4.6692, 2.0666, 3.9127, 3.3738], + device='cuda:0'), covar=tensor([0.0642, 0.0763, 0.2561, 0.0686, 0.0514, 0.2701, 0.0986, 0.0826], + device='cuda:0'), in_proj_covar=tensor([0.0224, 0.0249, 0.0227, 0.0262, 0.0239, 0.0200, 0.0233, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 18:12:24,813 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44315.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:12:26,294 INFO [train.py:892] (0/4) Epoch 24, batch 1650, loss[loss=0.1593, simple_loss=0.2451, pruned_loss=0.03676, over 19727.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2506, pruned_loss=0.04991, over 3946580.36 frames. ], batch size: 52, lr: 6.49e-03, grad_scale: 8.0 +2023-03-28 18:12:36,790 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.188e+02 4.041e+02 4.652e+02 5.558e+02 9.682e+02, threshold=9.304e+02, percent-clipped=0.0 +2023-03-28 18:14:04,262 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8967, 3.8041, 4.2151, 3.8221, 3.6986, 4.0880, 3.9051, 4.2893], + device='cuda:0'), covar=tensor([0.0961, 0.0379, 0.0401, 0.0432, 0.1023, 0.0535, 0.0495, 0.0364], + device='cuda:0'), in_proj_covar=tensor([0.0278, 0.0218, 0.0217, 0.0228, 0.0205, 0.0232, 0.0226, 0.0209], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 18:14:21,546 INFO [train.py:892] (0/4) Epoch 24, batch 1700, loss[loss=0.156, simple_loss=0.2318, pruned_loss=0.04008, over 19740.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2505, pruned_loss=0.04971, over 3948366.69 frames. ], batch size: 92, lr: 6.49e-03, grad_scale: 8.0 +2023-03-28 18:14:54,202 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7080, 3.8808, 3.8731, 3.7539, 3.6977, 3.8337, 3.3833, 3.3903], + device='cuda:0'), covar=tensor([0.0563, 0.0583, 0.0649, 0.0538, 0.0689, 0.0666, 0.0756, 0.1109], + device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0259, 0.0279, 0.0240, 0.0243, 0.0234, 0.0250, 0.0295], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 18:16:13,211 INFO [train.py:892] (0/4) Epoch 24, batch 1750, loss[loss=0.1746, simple_loss=0.2573, pruned_loss=0.04598, over 19660.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2497, pruned_loss=0.04969, over 3949386.04 frames. 
], batch size: 50, lr: 6.49e-03, grad_scale: 8.0 +2023-03-28 18:16:22,188 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.732e+02 4.086e+02 4.781e+02 5.963e+02 1.155e+03, threshold=9.562e+02, percent-clipped=1.0 +2023-03-28 18:17:12,361 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44443.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:17:56,848 INFO [train.py:892] (0/4) Epoch 24, batch 1800, loss[loss=0.1812, simple_loss=0.2609, pruned_loss=0.05072, over 19581.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2509, pruned_loss=0.05083, over 3948560.91 frames. ], batch size: 49, lr: 6.48e-03, grad_scale: 8.0 +2023-03-28 18:18:42,265 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44491.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:19:13,582 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0172, 3.3136, 2.7776, 2.4522, 2.8986, 3.3530, 3.1475, 3.1356], + device='cuda:0'), covar=tensor([0.0270, 0.0260, 0.0275, 0.0466, 0.0328, 0.0188, 0.0196, 0.0213], + device='cuda:0'), in_proj_covar=tensor([0.0093, 0.0087, 0.0093, 0.0096, 0.0098, 0.0077, 0.0076, 0.0077], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 18:19:30,400 INFO [train.py:892] (0/4) Epoch 24, batch 1850, loss[loss=0.1834, simple_loss=0.2672, pruned_loss=0.04978, over 19843.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2544, pruned_loss=0.05129, over 3946836.32 frames. ], batch size: 58, lr: 6.48e-03, grad_scale: 8.0 +2023-03-28 18:19:37,937 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-24.pt +2023-03-28 18:20:24,350 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.697e+02 4.242e+02 4.967e+02 6.010e+02 1.010e+03, threshold=9.934e+02, percent-clipped=1.0 +2023-03-28 18:20:24,374 INFO [train.py:892] (0/4) Epoch 25, batch 0, loss[loss=0.157, simple_loss=0.2441, pruned_loss=0.03502, over 19734.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2441, pruned_loss=0.03502, over 19734.00 frames. ], batch size: 76, lr: 6.35e-03, grad_scale: 8.0 +2023-03-28 18:20:24,375 INFO [train.py:917] (0/4) Computing validation loss +2023-03-28 18:20:49,534 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5553, 4.0186, 3.8772, 3.9080, 3.9415, 3.9075, 3.8267, 3.5843], + device='cuda:0'), covar=tensor([0.2184, 0.1246, 0.1593, 0.1311, 0.1237, 0.0907, 0.1747, 0.2412], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0310, 0.0354, 0.0283, 0.0264, 0.0261, 0.0340, 0.0370], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 18:20:53,332 INFO [train.py:926] (0/4) Epoch 25, validation: loss=0.1751, simple_loss=0.2485, pruned_loss=0.05079, over 2883724.00 frames. +2023-03-28 18:20:53,333 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-28 18:21:07,731 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44527.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:21:57,559 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44550.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:22:06,433 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.91 vs. 
limit=5.0 +2023-03-28 18:22:31,239 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44565.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:22:45,082 INFO [train.py:892] (0/4) Epoch 25, batch 50, loss[loss=0.1761, simple_loss=0.2489, pruned_loss=0.05161, over 19738.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2432, pruned_loss=0.04683, over 891651.50 frames. ], batch size: 106, lr: 6.34e-03, grad_scale: 8.0 +2023-03-28 18:23:23,467 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44588.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:24:18,357 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44611.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:24:23,566 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44613.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:24:29,060 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44615.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:24:45,052 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.512e+02 3.985e+02 4.536e+02 5.355e+02 8.886e+02, threshold=9.072e+02, percent-clipped=0.0 +2023-03-28 18:24:45,082 INFO [train.py:892] (0/4) Epoch 25, batch 100, loss[loss=0.1677, simple_loss=0.2418, pruned_loss=0.04684, over 19690.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.245, pruned_loss=0.04738, over 1570073.62 frames. ], batch size: 74, lr: 6.34e-03, grad_scale: 8.0 +2023-03-28 18:25:37,353 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-03-28 18:26:13,749 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-28 18:26:29,897 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44663.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:26:37,945 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-03-28 18:26:47,950 INFO [train.py:892] (0/4) Epoch 25, batch 150, loss[loss=0.25, simple_loss=0.3511, pruned_loss=0.07446, over 18692.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2476, pruned_loss=0.04821, over 2096377.78 frames. ], batch size: 564, lr: 6.33e-03, grad_scale: 8.0 +2023-03-28 18:27:38,785 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-03-28 18:28:52,025 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.576e+02 4.007e+02 4.679e+02 6.121e+02 1.422e+03, threshold=9.359e+02, percent-clipped=5.0 +2023-03-28 18:28:52,054 INFO [train.py:892] (0/4) Epoch 25, batch 200, loss[loss=0.1896, simple_loss=0.2784, pruned_loss=0.05036, over 19675.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2505, pruned_loss=0.04991, over 2506531.15 frames. ], batch size: 49, lr: 6.33e-03, grad_scale: 8.0 +2023-03-28 18:29:00,409 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-03-28 18:30:53,962 INFO [train.py:892] (0/4) Epoch 25, batch 250, loss[loss=0.1486, simple_loss=0.2241, pruned_loss=0.03652, over 19878.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2508, pruned_loss=0.04984, over 2827313.54 frames. 
], batch size: 47, lr: 6.33e-03, grad_scale: 8.0 +2023-03-28 18:32:57,520 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.592e+02 3.844e+02 4.514e+02 5.418e+02 9.178e+02, threshold=9.028e+02, percent-clipped=0.0 +2023-03-28 18:32:57,555 INFO [train.py:892] (0/4) Epoch 25, batch 300, loss[loss=0.1921, simple_loss=0.2603, pruned_loss=0.06193, over 19854.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2514, pruned_loss=0.05018, over 3074122.80 frames. ], batch size: 142, lr: 6.32e-03, grad_scale: 8.0 +2023-03-28 18:34:27,541 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0253, 3.6306, 3.8090, 4.0317, 3.7097, 3.9654, 4.1173, 4.2820], + device='cuda:0'), covar=tensor([0.0671, 0.0502, 0.0522, 0.0367, 0.0822, 0.0583, 0.0438, 0.0315], + device='cuda:0'), in_proj_covar=tensor([0.0148, 0.0171, 0.0194, 0.0169, 0.0169, 0.0150, 0.0147, 0.0191], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 18:35:01,674 INFO [train.py:892] (0/4) Epoch 25, batch 350, loss[loss=0.1575, simple_loss=0.2328, pruned_loss=0.04109, over 19806.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2498, pruned_loss=0.04979, over 3269195.90 frames. ], batch size: 82, lr: 6.32e-03, grad_scale: 8.0 +2023-03-28 18:35:28,949 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44883.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:36:17,779 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.56 vs. limit=5.0 +2023-03-28 18:36:26,334 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44906.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:37:01,645 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.886e+02 4.144e+02 4.807e+02 5.961e+02 1.159e+03, threshold=9.615e+02, percent-clipped=3.0 +2023-03-28 18:37:01,673 INFO [train.py:892] (0/4) Epoch 25, batch 400, loss[loss=0.1773, simple_loss=0.2468, pruned_loss=0.05394, over 19847.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2494, pruned_loss=0.04944, over 3422057.35 frames. ], batch size: 145, lr: 6.32e-03, grad_scale: 8.0 +2023-03-28 18:38:41,539 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4272, 3.5303, 2.1529, 3.7072, 3.7695, 1.7714, 3.0654, 2.8769], + device='cuda:0'), covar=tensor([0.0829, 0.0813, 0.2827, 0.0727, 0.0626, 0.2805, 0.1228, 0.0942], + device='cuda:0'), in_proj_covar=tensor([0.0226, 0.0249, 0.0226, 0.0262, 0.0241, 0.0200, 0.0233, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 18:38:41,886 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-03-28 18:39:03,909 INFO [train.py:892] (0/4) Epoch 25, batch 450, loss[loss=0.165, simple_loss=0.2405, pruned_loss=0.04481, over 19793.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2513, pruned_loss=0.05036, over 3537068.50 frames. ], batch size: 83, lr: 6.31e-03, grad_scale: 8.0 +2023-03-28 18:40:59,618 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.643e+02 4.015e+02 4.807e+02 5.728e+02 9.598e+02, threshold=9.613e+02, percent-clipped=0.0 +2023-03-28 18:40:59,647 INFO [train.py:892] (0/4) Epoch 25, batch 500, loss[loss=0.1725, simple_loss=0.2523, pruned_loss=0.04633, over 19695.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2511, pruned_loss=0.05044, over 3629025.55 frames. 
], batch size: 56, lr: 6.31e-03, grad_scale: 8.0 +2023-03-28 18:42:20,462 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45056.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:42:52,454 INFO [train.py:892] (0/4) Epoch 25, batch 550, loss[loss=0.1709, simple_loss=0.255, pruned_loss=0.04342, over 19661.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2522, pruned_loss=0.05079, over 3699642.45 frames. ], batch size: 50, lr: 6.31e-03, grad_scale: 8.0 +2023-03-28 18:43:20,530 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45081.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:44:02,705 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9370, 2.8478, 1.7172, 3.4614, 3.1352, 3.3480, 3.4837, 2.7437], + device='cuda:0'), covar=tensor([0.0652, 0.0650, 0.1763, 0.0581, 0.0603, 0.0495, 0.0580, 0.0816], + device='cuda:0'), in_proj_covar=tensor([0.0137, 0.0136, 0.0139, 0.0142, 0.0126, 0.0126, 0.0139, 0.0139], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 18:44:44,832 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45117.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 18:44:46,821 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5561, 2.8455, 3.4765, 3.0807, 3.7802, 3.7240, 4.4467, 4.9032], + device='cuda:0'), covar=tensor([0.0504, 0.1823, 0.1364, 0.2006, 0.1636, 0.1331, 0.0494, 0.0409], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0236, 0.0258, 0.0247, 0.0289, 0.0251, 0.0222, 0.0245], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 18:44:51,746 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.460e+02 3.947e+02 4.547e+02 5.526e+02 8.636e+02, threshold=9.094e+02, percent-clipped=0.0 +2023-03-28 18:44:51,772 INFO [train.py:892] (0/4) Epoch 25, batch 600, loss[loss=0.1529, simple_loss=0.2392, pruned_loss=0.03337, over 19939.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2517, pruned_loss=0.05094, over 3755840.57 frames. ], batch size: 52, lr: 6.30e-03, grad_scale: 8.0 +2023-03-28 18:45:31,204 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.78 vs. limit=5.0 +2023-03-28 18:45:42,843 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3049, 4.0643, 4.1368, 3.8077, 4.2615, 3.0629, 3.5672, 1.9442], + device='cuda:0'), covar=tensor([0.0210, 0.0239, 0.0150, 0.0219, 0.0163, 0.0986, 0.0751, 0.1735], + device='cuda:0'), in_proj_covar=tensor([0.0102, 0.0143, 0.0111, 0.0131, 0.0117, 0.0131, 0.0140, 0.0125], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 18:45:45,458 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45142.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:46:55,686 INFO [train.py:892] (0/4) Epoch 25, batch 650, loss[loss=0.1624, simple_loss=0.2358, pruned_loss=0.0445, over 19740.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2508, pruned_loss=0.05035, over 3797788.56 frames. 
], batch size: 140, lr: 6.30e-03, grad_scale: 8.0 +2023-03-28 18:47:23,626 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45183.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:48:01,143 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7666, 3.2750, 3.6324, 3.2650, 3.9261, 3.9313, 4.5229, 5.1319], + device='cuda:0'), covar=tensor([0.0448, 0.1479, 0.1286, 0.2029, 0.1542, 0.1396, 0.0557, 0.0375], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0235, 0.0258, 0.0247, 0.0288, 0.0251, 0.0222, 0.0245], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 18:48:06,338 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-03-28 18:48:24,659 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45206.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:48:58,096 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.622e+02 4.071e+02 4.790e+02 5.596e+02 1.238e+03, threshold=9.579e+02, percent-clipped=2.0 +2023-03-28 18:48:58,130 INFO [train.py:892] (0/4) Epoch 25, batch 700, loss[loss=0.1389, simple_loss=0.2189, pruned_loss=0.02945, over 19711.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2519, pruned_loss=0.05121, over 3830369.06 frames. ], batch size: 78, lr: 6.30e-03, grad_scale: 8.0 +2023-03-28 18:49:21,578 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45231.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:50:14,202 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45254.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:50:27,456 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1168, 2.5414, 3.9010, 3.4447, 3.8885, 3.9579, 3.7543, 3.7080], + device='cuda:0'), covar=tensor([0.0453, 0.0873, 0.0102, 0.0559, 0.0128, 0.0208, 0.0174, 0.0165], + device='cuda:0'), in_proj_covar=tensor([0.0095, 0.0101, 0.0083, 0.0151, 0.0081, 0.0094, 0.0088, 0.0083], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 18:50:54,012 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-28 18:50:54,523 INFO [train.py:892] (0/4) Epoch 25, batch 750, loss[loss=0.1811, simple_loss=0.2556, pruned_loss=0.05331, over 19882.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2516, pruned_loss=0.05057, over 3856704.58 frames. ], batch size: 62, lr: 6.29e-03, grad_scale: 8.0 +2023-03-28 18:51:44,807 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-03-28 18:52:40,096 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-03-28 18:52:49,364 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.500e+02 3.772e+02 4.659e+02 5.559e+02 8.961e+02, threshold=9.318e+02, percent-clipped=0.0 +2023-03-28 18:52:49,387 INFO [train.py:892] (0/4) Epoch 25, batch 800, loss[loss=0.2034, simple_loss=0.2913, pruned_loss=0.05777, over 19668.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2507, pruned_loss=0.05024, over 3877735.44 frames. 
], batch size: 55, lr: 6.29e-03, grad_scale: 8.0 +2023-03-28 18:53:53,776 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45348.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 18:54:43,304 INFO [train.py:892] (0/4) Epoch 25, batch 850, loss[loss=0.1588, simple_loss=0.2321, pruned_loss=0.04273, over 19883.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2502, pruned_loss=0.04994, over 3894707.75 frames. ], batch size: 158, lr: 6.29e-03, grad_scale: 8.0 +2023-03-28 18:55:17,007 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8384, 4.9060, 5.2207, 5.0456, 5.0834, 4.7496, 4.9515, 4.8221], + device='cuda:0'), covar=tensor([0.1357, 0.1540, 0.0826, 0.1165, 0.0723, 0.0810, 0.1791, 0.1866], + device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0312, 0.0355, 0.0287, 0.0263, 0.0263, 0.0339, 0.0372], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 18:56:15,140 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45409.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 18:56:20,776 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45412.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 18:56:43,405 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.654e+02 4.187e+02 5.025e+02 5.805e+02 1.375e+03, threshold=1.005e+03, percent-clipped=1.0 +2023-03-28 18:56:43,431 INFO [train.py:892] (0/4) Epoch 25, batch 900, loss[loss=0.1572, simple_loss=0.2311, pruned_loss=0.04161, over 19813.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.25, pruned_loss=0.04982, over 3906259.31 frames. ], batch size: 173, lr: 6.28e-03, grad_scale: 8.0 +2023-03-28 18:57:23,567 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45437.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 18:58:45,865 INFO [train.py:892] (0/4) Epoch 25, batch 950, loss[loss=0.1593, simple_loss=0.2395, pruned_loss=0.03957, over 19770.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2497, pruned_loss=0.04958, over 3916000.08 frames. ], batch size: 217, lr: 6.28e-03, grad_scale: 8.0 +2023-03-28 18:59:31,934 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1678, 4.7292, 4.8209, 4.5606, 5.0551, 3.2518, 4.1629, 2.5818], + device='cuda:0'), covar=tensor([0.0150, 0.0196, 0.0124, 0.0176, 0.0129, 0.0892, 0.0793, 0.1417], + device='cuda:0'), in_proj_covar=tensor([0.0103, 0.0143, 0.0111, 0.0131, 0.0117, 0.0132, 0.0142, 0.0125], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 18:59:52,097 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6519, 4.9057, 4.9623, 4.8362, 4.6435, 4.9333, 4.4150, 4.4054], + device='cuda:0'), covar=tensor([0.0490, 0.0509, 0.0491, 0.0432, 0.0559, 0.0491, 0.0654, 0.1046], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0266, 0.0284, 0.0245, 0.0248, 0.0237, 0.0255, 0.0300], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 19:00:40,922 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.498e+02 3.760e+02 4.893e+02 5.936e+02 1.409e+03, threshold=9.787e+02, percent-clipped=1.0 +2023-03-28 19:00:40,948 INFO [train.py:892] (0/4) Epoch 25, batch 1000, loss[loss=0.1781, simple_loss=0.2539, pruned_loss=0.05112, over 19710.00 frames. 
], tot_loss[loss=0.1762, simple_loss=0.251, pruned_loss=0.05072, over 3922252.84 frames. ], batch size: 109, lr: 6.28e-03, grad_scale: 8.0 +2023-03-28 19:01:39,275 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-03-28 19:02:41,470 INFO [train.py:892] (0/4) Epoch 25, batch 1050, loss[loss=0.1977, simple_loss=0.2798, pruned_loss=0.05773, over 19781.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2505, pruned_loss=0.05035, over 3928572.85 frames. ], batch size: 52, lr: 6.27e-03, grad_scale: 8.0 +2023-03-28 19:03:54,985 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4637, 3.3892, 3.7334, 2.7107, 3.8070, 3.1529, 3.3125, 3.8673], + device='cuda:0'), covar=tensor([0.0639, 0.0387, 0.0433, 0.0782, 0.0287, 0.0353, 0.0446, 0.0243], + device='cuda:0'), in_proj_covar=tensor([0.0071, 0.0079, 0.0077, 0.0105, 0.0073, 0.0075, 0.0072, 0.0066], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 19:03:56,993 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1196, 3.0623, 1.8152, 3.7601, 3.4037, 3.7210, 3.7373, 2.9065], + device='cuda:0'), covar=tensor([0.0653, 0.0740, 0.1797, 0.0641, 0.0700, 0.0461, 0.0681, 0.0863], + device='cuda:0'), in_proj_covar=tensor([0.0138, 0.0137, 0.0140, 0.0145, 0.0127, 0.0127, 0.0141, 0.0140], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 19:04:14,331 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.5137, 5.9066, 6.0089, 5.8956, 5.6859, 5.6345, 5.7071, 5.6794], + device='cuda:0'), covar=tensor([0.1338, 0.1245, 0.0916, 0.1131, 0.0693, 0.0826, 0.2008, 0.1823], + device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0311, 0.0354, 0.0287, 0.0264, 0.0265, 0.0338, 0.0371], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 19:04:40,793 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.489e+02 3.919e+02 4.482e+02 5.377e+02 1.345e+03, threshold=8.964e+02, percent-clipped=5.0 +2023-03-28 19:04:40,820 INFO [train.py:892] (0/4) Epoch 25, batch 1100, loss[loss=0.1541, simple_loss=0.2263, pruned_loss=0.0409, over 19848.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2506, pruned_loss=0.05036, over 3933090.67 frames. ], batch size: 137, lr: 6.27e-03, grad_scale: 8.0 +2023-03-28 19:05:25,727 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7385, 4.7850, 5.0901, 4.9179, 4.9955, 4.4518, 4.8140, 4.6577], + device='cuda:0'), covar=tensor([0.1346, 0.1590, 0.0907, 0.1170, 0.0738, 0.1001, 0.1970, 0.1871], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0313, 0.0355, 0.0288, 0.0265, 0.0266, 0.0340, 0.0371], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 19:06:36,323 INFO [train.py:892] (0/4) Epoch 25, batch 1150, loss[loss=0.2116, simple_loss=0.2916, pruned_loss=0.06578, over 19644.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2514, pruned_loss=0.05122, over 3936070.36 frames. 
], batch size: 330, lr: 6.27e-03, grad_scale: 8.0 +2023-03-28 19:07:40,524 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2114, 4.0089, 4.0755, 3.8210, 4.1961, 2.9424, 3.4928, 1.9967], + device='cuda:0'), covar=tensor([0.0206, 0.0229, 0.0136, 0.0187, 0.0140, 0.0990, 0.0721, 0.1587], + device='cuda:0'), in_proj_covar=tensor([0.0103, 0.0143, 0.0110, 0.0130, 0.0117, 0.0131, 0.0141, 0.0124], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 19:07:52,468 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45702.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:07:57,734 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45704.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 19:07:59,842 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8218, 5.0858, 5.1388, 5.0381, 4.7321, 5.1409, 4.6036, 4.6462], + device='cuda:0'), covar=tensor([0.0444, 0.0460, 0.0498, 0.0412, 0.0612, 0.0479, 0.0690, 0.0940], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0265, 0.0283, 0.0247, 0.0249, 0.0236, 0.0253, 0.0299], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 19:08:19,122 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45712.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 19:08:39,147 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.049e+02 4.389e+02 4.900e+02 5.957e+02 9.190e+02, threshold=9.801e+02, percent-clipped=1.0 +2023-03-28 19:08:39,175 INFO [train.py:892] (0/4) Epoch 25, batch 1200, loss[loss=0.1711, simple_loss=0.2515, pruned_loss=0.04541, over 19678.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.251, pruned_loss=0.05141, over 3939850.24 frames. ], batch size: 52, lr: 6.26e-03, grad_scale: 8.0 +2023-03-28 19:09:16,092 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45737.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:10:08,266 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45760.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:10:10,114 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0036, 3.8127, 3.8561, 3.6000, 3.9883, 2.8921, 3.3336, 1.8502], + device='cuda:0'), covar=tensor([0.0223, 0.0243, 0.0157, 0.0199, 0.0155, 0.1002, 0.0692, 0.1711], + device='cuda:0'), in_proj_covar=tensor([0.0103, 0.0142, 0.0110, 0.0130, 0.0117, 0.0131, 0.0140, 0.0124], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 19:10:13,913 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45763.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:10:31,150 INFO [train.py:892] (0/4) Epoch 25, batch 1250, loss[loss=0.2, simple_loss=0.2755, pruned_loss=0.06226, over 19801.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2509, pruned_loss=0.05147, over 3940771.07 frames. 
], batch size: 173, lr: 6.26e-03, grad_scale: 8.0 +2023-03-28 19:10:38,071 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45774.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:11:02,687 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45785.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:12:24,908 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.740e+02 4.060e+02 4.817e+02 5.850e+02 1.056e+03, threshold=9.634e+02, percent-clipped=3.0 +2023-03-28 19:12:24,933 INFO [train.py:892] (0/4) Epoch 25, batch 1300, loss[loss=0.1686, simple_loss=0.2524, pruned_loss=0.04241, over 19774.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2516, pruned_loss=0.05161, over 3942421.34 frames. ], batch size: 46, lr: 6.26e-03, grad_scale: 8.0 +2023-03-28 19:13:01,587 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45835.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:14:00,935 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45859.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:14:29,510 INFO [train.py:892] (0/4) Epoch 25, batch 1350, loss[loss=0.1593, simple_loss=0.235, pruned_loss=0.04182, over 19731.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2503, pruned_loss=0.05044, over 3944642.59 frames. ], batch size: 118, lr: 6.25e-03, grad_scale: 8.0 +2023-03-28 19:16:00,669 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-03-28 19:16:24,282 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45920.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:16:25,386 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.758e+02 4.142e+02 4.668e+02 5.534e+02 8.223e+02, threshold=9.336e+02, percent-clipped=0.0 +2023-03-28 19:16:25,425 INFO [train.py:892] (0/4) Epoch 25, batch 1400, loss[loss=0.1656, simple_loss=0.2343, pruned_loss=0.04841, over 19848.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2489, pruned_loss=0.04987, over 3946092.72 frames. ], batch size: 142, lr: 6.25e-03, grad_scale: 16.0 +2023-03-28 19:16:26,433 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2646, 3.2694, 2.0122, 3.9106, 3.5123, 3.9266, 3.9584, 3.1008], + device='cuda:0'), covar=tensor([0.0646, 0.0617, 0.1549, 0.0570, 0.0642, 0.0406, 0.0526, 0.0816], + device='cuda:0'), in_proj_covar=tensor([0.0139, 0.0138, 0.0140, 0.0145, 0.0128, 0.0128, 0.0141, 0.0141], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 19:18:21,753 INFO [train.py:892] (0/4) Epoch 25, batch 1450, loss[loss=0.1794, simple_loss=0.2445, pruned_loss=0.05717, over 19790.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.249, pruned_loss=0.04969, over 3947750.12 frames. ], batch size: 168, lr: 6.25e-03, grad_scale: 16.0 +2023-03-28 19:18:51,046 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.71 vs. 
limit=5.0 +2023-03-28 19:19:12,331 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2114, 2.5109, 4.0895, 3.6507, 4.0435, 4.1372, 3.9141, 3.8365], + device='cuda:0'), covar=tensor([0.0486, 0.0924, 0.0102, 0.0579, 0.0130, 0.0204, 0.0175, 0.0169], + device='cuda:0'), in_proj_covar=tensor([0.0097, 0.0103, 0.0084, 0.0153, 0.0081, 0.0096, 0.0090, 0.0084], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 19:19:38,929 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-46000.pt +2023-03-28 19:19:53,492 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46004.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 19:20:30,233 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.575e+02 3.735e+02 4.746e+02 5.527e+02 8.310e+02, threshold=9.492e+02, percent-clipped=0.0 +2023-03-28 19:20:30,267 INFO [train.py:892] (0/4) Epoch 25, batch 1500, loss[loss=0.1674, simple_loss=0.2504, pruned_loss=0.0422, over 19715.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2485, pruned_loss=0.04931, over 3948596.89 frames. ], batch size: 81, lr: 6.24e-03, grad_scale: 16.0 +2023-03-28 19:21:43,450 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46052.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 19:21:57,453 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46058.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:22:29,058 INFO [train.py:892] (0/4) Epoch 25, batch 1550, loss[loss=0.1792, simple_loss=0.2516, pruned_loss=0.05346, over 19786.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2501, pruned_loss=0.05017, over 3948491.54 frames. ], batch size: 83, lr: 6.24e-03, grad_scale: 16.0 +2023-03-28 19:23:54,165 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2018, 4.4467, 4.4833, 4.3582, 4.2129, 4.4317, 3.9784, 4.0186], + device='cuda:0'), covar=tensor([0.0527, 0.0499, 0.0517, 0.0449, 0.0666, 0.0572, 0.0714, 0.0943], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0267, 0.0284, 0.0247, 0.0251, 0.0236, 0.0255, 0.0301], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 19:24:26,941 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.641e+02 4.209e+02 5.084e+02 6.041e+02 9.841e+02, threshold=1.017e+03, percent-clipped=2.0 +2023-03-28 19:24:26,968 INFO [train.py:892] (0/4) Epoch 25, batch 1600, loss[loss=0.1724, simple_loss=0.2508, pruned_loss=0.047, over 19800.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2503, pruned_loss=0.0501, over 3950546.06 frames. ], batch size: 79, lr: 6.24e-03, grad_scale: 16.0 +2023-03-28 19:24:34,287 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. 
limit=2.0 +2023-03-28 19:24:49,089 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46130.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:25:21,680 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.5431, 5.7380, 5.8229, 5.8075, 5.5590, 5.7946, 5.1109, 4.7996], + device='cuda:0'), covar=tensor([0.0775, 0.0941, 0.0934, 0.0661, 0.0959, 0.1004, 0.1433, 0.2553], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0268, 0.0286, 0.0248, 0.0251, 0.0237, 0.0255, 0.0302], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 19:26:20,484 INFO [train.py:892] (0/4) Epoch 25, batch 1650, loss[loss=0.1547, simple_loss=0.2269, pruned_loss=0.04123, over 19814.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2496, pruned_loss=0.04985, over 3949665.85 frames. ], batch size: 96, lr: 6.23e-03, grad_scale: 16.0 +2023-03-28 19:28:06,798 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46215.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:28:20,986 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.501e+02 4.063e+02 5.043e+02 6.372e+02 1.431e+03, threshold=1.009e+03, percent-clipped=3.0 +2023-03-28 19:28:21,013 INFO [train.py:892] (0/4) Epoch 25, batch 1700, loss[loss=0.1706, simple_loss=0.2474, pruned_loss=0.04688, over 19761.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2494, pruned_loss=0.04952, over 3949663.56 frames. ], batch size: 217, lr: 6.23e-03, grad_scale: 16.0 +2023-03-28 19:30:18,587 INFO [train.py:892] (0/4) Epoch 25, batch 1750, loss[loss=0.1579, simple_loss=0.2413, pruned_loss=0.03722, over 19640.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2477, pruned_loss=0.04868, over 3950594.52 frames. ], batch size: 72, lr: 6.23e-03, grad_scale: 16.0 +2023-03-28 19:30:29,021 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46276.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:30:57,734 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1402, 3.0739, 3.4631, 2.4446, 3.7329, 2.9439, 3.0426, 3.6515], + device='cuda:0'), covar=tensor([0.0723, 0.0467, 0.0498, 0.0866, 0.0242, 0.0421, 0.0496, 0.0231], + device='cuda:0'), in_proj_covar=tensor([0.0072, 0.0080, 0.0077, 0.0106, 0.0073, 0.0076, 0.0073, 0.0066], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 19:32:03,796 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.718e+02 4.184e+02 4.852e+02 5.831e+02 2.262e+03, threshold=9.705e+02, percent-clipped=1.0 +2023-03-28 19:32:03,825 INFO [train.py:892] (0/4) Epoch 25, batch 1800, loss[loss=0.1541, simple_loss=0.2248, pruned_loss=0.04165, over 19771.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2482, pruned_loss=0.04902, over 3949250.62 frames. 
], batch size: 193, lr: 6.22e-03, grad_scale: 16.0 +2023-03-28 19:32:32,054 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46337.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:32:42,855 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6859, 3.7591, 2.2848, 3.9615, 4.0273, 1.9163, 3.3519, 3.0706], + device='cuda:0'), covar=tensor([0.0760, 0.0822, 0.2913, 0.0654, 0.0556, 0.2644, 0.1032, 0.0887], + device='cuda:0'), in_proj_covar=tensor([0.0229, 0.0251, 0.0229, 0.0266, 0.0245, 0.0203, 0.0237, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 19:33:09,239 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-03-28 19:33:14,691 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46358.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:33:30,314 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.63 vs. limit=5.0 +2023-03-28 19:33:38,598 INFO [train.py:892] (0/4) Epoch 25, batch 1850, loss[loss=0.1846, simple_loss=0.2737, pruned_loss=0.0477, over 19855.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2492, pruned_loss=0.04885, over 3948745.47 frames. ], batch size: 58, lr: 6.22e-03, grad_scale: 16.0 +2023-03-28 19:33:45,984 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-25.pt +2023-03-28 19:34:37,733 INFO [train.py:892] (0/4) Epoch 26, batch 0, loss[loss=0.1697, simple_loss=0.2472, pruned_loss=0.04612, over 19849.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2472, pruned_loss=0.04612, over 19849.00 frames. ], batch size: 78, lr: 6.10e-03, grad_scale: 16.0 +2023-03-28 19:34:37,734 INFO [train.py:917] (0/4) Computing validation loss +2023-03-28 19:34:56,075 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3707, 3.2529, 3.7193, 3.0592, 3.9960, 3.1936, 3.3099, 3.9028], + device='cuda:0'), covar=tensor([0.0879, 0.0420, 0.0640, 0.0702, 0.0290, 0.0427, 0.0538, 0.0298], + device='cuda:0'), in_proj_covar=tensor([0.0071, 0.0079, 0.0077, 0.0105, 0.0073, 0.0076, 0.0073, 0.0066], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 19:35:04,334 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0478, 3.1232, 3.1160, 3.0636, 2.9155, 3.0741, 2.9628, 3.2661], + device='cuda:0'), covar=tensor([0.0253, 0.0317, 0.0315, 0.0286, 0.0353, 0.0223, 0.0303, 0.0279], + device='cuda:0'), in_proj_covar=tensor([0.0079, 0.0073, 0.0075, 0.0070, 0.0083, 0.0076, 0.0094, 0.0067], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 19:35:16,909 INFO [train.py:926] (0/4) Epoch 26, validation: loss=0.176, simple_loss=0.2485, pruned_loss=0.05179, over 2883724.00 frames. 
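The loss triples printed by train.py in this log are internally consistent: in every entry, the reported loss is approximately half of simple_loss plus pruned_loss (for example, the epoch-26 validation line just above: 0.5 * 0.2485 + 0.05179 ≈ 0.176). A minimal Python sketch of that recombination follows; the function name and the 0.5 scale are assumptions inferred from the logged numbers, not taken from the training code itself.

def combined_loss(simple_loss: float, pruned_loss: float,
                  simple_loss_scale: float = 0.5) -> float:
    # Recombine the two partial losses the way the `loss` field is reported;
    # the 0.5 default is an assumption that matches every triple in this log.
    return simple_loss_scale * simple_loss + pruned_loss

# Check against the epoch-26 validation entry logged above.
assert abs(combined_loss(0.2485, 0.05179) - 0.176) < 5e-4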
+2023-03-28 19:35:16,910 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-28 19:35:36,237 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2987, 4.1166, 4.1664, 3.9148, 4.2963, 3.0623, 3.6380, 2.2586], + device='cuda:0'), covar=tensor([0.0186, 0.0205, 0.0138, 0.0184, 0.0138, 0.0897, 0.0634, 0.1356], + device='cuda:0'), in_proj_covar=tensor([0.0103, 0.0144, 0.0111, 0.0131, 0.0118, 0.0132, 0.0141, 0.0126], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 19:36:01,565 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6988, 2.4692, 2.9245, 2.5798, 3.0529, 2.9936, 3.5749, 3.8915], + device='cuda:0'), covar=tensor([0.0652, 0.1894, 0.1579, 0.2200, 0.1645, 0.1583, 0.0609, 0.0512], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0237, 0.0260, 0.0249, 0.0289, 0.0250, 0.0225, 0.0245], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 19:36:28,836 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46406.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:37:07,521 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.563e+02 3.816e+02 4.430e+02 5.059e+02 8.683e+02, threshold=8.861e+02, percent-clipped=0.0 +2023-03-28 19:37:18,520 INFO [train.py:892] (0/4) Epoch 26, batch 50, loss[loss=0.1557, simple_loss=0.2218, pruned_loss=0.04477, over 19792.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2441, pruned_loss=0.0492, over 891749.91 frames. ], batch size: 126, lr: 6.09e-03, grad_scale: 16.0 +2023-03-28 19:37:28,803 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46430.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:38:16,049 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0 +2023-03-28 19:39:07,490 INFO [train.py:892] (0/4) Epoch 26, batch 100, loss[loss=0.1667, simple_loss=0.2416, pruned_loss=0.04591, over 19620.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2436, pruned_loss=0.04708, over 1570261.72 frames. ], batch size: 65, lr: 6.09e-03, grad_scale: 16.0 +2023-03-28 19:39:12,024 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46478.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:40:31,542 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46515.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:40:42,633 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.820e+02 4.177e+02 4.877e+02 5.600e+02 1.186e+03, threshold=9.755e+02, percent-clipped=5.0 +2023-03-28 19:40:54,307 INFO [train.py:892] (0/4) Epoch 26, batch 150, loss[loss=0.1652, simple_loss=0.2287, pruned_loss=0.05087, over 19866.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2421, pruned_loss=0.04575, over 2097925.02 frames. ], batch size: 129, lr: 6.09e-03, grad_scale: 16.0 +2023-03-28 19:42:14,548 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46563.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:42:42,332 INFO [train.py:892] (0/4) Epoch 26, batch 200, loss[loss=0.1604, simple_loss=0.2384, pruned_loss=0.04126, over 19712.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2444, pruned_loss=0.04647, over 2509289.05 frames. 
], batch size: 101, lr: 6.08e-03, grad_scale: 16.0 +2023-03-28 19:43:25,367 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46595.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:43:42,296 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46602.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:44:24,549 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.062e+02 3.750e+02 4.526e+02 5.438e+02 1.075e+03, threshold=9.053e+02, percent-clipped=3.0 +2023-03-28 19:44:31,716 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9327, 3.6202, 3.7268, 3.9485, 3.7050, 3.9929, 3.9792, 4.1474], + device='cuda:0'), covar=tensor([0.0859, 0.0646, 0.0738, 0.0513, 0.0881, 0.0722, 0.0775, 0.0525], + device='cuda:0'), in_proj_covar=tensor([0.0149, 0.0174, 0.0196, 0.0170, 0.0169, 0.0152, 0.0147, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 19:44:34,758 INFO [train.py:892] (0/4) Epoch 26, batch 250, loss[loss=0.1641, simple_loss=0.2339, pruned_loss=0.04716, over 19908.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2444, pruned_loss=0.04655, over 2827355.82 frames. ], batch size: 116, lr: 6.08e-03, grad_scale: 16.0 +2023-03-28 19:44:49,448 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46632.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:44:52,185 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8419, 4.5223, 4.5940, 4.3554, 4.8112, 3.1867, 3.9578, 2.3103], + device='cuda:0'), covar=tensor([0.0182, 0.0226, 0.0146, 0.0182, 0.0142, 0.0945, 0.0803, 0.1516], + device='cuda:0'), in_proj_covar=tensor([0.0102, 0.0142, 0.0111, 0.0130, 0.0117, 0.0132, 0.0140, 0.0125], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 19:45:46,798 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46656.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 19:46:01,335 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46663.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:46:01,470 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5127, 2.5718, 2.7438, 2.4452, 2.9471, 2.9232, 3.3615, 3.7025], + device='cuda:0'), covar=tensor([0.0676, 0.1609, 0.1693, 0.2144, 0.1458, 0.1396, 0.0656, 0.0599], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0237, 0.0261, 0.0251, 0.0290, 0.0251, 0.0225, 0.0245], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 19:46:14,879 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9304, 2.9718, 1.9254, 3.0172, 3.1242, 1.4988, 2.5922, 2.3968], + device='cuda:0'), covar=tensor([0.0869, 0.0819, 0.2528, 0.0762, 0.0636, 0.2423, 0.1053, 0.1018], + device='cuda:0'), in_proj_covar=tensor([0.0227, 0.0249, 0.0226, 0.0265, 0.0244, 0.0201, 0.0236, 0.0191], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 19:46:31,114 INFO [train.py:892] (0/4) Epoch 26, batch 300, loss[loss=0.1827, simple_loss=0.2573, pruned_loss=0.05403, over 19733.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2456, pruned_loss=0.04704, over 3076174.51 frames. 
], batch size: 76, lr: 6.08e-03, grad_scale: 16.0 +2023-03-28 19:46:58,156 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-03-28 19:48:12,956 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.886e+02 3.778e+02 4.525e+02 5.535e+02 8.157e+02, threshold=9.049e+02, percent-clipped=0.0 +2023-03-28 19:48:23,024 INFO [train.py:892] (0/4) Epoch 26, batch 350, loss[loss=0.1748, simple_loss=0.2403, pruned_loss=0.05463, over 19769.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2475, pruned_loss=0.04764, over 3267094.60 frames. ], batch size: 253, lr: 6.07e-03, grad_scale: 16.0 +2023-03-28 19:50:21,467 INFO [train.py:892] (0/4) Epoch 26, batch 400, loss[loss=0.1626, simple_loss=0.2402, pruned_loss=0.04252, over 19849.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2474, pruned_loss=0.04753, over 3419136.87 frames. ], batch size: 115, lr: 6.07e-03, grad_scale: 16.0 +2023-03-28 19:50:45,523 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4488, 4.7165, 4.7327, 4.6189, 4.4299, 4.7144, 4.2156, 4.2343], + device='cuda:0'), covar=tensor([0.0528, 0.0458, 0.0561, 0.0471, 0.0679, 0.0562, 0.0756, 0.1042], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0267, 0.0288, 0.0247, 0.0252, 0.0239, 0.0257, 0.0303], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 19:51:41,766 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46810.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:51:43,851 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9595, 3.8505, 4.2763, 3.8713, 3.7521, 4.1578, 3.9281, 4.3662], + device='cuda:0'), covar=tensor([0.0991, 0.0455, 0.0461, 0.0478, 0.1072, 0.0689, 0.0620, 0.0453], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0222, 0.0221, 0.0231, 0.0206, 0.0234, 0.0230, 0.0212], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 19:52:09,141 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.488e+02 3.851e+02 4.931e+02 6.030e+02 1.390e+03, threshold=9.862e+02, percent-clipped=3.0 +2023-03-28 19:52:19,760 INFO [train.py:892] (0/4) Epoch 26, batch 450, loss[loss=0.1769, simple_loss=0.2578, pruned_loss=0.04802, over 19661.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2461, pruned_loss=0.04684, over 3538355.95 frames. ], batch size: 51, lr: 6.07e-03, grad_scale: 16.0 +2023-03-28 19:52:54,327 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1507, 2.2498, 2.3533, 2.2597, 2.2627, 2.2879, 2.2515, 2.3028], + device='cuda:0'), covar=tensor([0.0413, 0.0290, 0.0291, 0.0275, 0.0406, 0.0319, 0.0419, 0.0349], + device='cuda:0'), in_proj_covar=tensor([0.0078, 0.0072, 0.0074, 0.0069, 0.0082, 0.0076, 0.0093, 0.0067], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 19:54:06,070 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46871.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:54:16,458 INFO [train.py:892] (0/4) Epoch 26, batch 500, loss[loss=0.1928, simple_loss=0.2604, pruned_loss=0.06258, over 19769.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2471, pruned_loss=0.04791, over 3630146.29 frames. 
], batch size: 247, lr: 6.06e-03, grad_scale: 16.0 +2023-03-28 19:54:21,812 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1643, 3.0290, 1.6785, 3.7654, 3.3108, 3.6555, 3.7429, 2.9255], + device='cuda:0'), covar=tensor([0.0651, 0.0712, 0.2134, 0.0560, 0.0593, 0.0421, 0.0527, 0.0801], + device='cuda:0'), in_proj_covar=tensor([0.0140, 0.0140, 0.0141, 0.0146, 0.0130, 0.0129, 0.0142, 0.0142], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 19:56:03,163 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.854e+02 4.348e+02 5.094e+02 6.034e+02 1.006e+03, threshold=1.019e+03, percent-clipped=1.0 +2023-03-28 19:56:14,640 INFO [train.py:892] (0/4) Epoch 26, batch 550, loss[loss=0.1764, simple_loss=0.2527, pruned_loss=0.05009, over 19711.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2484, pruned_loss=0.04827, over 3700871.44 frames. ], batch size: 78, lr: 6.06e-03, grad_scale: 16.0 +2023-03-28 19:56:27,479 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46932.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:57:13,234 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46951.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 19:57:28,657 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46958.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:57:28,913 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9061, 3.1778, 3.4133, 3.8953, 2.7861, 3.1633, 2.5377, 2.5917], + device='cuda:0'), covar=tensor([0.0489, 0.1881, 0.0925, 0.0368, 0.1800, 0.0824, 0.1353, 0.1594], + device='cuda:0'), in_proj_covar=tensor([0.0235, 0.0334, 0.0244, 0.0196, 0.0242, 0.0203, 0.0212, 0.0214], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 19:58:10,255 INFO [train.py:892] (0/4) Epoch 26, batch 600, loss[loss=0.1687, simple_loss=0.2388, pruned_loss=0.04935, over 19875.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.249, pruned_loss=0.04843, over 3755859.54 frames. ], batch size: 159, lr: 6.06e-03, grad_scale: 16.0 +2023-03-28 19:58:20,886 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46980.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 19:58:51,063 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.14 vs. limit=5.0 +2023-03-28 19:59:54,797 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.697e+02 3.904e+02 4.581e+02 5.783e+02 1.259e+03, threshold=9.163e+02, percent-clipped=1.0 +2023-03-28 20:00:06,807 INFO [train.py:892] (0/4) Epoch 26, batch 650, loss[loss=0.1524, simple_loss=0.2239, pruned_loss=0.0405, over 19808.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2483, pruned_loss=0.04823, over 3798096.23 frames. ], batch size: 132, lr: 6.05e-03, grad_scale: 16.0 +2023-03-28 20:02:09,485 INFO [train.py:892] (0/4) Epoch 26, batch 700, loss[loss=0.1639, simple_loss=0.2386, pruned_loss=0.0446, over 19772.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2479, pruned_loss=0.04788, over 3831545.91 frames. 
], batch size: 69, lr: 6.05e-03, grad_scale: 16.0 +2023-03-28 20:03:57,428 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.011e+02 3.881e+02 4.556e+02 5.406e+02 9.543e+02, threshold=9.112e+02, percent-clipped=1.0 +2023-03-28 20:04:09,100 INFO [train.py:892] (0/4) Epoch 26, batch 750, loss[loss=0.2186, simple_loss=0.2921, pruned_loss=0.07252, over 19636.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2474, pruned_loss=0.04785, over 3858728.50 frames. ], batch size: 343, lr: 6.05e-03, grad_scale: 16.0 +2023-03-28 20:05:48,948 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47166.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:05:49,236 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6930, 4.6900, 2.8750, 4.9224, 5.1844, 2.2132, 4.4146, 3.7779], + device='cuda:0'), covar=tensor([0.0522, 0.0682, 0.2434, 0.0683, 0.0407, 0.2656, 0.0838, 0.0756], + device='cuda:0'), in_proj_covar=tensor([0.0230, 0.0253, 0.0228, 0.0268, 0.0248, 0.0202, 0.0238, 0.0194], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 20:06:15,269 INFO [train.py:892] (0/4) Epoch 26, batch 800, loss[loss=0.173, simple_loss=0.2541, pruned_loss=0.04596, over 19764.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2473, pruned_loss=0.04777, over 3878974.30 frames. ], batch size: 49, lr: 6.04e-03, grad_scale: 16.0 +2023-03-28 20:08:13,343 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.013e+02 4.034e+02 4.451e+02 5.296e+02 1.110e+03, threshold=8.902e+02, percent-clipped=2.0 +2023-03-28 20:08:25,316 INFO [train.py:892] (0/4) Epoch 26, batch 850, loss[loss=0.152, simple_loss=0.2318, pruned_loss=0.03614, over 19647.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2479, pruned_loss=0.0482, over 3894634.34 frames. ], batch size: 66, lr: 6.04e-03, grad_scale: 16.0 +2023-03-28 20:09:04,389 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47241.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:09:28,085 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47251.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:09:47,647 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47258.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:10:29,641 INFO [train.py:892] (0/4) Epoch 26, batch 900, loss[loss=0.1696, simple_loss=0.2443, pruned_loss=0.04746, over 19807.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2485, pruned_loss=0.04891, over 3906067.92 frames. ], batch size: 114, lr: 6.04e-03, grad_scale: 16.0 +2023-03-28 20:11:15,813 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. 
limit=2.0 +2023-03-28 20:11:30,377 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47299.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:11:38,852 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47302.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:11:47,673 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47306.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:12:26,907 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.526e+02 3.904e+02 4.611e+02 5.515e+02 1.040e+03, threshold=9.223e+02, percent-clipped=2.0 +2023-03-28 20:12:27,770 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2663, 5.5687, 5.8043, 5.6127, 5.5198, 5.3242, 5.5164, 5.3715], + device='cuda:0'), covar=tensor([0.1459, 0.1358, 0.0806, 0.1067, 0.0644, 0.0800, 0.1725, 0.1898], + device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0318, 0.0362, 0.0289, 0.0268, 0.0269, 0.0346, 0.0379], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 20:12:38,726 INFO [train.py:892] (0/4) Epoch 26, batch 950, loss[loss=0.16, simple_loss=0.24, pruned_loss=0.04004, over 19635.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.249, pruned_loss=0.04918, over 3916061.34 frames. ], batch size: 68, lr: 6.03e-03, grad_scale: 16.0 +2023-03-28 20:14:46,333 INFO [train.py:892] (0/4) Epoch 26, batch 1000, loss[loss=0.1647, simple_loss=0.2515, pruned_loss=0.03891, over 19792.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2472, pruned_loss=0.04796, over 3923654.34 frames. ], batch size: 45, lr: 6.03e-03, grad_scale: 16.0 +2023-03-28 20:14:54,447 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47379.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:16:35,924 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0712, 4.7449, 4.7850, 5.1337, 4.8434, 5.3325, 5.1961, 5.4153], + device='cuda:0'), covar=tensor([0.0703, 0.0372, 0.0425, 0.0308, 0.0601, 0.0320, 0.0440, 0.0262], + device='cuda:0'), in_proj_covar=tensor([0.0147, 0.0172, 0.0195, 0.0168, 0.0168, 0.0151, 0.0147, 0.0191], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 20:16:43,924 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.712e+02 3.612e+02 4.402e+02 5.356e+02 9.611e+02, threshold=8.803e+02, percent-clipped=1.0 +2023-03-28 20:16:51,656 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2202, 2.9825, 3.0829, 3.2486, 3.1404, 3.1323, 3.3089, 3.4822], + device='cuda:0'), covar=tensor([0.0809, 0.0565, 0.0616, 0.0454, 0.0750, 0.0792, 0.0547, 0.0391], + device='cuda:0'), in_proj_covar=tensor([0.0147, 0.0172, 0.0195, 0.0168, 0.0168, 0.0151, 0.0147, 0.0191], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 20:16:55,237 INFO [train.py:892] (0/4) Epoch 26, batch 1050, loss[loss=0.1695, simple_loss=0.2403, pruned_loss=0.04936, over 19849.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2489, pruned_loss=0.0488, over 3927491.67 frames. 
], batch size: 144, lr: 6.03e-03, grad_scale: 16.0 +2023-03-28 20:17:30,558 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47440.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:18:20,222 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3485, 2.6069, 3.5177, 2.9256, 3.0706, 2.9994, 2.1390, 2.3448], + device='cuda:0'), covar=tensor([0.1076, 0.2878, 0.0665, 0.1021, 0.1733, 0.1411, 0.2516, 0.2670], + device='cuda:0'), in_proj_covar=tensor([0.0345, 0.0378, 0.0339, 0.0276, 0.0368, 0.0363, 0.0361, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 20:18:32,979 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47466.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:18:57,854 INFO [train.py:892] (0/4) Epoch 26, batch 1100, loss[loss=0.2149, simple_loss=0.292, pruned_loss=0.06887, over 19526.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2486, pruned_loss=0.04829, over 3933379.41 frames. ], batch size: 54, lr: 6.03e-03, grad_scale: 16.0 +2023-03-28 20:20:34,653 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47514.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:20:53,323 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.834e+02 3.692e+02 4.407e+02 5.502e+02 8.056e+02, threshold=8.815e+02, percent-clipped=0.0 +2023-03-28 20:21:05,818 INFO [train.py:892] (0/4) Epoch 26, batch 1150, loss[loss=0.1829, simple_loss=0.2662, pruned_loss=0.04976, over 19688.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2482, pruned_loss=0.04845, over 3937297.97 frames. ], batch size: 49, lr: 6.02e-03, grad_scale: 16.0 +2023-03-28 20:21:19,474 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-03-28 20:21:27,999 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8771, 4.5531, 4.5672, 4.2783, 4.8147, 3.1747, 3.9089, 2.3353], + device='cuda:0'), covar=tensor([0.0202, 0.0220, 0.0159, 0.0204, 0.0155, 0.0911, 0.0864, 0.1617], + device='cuda:0'), in_proj_covar=tensor([0.0104, 0.0143, 0.0112, 0.0133, 0.0118, 0.0134, 0.0141, 0.0127], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 20:23:12,011 INFO [train.py:892] (0/4) Epoch 26, batch 1200, loss[loss=0.1789, simple_loss=0.2577, pruned_loss=0.05001, over 19714.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2482, pruned_loss=0.04871, over 3941431.24 frames. 
], batch size: 295, lr: 6.02e-03, grad_scale: 16.0 +2023-03-28 20:23:51,535 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9587, 2.8769, 3.0809, 2.5203, 3.1937, 2.6077, 2.9343, 3.0265], + device='cuda:0'), covar=tensor([0.0462, 0.0480, 0.0502, 0.0747, 0.0324, 0.0504, 0.0488, 0.0369], + device='cuda:0'), in_proj_covar=tensor([0.0072, 0.0080, 0.0077, 0.0105, 0.0074, 0.0076, 0.0074, 0.0067], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 20:24:01,124 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1212, 2.2573, 2.3581, 2.2069, 2.2343, 2.3257, 2.2866, 2.3822], + device='cuda:0'), covar=tensor([0.0359, 0.0351, 0.0297, 0.0290, 0.0447, 0.0318, 0.0404, 0.0281], + device='cuda:0'), in_proj_covar=tensor([0.0079, 0.0072, 0.0075, 0.0069, 0.0083, 0.0076, 0.0094, 0.0067], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 20:24:05,310 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47597.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:25:05,923 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.537e+02 4.111e+02 4.755e+02 5.836e+02 8.359e+02, threshold=9.510e+02, percent-clipped=0.0 +2023-03-28 20:25:19,221 INFO [train.py:892] (0/4) Epoch 26, batch 1250, loss[loss=0.1435, simple_loss=0.2245, pruned_loss=0.03128, over 19737.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2462, pruned_loss=0.0477, over 3944643.71 frames. ], batch size: 99, lr: 6.02e-03, grad_scale: 16.0 +2023-03-28 20:27:24,888 INFO [train.py:892] (0/4) Epoch 26, batch 1300, loss[loss=0.1481, simple_loss=0.2205, pruned_loss=0.03787, over 19607.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2466, pruned_loss=0.04773, over 3945054.62 frames. ], batch size: 46, lr: 6.01e-03, grad_scale: 16.0 +2023-03-28 20:29:19,680 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.334e+02 3.779e+02 4.339e+02 5.421e+02 8.530e+02, threshold=8.679e+02, percent-clipped=0.0 +2023-03-28 20:29:34,006 INFO [train.py:892] (0/4) Epoch 26, batch 1350, loss[loss=0.1696, simple_loss=0.2438, pruned_loss=0.04767, over 19797.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2457, pruned_loss=0.0474, over 3947744.49 frames. ], batch size: 224, lr: 6.01e-03, grad_scale: 16.0 +2023-03-28 20:29:56,210 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7506, 4.3969, 4.4643, 4.7337, 4.4383, 4.8981, 4.8828, 5.0223], + device='cuda:0'), covar=tensor([0.0661, 0.0390, 0.0510, 0.0334, 0.0604, 0.0403, 0.0423, 0.0338], + device='cuda:0'), in_proj_covar=tensor([0.0146, 0.0171, 0.0194, 0.0167, 0.0166, 0.0150, 0.0145, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 20:29:58,922 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47735.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:30:06,613 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47738.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:31:42,324 INFO [train.py:892] (0/4) Epoch 26, batch 1400, loss[loss=0.1625, simple_loss=0.235, pruned_loss=0.04501, over 19881.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2447, pruned_loss=0.0468, over 3949256.94 frames. 
], batch size: 134, lr: 6.01e-03, grad_scale: 16.0 +2023-03-28 20:32:13,697 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47788.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:32:43,687 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47799.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:33:05,561 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47809.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 20:33:29,897 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.973e+02 4.160e+02 4.762e+02 6.051e+02 1.589e+03, threshold=9.525e+02, percent-clipped=3.0 +2023-03-28 20:33:39,294 INFO [train.py:892] (0/4) Epoch 26, batch 1450, loss[loss=0.2109, simple_loss=0.2832, pruned_loss=0.06929, over 19786.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2451, pruned_loss=0.04668, over 3949122.90 frames. ], batch size: 263, lr: 6.00e-03, grad_scale: 16.0 +2023-03-28 20:34:38,802 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47849.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:35:29,373 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47870.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 20:35:46,884 INFO [train.py:892] (0/4) Epoch 26, batch 1500, loss[loss=0.1493, simple_loss=0.2255, pruned_loss=0.03659, over 19857.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2457, pruned_loss=0.04696, over 3948033.44 frames. ], batch size: 112, lr: 6.00e-03, grad_scale: 16.0 +2023-03-28 20:36:33,842 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-28 20:36:41,723 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47897.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:37:14,539 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2364, 4.8589, 4.9289, 4.7031, 5.1857, 3.3881, 4.2436, 2.6015], + device='cuda:0'), covar=tensor([0.0162, 0.0174, 0.0124, 0.0171, 0.0126, 0.0840, 0.0824, 0.1422], + device='cuda:0'), in_proj_covar=tensor([0.0104, 0.0143, 0.0112, 0.0133, 0.0118, 0.0134, 0.0142, 0.0127], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 20:37:43,345 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.594e+02 4.057e+02 4.781e+02 5.680e+02 8.130e+02, threshold=9.563e+02, percent-clipped=0.0 +2023-03-28 20:37:54,769 INFO [train.py:892] (0/4) Epoch 26, batch 1550, loss[loss=0.163, simple_loss=0.2382, pruned_loss=0.04388, over 19800.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2469, pruned_loss=0.04749, over 3948418.84 frames. ], batch size: 200, lr: 6.00e-03, grad_scale: 32.0 +2023-03-28 20:38:45,541 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47945.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:39:59,941 INFO [train.py:892] (0/4) Epoch 26, batch 1600, loss[loss=0.1829, simple_loss=0.2532, pruned_loss=0.05629, over 19755.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2465, pruned_loss=0.04701, over 3948509.88 frames. 
], batch size: 179, lr: 5.99e-03, grad_scale: 32.0 +2023-03-28 20:40:58,800 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-48000.pt +2023-03-28 20:41:53,733 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.602e+02 3.888e+02 4.549e+02 5.335e+02 7.774e+02, threshold=9.097e+02, percent-clipped=0.0 +2023-03-28 20:42:08,505 INFO [train.py:892] (0/4) Epoch 26, batch 1650, loss[loss=0.1811, simple_loss=0.2476, pruned_loss=0.05727, over 19800.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2462, pruned_loss=0.0468, over 3950097.96 frames. ], batch size: 150, lr: 5.99e-03, grad_scale: 32.0 +2023-03-28 20:42:31,638 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48035.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:44:04,954 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48072.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:44:15,500 INFO [train.py:892] (0/4) Epoch 26, batch 1700, loss[loss=0.2009, simple_loss=0.2721, pruned_loss=0.06487, over 19802.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2478, pruned_loss=0.04748, over 3948673.71 frames. ], batch size: 65, lr: 5.99e-03, grad_scale: 32.0 +2023-03-28 20:44:34,004 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48083.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:45:02,922 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48094.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:45:31,769 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.8115, 6.1355, 6.1301, 6.0618, 5.8931, 6.1649, 5.4616, 5.5272], + device='cuda:0'), covar=tensor([0.0359, 0.0433, 0.0440, 0.0344, 0.0469, 0.0502, 0.0596, 0.0885], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0271, 0.0284, 0.0248, 0.0253, 0.0238, 0.0255, 0.0302], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 20:46:06,350 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.958e+02 3.839e+02 4.571e+02 5.458e+02 1.264e+03, threshold=9.143e+02, percent-clipped=4.0 +2023-03-28 20:46:18,118 INFO [train.py:892] (0/4) Epoch 26, batch 1750, loss[loss=0.1667, simple_loss=0.24, pruned_loss=0.04666, over 19770.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2462, pruned_loss=0.0469, over 3949140.03 frames. 
], batch size: 213, lr: 5.98e-03, grad_scale: 32.0 +2023-03-28 20:46:34,777 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48133.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:46:58,172 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48144.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:47:09,421 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.7644, 1.9700, 1.9061, 1.6877, 1.9744, 1.7026, 1.9539, 1.9664], + device='cuda:0'), covar=tensor([0.0503, 0.0503, 0.0488, 0.0927, 0.0433, 0.0524, 0.0514, 0.0420], + device='cuda:0'), in_proj_covar=tensor([0.0074, 0.0081, 0.0079, 0.0107, 0.0076, 0.0077, 0.0075, 0.0068], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 20:47:41,413 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48165.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 20:48:01,804 INFO [train.py:892] (0/4) Epoch 26, batch 1800, loss[loss=0.1742, simple_loss=0.2597, pruned_loss=0.04437, over 19575.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.247, pruned_loss=0.04714, over 3947062.28 frames. ], batch size: 53, lr: 5.98e-03, grad_scale: 16.0 +2023-03-28 20:49:10,914 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48212.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 20:49:30,284 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.294e+02 3.982e+02 4.834e+02 5.879e+02 1.454e+03, threshold=9.667e+02, percent-clipped=6.0 +2023-03-28 20:49:38,197 INFO [train.py:892] (0/4) Epoch 26, batch 1850, loss[loss=0.1864, simple_loss=0.2669, pruned_loss=0.05293, over 19842.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2482, pruned_loss=0.0472, over 3947700.00 frames. ], batch size: 58, lr: 5.98e-03, grad_scale: 16.0 +2023-03-28 20:49:46,406 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-26.pt +2023-03-28 20:50:46,058 INFO [train.py:892] (0/4) Epoch 27, batch 0, loss[loss=0.1571, simple_loss=0.2207, pruned_loss=0.04681, over 19837.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2207, pruned_loss=0.04681, over 19837.00 frames. ], batch size: 142, lr: 5.86e-03, grad_scale: 16.0 +2023-03-28 20:50:46,059 INFO [train.py:917] (0/4) Computing validation loss +2023-03-28 20:51:17,574 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8583, 3.8877, 4.1089, 3.8603, 3.7500, 3.9675, 3.7449, 4.1290], + device='cuda:0'), covar=tensor([0.0668, 0.0313, 0.0341, 0.0352, 0.0714, 0.0500, 0.0533, 0.0328], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0220, 0.0221, 0.0232, 0.0205, 0.0233, 0.0230, 0.0212], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 20:51:19,428 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.7725, 1.5796, 1.7738, 1.6870, 1.7071, 1.7216, 1.7269, 1.7561], + device='cuda:0'), covar=tensor([0.0339, 0.0343, 0.0330, 0.0319, 0.0441, 0.0343, 0.0452, 0.0392], + device='cuda:0'), in_proj_covar=tensor([0.0080, 0.0074, 0.0077, 0.0070, 0.0086, 0.0078, 0.0095, 0.0069], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 20:51:25,320 INFO [train.py:926] (0/4) Epoch 27, validation: loss=0.1767, simple_loss=0.2485, pruned_loss=0.05248, over 2883724.00 frames. 
+2023-03-28 20:51:25,321 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-28 20:51:34,096 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0502, 3.7637, 3.9185, 4.0638, 3.8024, 4.0226, 4.1912, 4.3577], + device='cuda:0'), covar=tensor([0.0683, 0.0465, 0.0546, 0.0375, 0.0726, 0.0684, 0.0465, 0.0330], + device='cuda:0'), in_proj_covar=tensor([0.0150, 0.0174, 0.0197, 0.0172, 0.0170, 0.0155, 0.0147, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 20:52:16,663 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-03-28 20:53:19,568 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48273.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 20:53:38,993 INFO [train.py:892] (0/4) Epoch 27, batch 50, loss[loss=0.1911, simple_loss=0.2664, pruned_loss=0.05792, over 19854.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2469, pruned_loss=0.04797, over 892382.98 frames. ], batch size: 208, lr: 5.86e-03, grad_scale: 16.0 +2023-03-28 20:54:10,989 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.8191, 6.1772, 6.1796, 6.1283, 5.8400, 6.1913, 5.4660, 5.5543], + device='cuda:0'), covar=tensor([0.0353, 0.0438, 0.0450, 0.0349, 0.0523, 0.0529, 0.0747, 0.0947], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0273, 0.0288, 0.0250, 0.0255, 0.0240, 0.0259, 0.0305], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 20:55:21,762 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.295e+02 3.761e+02 4.503e+02 5.543e+02 9.672e+02, threshold=9.006e+02, percent-clipped=1.0 +2023-03-28 20:55:27,258 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48324.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:55:43,315 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5046, 2.6589, 4.3903, 3.7984, 4.1649, 4.3072, 4.2441, 4.0903], + device='cuda:0'), covar=tensor([0.0484, 0.0929, 0.0108, 0.0719, 0.0153, 0.0209, 0.0166, 0.0165], + device='cuda:0'), in_proj_covar=tensor([0.0096, 0.0101, 0.0085, 0.0151, 0.0081, 0.0095, 0.0088, 0.0084], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 20:55:44,763 INFO [train.py:892] (0/4) Epoch 27, batch 100, loss[loss=0.1342, simple_loss=0.2172, pruned_loss=0.02564, over 19774.00 frames. ], tot_loss[loss=0.1661, simple_loss=0.2424, pruned_loss=0.04494, over 1571008.27 frames. ], batch size: 94, lr: 5.86e-03, grad_scale: 16.0 +2023-03-28 20:55:55,457 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.2321, 2.2755, 1.5102, 2.4043, 2.2958, 2.3148, 2.4156, 1.9642], + device='cuda:0'), covar=tensor([0.0697, 0.0784, 0.1305, 0.0604, 0.0700, 0.0599, 0.0579, 0.1023], + device='cuda:0'), in_proj_covar=tensor([0.0139, 0.0139, 0.0140, 0.0146, 0.0130, 0.0129, 0.0142, 0.0142], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 20:57:46,611 INFO [train.py:892] (0/4) Epoch 27, batch 150, loss[loss=0.1663, simple_loss=0.2422, pruned_loss=0.04516, over 19812.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.245, pruned_loss=0.04576, over 2096362.24 frames. 
], batch size: 98, lr: 5.86e-03, grad_scale: 16.0 +2023-03-28 20:57:56,692 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48385.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:58:23,174 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48394.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:58:30,093 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48397.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:59:26,410 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.124e+02 3.642e+02 4.290e+02 5.743e+02 1.049e+03, threshold=8.581e+02, percent-clipped=1.0 +2023-03-28 20:59:30,014 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48423.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:59:40,798 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48428.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 20:59:49,344 INFO [train.py:892] (0/4) Epoch 27, batch 200, loss[loss=0.1645, simple_loss=0.2502, pruned_loss=0.03945, over 19586.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2446, pruned_loss=0.04527, over 2507054.92 frames. ], batch size: 44, lr: 5.85e-03, grad_scale: 16.0 +2023-03-28 21:00:15,231 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48442.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:00:19,928 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48444.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:00:53,074 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48458.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:01:08,380 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48465.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 21:01:09,027 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-03-28 21:01:49,445 INFO [train.py:892] (0/4) Epoch 27, batch 250, loss[loss=0.1678, simple_loss=0.2554, pruned_loss=0.04007, over 19577.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2431, pruned_loss=0.04449, over 2828088.41 frames. ], batch size: 49, lr: 5.85e-03, grad_scale: 16.0 +2023-03-28 21:01:58,056 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48484.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:02:17,114 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48492.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:03:11,024 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48513.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 21:03:32,742 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.677e+02 3.668e+02 4.339e+02 5.007e+02 7.630e+02, threshold=8.678e+02, percent-clipped=0.0 +2023-03-28 21:03:56,494 INFO [train.py:892] (0/4) Epoch 27, batch 300, loss[loss=0.1483, simple_loss=0.228, pruned_loss=0.03432, over 19822.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2444, pruned_loss=0.04487, over 3076570.40 frames. 
], batch size: 187, lr: 5.85e-03, grad_scale: 16.0 +2023-03-28 21:05:08,358 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9183, 4.9533, 5.3312, 5.0529, 5.2351, 4.8163, 5.0566, 4.7921], + device='cuda:0'), covar=tensor([0.1333, 0.1316, 0.0805, 0.1212, 0.0689, 0.0861, 0.1667, 0.2077], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0317, 0.0357, 0.0287, 0.0268, 0.0268, 0.0342, 0.0381], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 21:05:21,968 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48568.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 21:05:49,793 INFO [train.py:892] (0/4) Epoch 27, batch 350, loss[loss=0.1469, simple_loss=0.2239, pruned_loss=0.03497, over 19774.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2445, pruned_loss=0.04482, over 3271403.55 frames. ], batch size: 46, lr: 5.84e-03, grad_scale: 16.0 +2023-03-28 21:06:20,343 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7689, 2.8059, 1.6761, 3.2105, 3.0360, 3.1627, 3.2828, 2.6297], + device='cuda:0'), covar=tensor([0.0692, 0.0675, 0.1637, 0.0737, 0.0707, 0.0491, 0.0600, 0.0838], + device='cuda:0'), in_proj_covar=tensor([0.0140, 0.0140, 0.0140, 0.0147, 0.0130, 0.0129, 0.0142, 0.0142], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 21:07:35,027 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.631e+02 3.784e+02 4.446e+02 5.402e+02 1.015e+03, threshold=8.893e+02, percent-clipped=1.0 +2023-03-28 21:07:58,131 INFO [train.py:892] (0/4) Epoch 27, batch 400, loss[loss=0.1636, simple_loss=0.2435, pruned_loss=0.04182, over 19837.00 frames. ], tot_loss[loss=0.1675, simple_loss=0.2445, pruned_loss=0.04527, over 3421734.57 frames. ], batch size: 58, lr: 5.84e-03, grad_scale: 16.0 +2023-03-28 21:08:30,279 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1357, 3.0693, 4.5687, 3.4500, 3.7436, 3.5504, 2.5792, 2.6239], + device='cuda:0'), covar=tensor([0.0875, 0.3095, 0.0433, 0.0942, 0.1615, 0.1330, 0.2376, 0.2639], + device='cuda:0'), in_proj_covar=tensor([0.0348, 0.0380, 0.0339, 0.0276, 0.0369, 0.0362, 0.0362, 0.0331], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 21:09:20,975 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.9445, 2.2515, 2.0197, 1.3986, 2.0434, 2.1298, 2.0486, 2.1903], + device='cuda:0'), covar=tensor([0.0397, 0.0311, 0.0321, 0.0614, 0.0422, 0.0304, 0.0306, 0.0275], + device='cuda:0'), in_proj_covar=tensor([0.0096, 0.0091, 0.0095, 0.0099, 0.0101, 0.0081, 0.0080, 0.0081], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 21:10:02,214 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48680.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:10:04,249 INFO [train.py:892] (0/4) Epoch 27, batch 450, loss[loss=0.1674, simple_loss=0.2446, pruned_loss=0.04508, over 19776.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2449, pruned_loss=0.0457, over 3538526.61 frames. 
], batch size: 233, lr: 5.84e-03, grad_scale: 16.0 +2023-03-28 21:10:31,209 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.3564, 2.7638, 2.3990, 1.8546, 2.3640, 2.5085, 2.5563, 2.6714], + device='cuda:0'), covar=tensor([0.0351, 0.0311, 0.0302, 0.0584, 0.0413, 0.0302, 0.0292, 0.0231], + device='cuda:0'), in_proj_covar=tensor([0.0097, 0.0091, 0.0095, 0.0099, 0.0102, 0.0081, 0.0081, 0.0081], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 21:12:00,652 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0266, 4.6247, 4.6694, 4.4397, 4.9415, 3.2456, 4.0477, 2.5644], + device='cuda:0'), covar=tensor([0.0163, 0.0192, 0.0140, 0.0177, 0.0129, 0.0905, 0.0834, 0.1395], + device='cuda:0'), in_proj_covar=tensor([0.0102, 0.0142, 0.0111, 0.0132, 0.0118, 0.0133, 0.0140, 0.0126], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 21:12:01,742 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.486e+02 3.534e+02 4.421e+02 5.220e+02 9.459e+02, threshold=8.842e+02, percent-clipped=1.0 +2023-03-28 21:12:21,099 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48728.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:12:28,177 INFO [train.py:892] (0/4) Epoch 27, batch 500, loss[loss=0.1811, simple_loss=0.2544, pruned_loss=0.05386, over 19738.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2443, pruned_loss=0.04547, over 3629123.91 frames. ], batch size: 205, lr: 5.83e-03, grad_scale: 16.0 +2023-03-28 21:13:22,833 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48753.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:14:22,453 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48776.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:14:30,760 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48779.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:14:34,625 INFO [train.py:892] (0/4) Epoch 27, batch 550, loss[loss=0.1634, simple_loss=0.2316, pruned_loss=0.04755, over 19725.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2448, pruned_loss=0.04583, over 3701270.88 frames. ], batch size: 71, lr: 5.83e-03, grad_scale: 16.0 +2023-03-28 21:14:35,860 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6386, 2.4402, 2.8488, 2.5547, 2.9956, 2.9038, 3.5556, 3.8220], + device='cuda:0'), covar=tensor([0.0668, 0.1832, 0.1617, 0.2126, 0.1589, 0.1621, 0.0592, 0.0595], + device='cuda:0'), in_proj_covar=tensor([0.0248, 0.0237, 0.0261, 0.0250, 0.0291, 0.0253, 0.0226, 0.0248], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 21:15:14,841 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-03-28 21:16:20,414 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48821.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:16:22,694 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.382e+02 4.009e+02 4.912e+02 5.862e+02 1.286e+03, threshold=9.824e+02, percent-clipped=2.0 +2023-03-28 21:16:44,185 INFO [train.py:892] (0/4) Epoch 27, batch 600, loss[loss=0.1587, simple_loss=0.2246, pruned_loss=0.04638, over 19827.00 frames. 
], tot_loss[loss=0.169, simple_loss=0.2454, pruned_loss=0.04633, over 3753599.21 frames. ], batch size: 146, lr: 5.83e-03, grad_scale: 16.0 +2023-03-28 21:16:50,524 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0 +2023-03-28 21:17:02,146 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48838.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:18:12,776 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48868.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 21:18:21,367 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.87 vs. limit=5.0 +2023-03-28 21:18:42,153 INFO [train.py:892] (0/4) Epoch 27, batch 650, loss[loss=0.1636, simple_loss=0.2368, pruned_loss=0.04522, over 19800.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2462, pruned_loss=0.04716, over 3797513.82 frames. ], batch size: 150, lr: 5.83e-03, grad_scale: 16.0 +2023-03-28 21:18:46,908 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48882.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:19:27,555 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48899.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:20:07,010 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48916.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 21:20:19,907 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.871e+02 3.831e+02 4.540e+02 6.098e+02 1.137e+03, threshold=9.081e+02, percent-clipped=2.0 +2023-03-28 21:20:42,900 INFO [train.py:892] (0/4) Epoch 27, batch 700, loss[loss=0.1616, simple_loss=0.233, pruned_loss=0.04515, over 19764.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2472, pruned_loss=0.04769, over 3829843.30 frames. ], batch size: 155, lr: 5.82e-03, grad_scale: 16.0 +2023-03-28 21:21:50,364 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4023, 3.5951, 2.1789, 4.2489, 3.7630, 4.2157, 4.2371, 3.2560], + device='cuda:0'), covar=tensor([0.0614, 0.0548, 0.1400, 0.0551, 0.0566, 0.0360, 0.0578, 0.0787], + device='cuda:0'), in_proj_covar=tensor([0.0140, 0.0141, 0.0140, 0.0147, 0.0130, 0.0129, 0.0143, 0.0142], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 21:22:47,998 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48980.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:22:49,433 INFO [train.py:892] (0/4) Epoch 27, batch 750, loss[loss=0.1706, simple_loss=0.2513, pruned_loss=0.04501, over 19745.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2467, pruned_loss=0.04692, over 3856536.82 frames. 
], batch size: 276, lr: 5.82e-03, grad_scale: 16.0 +2023-03-28 21:24:15,723 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8949, 2.8291, 4.8028, 4.0370, 4.4721, 4.7532, 4.6077, 4.4240], + device='cuda:0'), covar=tensor([0.0424, 0.0909, 0.0089, 0.0972, 0.0153, 0.0167, 0.0145, 0.0143], + device='cuda:0'), in_proj_covar=tensor([0.0097, 0.0102, 0.0086, 0.0153, 0.0082, 0.0097, 0.0089, 0.0084], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 21:24:22,759 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7454, 3.0812, 3.1578, 3.7185, 2.6728, 3.1537, 2.2988, 2.3137], + device='cuda:0'), covar=tensor([0.0587, 0.1911, 0.1158, 0.0441, 0.2026, 0.0838, 0.1515, 0.1788], + device='cuda:0'), in_proj_covar=tensor([0.0237, 0.0333, 0.0245, 0.0196, 0.0246, 0.0204, 0.0214, 0.0215], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 21:24:30,198 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-03-28 21:24:30,744 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.511e+02 3.742e+02 4.626e+02 5.475e+02 1.269e+03, threshold=9.252e+02, percent-clipped=2.0 +2023-03-28 21:24:48,705 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49028.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:24:56,453 INFO [train.py:892] (0/4) Epoch 27, batch 800, loss[loss=0.1827, simple_loss=0.2616, pruned_loss=0.05189, over 19712.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2466, pruned_loss=0.04672, over 3875968.02 frames. ], batch size: 305, lr: 5.82e-03, grad_scale: 16.0 +2023-03-28 21:25:41,930 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7820, 4.3943, 4.4725, 4.2278, 4.7346, 3.1178, 3.7898, 2.3201], + device='cuda:0'), covar=tensor([0.0215, 0.0242, 0.0186, 0.0234, 0.0181, 0.1071, 0.0949, 0.1776], + device='cuda:0'), in_proj_covar=tensor([0.0102, 0.0142, 0.0111, 0.0131, 0.0117, 0.0133, 0.0140, 0.0126], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 21:25:51,919 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49053.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:26:10,421 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8348, 3.0990, 3.1484, 3.0132, 2.8933, 2.9435, 2.8552, 3.1641], + device='cuda:0'), covar=tensor([0.0254, 0.0304, 0.0260, 0.0230, 0.0388, 0.0323, 0.0415, 0.0316], + device='cuda:0'), in_proj_covar=tensor([0.0081, 0.0075, 0.0077, 0.0072, 0.0085, 0.0079, 0.0096, 0.0068], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 21:26:58,331 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49079.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:27:02,195 INFO [train.py:892] (0/4) Epoch 27, batch 850, loss[loss=0.1779, simple_loss=0.2545, pruned_loss=0.05068, over 19781.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2477, pruned_loss=0.04711, over 3891825.15 frames. 
], batch size: 263, lr: 5.81e-03, grad_scale: 16.0 +2023-03-28 21:27:50,767 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49101.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:27:50,912 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9691, 4.6797, 4.6816, 4.9659, 4.6457, 5.1732, 5.1323, 5.3107], + device='cuda:0'), covar=tensor([0.0640, 0.0335, 0.0468, 0.0369, 0.0685, 0.0397, 0.0374, 0.0308], + device='cuda:0'), in_proj_covar=tensor([0.0149, 0.0172, 0.0196, 0.0170, 0.0169, 0.0154, 0.0147, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 21:28:42,758 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.493e+02 3.766e+02 4.379e+02 5.390e+02 1.134e+03, threshold=8.757e+02, percent-clipped=1.0 +2023-03-28 21:28:56,202 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49127.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:29:04,462 INFO [train.py:892] (0/4) Epoch 27, batch 900, loss[loss=0.1572, simple_loss=0.2316, pruned_loss=0.04136, over 19810.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2472, pruned_loss=0.04703, over 3906181.49 frames. ], batch size: 47, lr: 5.81e-03, grad_scale: 16.0 +2023-03-28 21:30:36,332 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.48 vs. limit=5.0 +2023-03-28 21:31:01,795 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49177.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:31:10,444 INFO [train.py:892] (0/4) Epoch 27, batch 950, loss[loss=0.165, simple_loss=0.2347, pruned_loss=0.0476, over 19730.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2471, pruned_loss=0.04659, over 3913392.86 frames. ], batch size: 118, lr: 5.81e-03, grad_scale: 16.0 +2023-03-28 21:31:41,723 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49194.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:32:49,455 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.549e+02 3.706e+02 4.158e+02 5.021e+02 8.010e+02, threshold=8.316e+02, percent-clipped=0.0 +2023-03-28 21:33:13,375 INFO [train.py:892] (0/4) Epoch 27, batch 1000, loss[loss=0.1784, simple_loss=0.2488, pruned_loss=0.05403, over 19757.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2462, pruned_loss=0.04626, over 3922089.10 frames. 
], batch size: 273, lr: 5.81e-03, grad_scale: 16.0 +2023-03-28 21:33:25,406 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49236.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:34:24,186 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4779, 2.5457, 4.5158, 3.8493, 4.1369, 4.3771, 4.2678, 4.1231], + device='cuda:0'), covar=tensor([0.0561, 0.1051, 0.0124, 0.0873, 0.0166, 0.0241, 0.0196, 0.0179], + device='cuda:0'), in_proj_covar=tensor([0.0097, 0.0101, 0.0085, 0.0152, 0.0081, 0.0096, 0.0088, 0.0084], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 21:34:33,496 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8305, 2.8418, 2.9564, 2.4626, 2.9335, 2.5869, 2.8698, 2.9278], + device='cuda:0'), covar=tensor([0.0530, 0.0431, 0.0476, 0.0710, 0.0433, 0.0452, 0.0476, 0.0350], + device='cuda:0'), in_proj_covar=tensor([0.0074, 0.0082, 0.0079, 0.0107, 0.0076, 0.0078, 0.0075, 0.0068], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 21:34:33,541 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1216, 2.4672, 2.2122, 1.6580, 2.2971, 2.3782, 2.3175, 2.4423], + device='cuda:0'), covar=tensor([0.0406, 0.0299, 0.0319, 0.0606, 0.0392, 0.0298, 0.0287, 0.0247], + device='cuda:0'), in_proj_covar=tensor([0.0097, 0.0091, 0.0095, 0.0099, 0.0102, 0.0081, 0.0080, 0.0082], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 21:35:10,504 INFO [train.py:892] (0/4) Epoch 27, batch 1050, loss[loss=0.175, simple_loss=0.246, pruned_loss=0.05206, over 19800.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2474, pruned_loss=0.04671, over 3926611.08 frames. ], batch size: 120, lr: 5.80e-03, grad_scale: 16.0 +2023-03-28 21:35:54,657 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49297.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:36:54,128 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.170e+02 3.715e+02 4.308e+02 4.982e+02 8.833e+02, threshold=8.615e+02, percent-clipped=1.0 +2023-03-28 21:37:20,640 INFO [train.py:892] (0/4) Epoch 27, batch 1100, loss[loss=0.1536, simple_loss=0.2287, pruned_loss=0.03924, over 19900.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2467, pruned_loss=0.04668, over 3933101.23 frames. ], batch size: 94, lr: 5.80e-03, grad_scale: 16.0 +2023-03-28 21:37:47,003 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1880, 4.4417, 4.4510, 4.3290, 4.1358, 4.3848, 3.9959, 3.9755], + device='cuda:0'), covar=tensor([0.0510, 0.0487, 0.0518, 0.0476, 0.0648, 0.0591, 0.0680, 0.1004], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0270, 0.0283, 0.0248, 0.0254, 0.0237, 0.0255, 0.0301], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 21:39:23,297 INFO [train.py:892] (0/4) Epoch 27, batch 1150, loss[loss=0.1911, simple_loss=0.2801, pruned_loss=0.05108, over 19521.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2486, pruned_loss=0.04754, over 3933730.39 frames. 
], batch size: 54, lr: 5.80e-03, grad_scale: 16.0 +2023-03-28 21:41:04,926 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.904e+02 3.848e+02 4.568e+02 5.710e+02 9.674e+02, threshold=9.135e+02, percent-clipped=2.0 +2023-03-28 21:41:05,867 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49422.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:41:28,124 INFO [train.py:892] (0/4) Epoch 27, batch 1200, loss[loss=0.1845, simple_loss=0.2532, pruned_loss=0.05786, over 19603.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2481, pruned_loss=0.04746, over 3937090.74 frames. ], batch size: 50, lr: 5.79e-03, grad_scale: 16.0 +2023-03-28 21:41:31,315 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49432.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 21:43:12,814 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-28 21:43:26,672 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.5435, 4.9753, 5.1079, 4.8853, 5.4135, 3.5266, 4.2641, 2.9229], + device='cuda:0'), covar=tensor([0.0165, 0.0193, 0.0127, 0.0164, 0.0136, 0.0820, 0.0945, 0.1301], + device='cuda:0'), in_proj_covar=tensor([0.0102, 0.0143, 0.0112, 0.0132, 0.0117, 0.0133, 0.0140, 0.0126], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 21:43:28,732 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49477.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:43:35,930 INFO [train.py:892] (0/4) Epoch 27, batch 1250, loss[loss=0.1535, simple_loss=0.2299, pruned_loss=0.03856, over 19803.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.247, pruned_loss=0.04689, over 3939613.89 frames. ], batch size: 82, lr: 5.79e-03, grad_scale: 16.0 +2023-03-28 21:43:41,643 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49483.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:44:06,145 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49493.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 21:44:08,473 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49494.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:45:21,358 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.439e+02 3.880e+02 4.551e+02 5.249e+02 9.846e+02, threshold=9.103e+02, percent-clipped=1.0 +2023-03-28 21:45:29,334 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49525.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:45:40,206 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6255, 4.8869, 4.9413, 4.8449, 4.5838, 4.9194, 4.4668, 4.4762], + device='cuda:0'), covar=tensor([0.0510, 0.0516, 0.0513, 0.0472, 0.0638, 0.0560, 0.0719, 0.1057], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0272, 0.0287, 0.0251, 0.0256, 0.0239, 0.0257, 0.0304], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 21:45:44,586 INFO [train.py:892] (0/4) Epoch 27, batch 1300, loss[loss=0.1631, simple_loss=0.2366, pruned_loss=0.04483, over 19678.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2459, pruned_loss=0.04671, over 3943332.76 frames. 
], batch size: 52, lr: 5.79e-03, grad_scale: 16.0 +2023-03-28 21:45:47,743 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3666, 4.2621, 4.7145, 4.2718, 3.9918, 4.5130, 4.2787, 4.7594], + device='cuda:0'), covar=tensor([0.0755, 0.0344, 0.0296, 0.0371, 0.0863, 0.0468, 0.0471, 0.0297], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0220, 0.0218, 0.0231, 0.0206, 0.0234, 0.0228, 0.0212], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 21:46:13,855 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49542.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:46:58,351 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-03-28 21:46:58,412 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-28 21:47:18,831 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4995, 2.4241, 4.3728, 3.7610, 4.2221, 4.4062, 4.2766, 4.1471], + device='cuda:0'), covar=tensor([0.0467, 0.1024, 0.0097, 0.0680, 0.0135, 0.0172, 0.0148, 0.0154], + device='cuda:0'), in_proj_covar=tensor([0.0098, 0.0102, 0.0086, 0.0153, 0.0082, 0.0097, 0.0089, 0.0084], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 21:47:50,721 INFO [train.py:892] (0/4) Epoch 27, batch 1350, loss[loss=0.1795, simple_loss=0.2509, pruned_loss=0.05404, over 19799.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2466, pruned_loss=0.0468, over 3944920.59 frames. ], batch size: 195, lr: 5.78e-03, grad_scale: 16.0 +2023-03-28 21:48:02,890 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9225, 2.7676, 3.1165, 2.7762, 3.2151, 3.1470, 3.7647, 4.1681], + device='cuda:0'), covar=tensor([0.0647, 0.1695, 0.1604, 0.2184, 0.1754, 0.1610, 0.0652, 0.0654], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0242, 0.0268, 0.0256, 0.0297, 0.0258, 0.0232, 0.0254], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 21:48:19,841 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49592.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:48:41,997 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1994, 3.2870, 2.0697, 3.3565, 3.4392, 1.6538, 2.8568, 2.6186], + device='cuda:0'), covar=tensor([0.0832, 0.0779, 0.2664, 0.0811, 0.0593, 0.2545, 0.1116, 0.0966], + device='cuda:0'), in_proj_covar=tensor([0.0230, 0.0252, 0.0228, 0.0266, 0.0247, 0.0200, 0.0236, 0.0192], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 21:49:30,804 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.556e+02 4.075e+02 4.881e+02 5.911e+02 1.218e+03, threshold=9.761e+02, percent-clipped=5.0 +2023-03-28 21:49:51,502 INFO [train.py:892] (0/4) Epoch 27, batch 1400, loss[loss=0.1979, simple_loss=0.2749, pruned_loss=0.06042, over 19698.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.247, pruned_loss=0.04682, over 3945239.03 frames. 
], batch size: 283, lr: 5.78e-03, grad_scale: 16.0 +2023-03-28 21:49:54,061 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2446, 5.5399, 5.5600, 5.4196, 5.2419, 5.5128, 4.9788, 5.0080], + device='cuda:0'), covar=tensor([0.0386, 0.0408, 0.0447, 0.0401, 0.0536, 0.0532, 0.0656, 0.0934], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0271, 0.0287, 0.0251, 0.0256, 0.0239, 0.0257, 0.0304], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 21:50:02,192 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1715, 4.7255, 4.8562, 4.5945, 5.0926, 3.3291, 4.1080, 2.6080], + device='cuda:0'), covar=tensor([0.0162, 0.0194, 0.0131, 0.0164, 0.0118, 0.0861, 0.0833, 0.1411], + device='cuda:0'), in_proj_covar=tensor([0.0102, 0.0143, 0.0112, 0.0132, 0.0117, 0.0133, 0.0140, 0.0126], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 21:51:48,457 INFO [train.py:892] (0/4) Epoch 27, batch 1450, loss[loss=0.1622, simple_loss=0.2408, pruned_loss=0.04184, over 19817.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2466, pruned_loss=0.04649, over 3945669.70 frames. ], batch size: 147, lr: 5.78e-03, grad_scale: 16.0 +2023-03-28 21:51:49,516 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5439, 3.3462, 3.3938, 3.5997, 3.4409, 3.5160, 3.7005, 3.8936], + device='cuda:0'), covar=tensor([0.0732, 0.0505, 0.0565, 0.0414, 0.0727, 0.0645, 0.0491, 0.0330], + device='cuda:0'), in_proj_covar=tensor([0.0149, 0.0173, 0.0196, 0.0170, 0.0170, 0.0154, 0.0146, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 21:53:31,585 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.500e+02 3.941e+02 4.673e+02 5.511e+02 7.393e+02, threshold=9.346e+02, percent-clipped=0.0 +2023-03-28 21:53:37,700 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-03-28 21:53:54,223 INFO [train.py:892] (0/4) Epoch 27, batch 1500, loss[loss=0.1498, simple_loss=0.2299, pruned_loss=0.03484, over 19685.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2464, pruned_loss=0.04649, over 3946456.50 frames. ], batch size: 59, lr: 5.78e-03, grad_scale: 16.0 +2023-03-28 21:53:55,284 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7983, 3.1229, 3.3005, 3.7801, 2.6207, 3.1598, 2.4191, 2.4455], + device='cuda:0'), covar=tensor([0.0538, 0.1748, 0.0932, 0.0397, 0.1914, 0.0792, 0.1377, 0.1582], + device='cuda:0'), in_proj_covar=tensor([0.0235, 0.0330, 0.0243, 0.0196, 0.0243, 0.0203, 0.0213, 0.0213], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 21:55:22,652 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49766.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:55:51,210 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49778.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:55:57,485 INFO [train.py:892] (0/4) Epoch 27, batch 1550, loss[loss=0.1584, simple_loss=0.2366, pruned_loss=0.04007, over 19695.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2466, pruned_loss=0.04657, over 3947137.13 frames. 
], batch size: 101, lr: 5.77e-03, grad_scale: 16.0 +2023-03-28 21:56:16,062 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49788.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 21:57:39,879 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.487e+02 3.954e+02 4.680e+02 6.114e+02 1.039e+03, threshold=9.360e+02, percent-clipped=2.0 +2023-03-28 21:57:52,876 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49827.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 21:58:01,502 INFO [train.py:892] (0/4) Epoch 27, batch 1600, loss[loss=0.1496, simple_loss=0.2275, pruned_loss=0.03588, over 19823.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2464, pruned_loss=0.0463, over 3947840.11 frames. ], batch size: 204, lr: 5.77e-03, grad_scale: 16.0 +2023-03-28 21:58:56,064 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5532, 3.1805, 3.5266, 3.1791, 3.7059, 3.6925, 4.3040, 4.8109], + device='cuda:0'), covar=tensor([0.0527, 0.1534, 0.1393, 0.2093, 0.1777, 0.1406, 0.0584, 0.0455], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0241, 0.0265, 0.0255, 0.0295, 0.0256, 0.0230, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 21:59:58,161 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.60 vs. limit=5.0 +2023-03-28 22:00:08,567 INFO [train.py:892] (0/4) Epoch 27, batch 1650, loss[loss=0.1648, simple_loss=0.2492, pruned_loss=0.04024, over 19638.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.246, pruned_loss=0.04614, over 3947649.47 frames. ], batch size: 68, lr: 5.77e-03, grad_scale: 16.0 +2023-03-28 22:00:36,616 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49892.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:01:18,217 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7338, 3.7493, 2.3672, 4.0067, 4.0831, 1.8394, 3.3366, 3.2245], + device='cuda:0'), covar=tensor([0.0753, 0.0947, 0.2699, 0.0787, 0.0769, 0.2855, 0.1184, 0.0877], + device='cuda:0'), in_proj_covar=tensor([0.0233, 0.0255, 0.0230, 0.0269, 0.0251, 0.0203, 0.0240, 0.0194], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 22:01:18,274 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5909, 2.7506, 3.9863, 3.1477, 3.3452, 3.2100, 2.3298, 2.4760], + device='cuda:0'), covar=tensor([0.1125, 0.3036, 0.0590, 0.1075, 0.1714, 0.1495, 0.2619, 0.2730], + device='cuda:0'), in_proj_covar=tensor([0.0349, 0.0382, 0.0342, 0.0279, 0.0369, 0.0365, 0.0364, 0.0334], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 22:01:49,013 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3030, 5.5682, 5.6043, 5.5257, 5.3042, 5.5717, 5.0457, 5.0890], + device='cuda:0'), covar=tensor([0.0406, 0.0453, 0.0463, 0.0394, 0.0528, 0.0523, 0.0702, 0.0880], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0273, 0.0288, 0.0250, 0.0256, 0.0240, 0.0258, 0.0304], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 22:01:55,265 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.537e+02 3.614e+02 4.321e+02 5.513e+02 1.291e+03, threshold=8.641e+02, percent-clipped=1.0 +2023-03-28 22:02:20,726 INFO 
[train.py:892] (0/4) Epoch 27, batch 1700, loss[loss=0.1835, simple_loss=0.2665, pruned_loss=0.0503, over 19764.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2475, pruned_loss=0.04692, over 3947605.24 frames. ], batch size: 70, lr: 5.76e-03, grad_scale: 16.0 +2023-03-28 22:02:44,435 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49940.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:02:44,815 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7425, 3.2998, 3.5823, 3.0732, 3.9178, 3.9860, 4.4316, 5.0448], + device='cuda:0'), covar=tensor([0.0494, 0.1565, 0.1454, 0.2302, 0.1677, 0.1178, 0.0592, 0.0512], + device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0239, 0.0264, 0.0254, 0.0294, 0.0254, 0.0229, 0.0251], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 22:04:24,973 INFO [train.py:892] (0/4) Epoch 27, batch 1750, loss[loss=0.1718, simple_loss=0.2462, pruned_loss=0.04865, over 19660.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2488, pruned_loss=0.0476, over 3946617.35 frames. ], batch size: 55, lr: 5.76e-03, grad_scale: 16.0 +2023-03-28 22:05:03,604 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-50000.pt +2023-03-28 22:05:10,127 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1159, 2.3724, 2.1789, 1.4739, 2.1207, 2.3291, 2.2336, 2.2734], + device='cuda:0'), covar=tensor([0.0312, 0.0243, 0.0272, 0.0542, 0.0348, 0.0272, 0.0229, 0.0257], + device='cuda:0'), in_proj_covar=tensor([0.0096, 0.0092, 0.0095, 0.0099, 0.0101, 0.0082, 0.0080, 0.0082], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 22:05:43,055 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0947, 4.1171, 2.4543, 4.4853, 4.7248, 2.1017, 3.9590, 3.5539], + device='cuda:0'), covar=tensor([0.0751, 0.0937, 0.2895, 0.0807, 0.0553, 0.2741, 0.0995, 0.0834], + device='cuda:0'), in_proj_covar=tensor([0.0231, 0.0254, 0.0229, 0.0267, 0.0249, 0.0201, 0.0238, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 22:05:54,959 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.637e+02 3.767e+02 4.699e+02 5.313e+02 7.903e+02, threshold=9.399e+02, percent-clipped=0.0 +2023-03-28 22:06:12,894 INFO [train.py:892] (0/4) Epoch 27, batch 1800, loss[loss=0.199, simple_loss=0.2799, pruned_loss=0.05906, over 19760.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2477, pruned_loss=0.04764, over 3947786.93 frames. ], batch size: 226, lr: 5.76e-03, grad_scale: 16.0 +2023-03-28 22:07:54,768 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50078.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:08:00,316 INFO [train.py:892] (0/4) Epoch 27, batch 1850, loss[loss=0.1784, simple_loss=0.2633, pruned_loss=0.04676, over 19816.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2497, pruned_loss=0.04818, over 3947784.22 frames. ], batch size: 57, lr: 5.76e-03, grad_scale: 16.0 +2023-03-28 22:08:08,015 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-27.pt +2023-03-28 22:09:11,374 INFO [train.py:892] (0/4) Epoch 28, batch 0, loss[loss=0.1643, simple_loss=0.2481, pruned_loss=0.04021, over 19941.00 frames. 
], tot_loss[loss=0.1643, simple_loss=0.2481, pruned_loss=0.04021, over 19941.00 frames. ], batch size: 52, lr: 5.65e-03, grad_scale: 16.0 +2023-03-28 22:09:11,376 INFO [train.py:917] (0/4) Computing validation loss +2023-03-28 22:09:45,364 INFO [train.py:926] (0/4) Epoch 28, validation: loss=0.1765, simple_loss=0.2481, pruned_loss=0.05251, over 2883724.00 frames. +2023-03-28 22:09:45,367 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-28 22:09:51,054 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50088.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 22:10:31,015 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5326, 3.4518, 5.0680, 3.8590, 4.1929, 3.8907, 2.7692, 2.9854], + device='cuda:0'), covar=tensor([0.0796, 0.2712, 0.0377, 0.0899, 0.1351, 0.1326, 0.2267, 0.2292], + device='cuda:0'), in_proj_covar=tensor([0.0347, 0.0380, 0.0341, 0.0277, 0.0366, 0.0364, 0.0363, 0.0333], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 22:11:19,949 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.385e+02 4.071e+02 4.813e+02 5.962e+02 1.168e+03, threshold=9.627e+02, percent-clipped=3.0 +2023-03-28 22:11:20,973 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50122.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:11:32,564 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50126.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:11:36,961 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50128.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:11:58,850 INFO [train.py:892] (0/4) Epoch 28, batch 50, loss[loss=0.2093, simple_loss=0.2832, pruned_loss=0.06767, over 19624.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2459, pruned_loss=0.047, over 888470.20 frames. ], batch size: 343, lr: 5.65e-03, grad_scale: 16.0 +2023-03-28 22:11:59,641 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50136.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 22:14:00,315 INFO [train.py:892] (0/4) Epoch 28, batch 100, loss[loss=0.1592, simple_loss=0.2364, pruned_loss=0.04103, over 19861.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2462, pruned_loss=0.04693, over 1566635.92 frames. 
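Annotation: each [train.py:892] entry pairs a per-batch loss[... over N frames] with a running tot_loss[... over ~3.9M frames], and the fractional frame totals (e.g. 1566635.92 above) indicate a decayed, frame-weighted average rather than a plain sum. A sketch assuming a simple exponential decay; the decay constant is a guess, not a value from the log:

```python
class RunningLoss:
    """Frame-weighted, exponentially decayed loss average (a sketch)."""

    def __init__(self, decay: float = 0.999):
        self.decay = decay       # assumed; not printed in the log
        self.loss_sum = 0.0      # decayed sum of loss * frames
        self.frame_sum = 0.0     # decayed sum of frames

    def update(self, batch_loss: float, batch_frames: float) -> None:
        self.loss_sum = self.decay * self.loss_sum + batch_loss * batch_frames
        self.frame_sum = self.decay * self.frame_sum + batch_frames

    @property
    def tot_loss(self) -> float:
        return self.loss_sum / max(self.frame_sum, 1.0)


tracker = RunningLoss()
tracker.update(batch_loss=0.1643, batch_frames=19941.0)
print(f"tot_loss={tracker.tot_loss:.4f} over {tracker.frame_sum:.2f} frames")
```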
], batch size: 115, lr: 5.64e-03, grad_scale: 32.0 +2023-03-28 22:14:01,342 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1109, 2.4852, 2.2730, 1.6542, 2.3288, 2.4295, 2.4220, 2.3973], + device='cuda:0'), covar=tensor([0.0408, 0.0335, 0.0315, 0.0645, 0.0395, 0.0316, 0.0300, 0.0312], + device='cuda:0'), in_proj_covar=tensor([0.0098, 0.0092, 0.0096, 0.0100, 0.0102, 0.0083, 0.0081, 0.0083], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 22:14:07,894 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50189.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:14:18,270 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1779, 5.4223, 5.6313, 5.3179, 5.4514, 5.2801, 5.3056, 5.0869], + device='cuda:0'), covar=tensor([0.1370, 0.1156, 0.0881, 0.1223, 0.0652, 0.0816, 0.1845, 0.1951], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0323, 0.0367, 0.0295, 0.0273, 0.0272, 0.0354, 0.0391], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-28 22:14:24,010 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50197.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 22:14:25,948 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.0496, 2.1918, 2.2088, 1.8943, 2.3146, 1.9903, 2.2194, 2.2498], + device='cuda:0'), covar=tensor([0.0463, 0.0491, 0.0466, 0.0871, 0.0387, 0.0513, 0.0469, 0.0334], + device='cuda:0'), in_proj_covar=tensor([0.0075, 0.0082, 0.0081, 0.0108, 0.0076, 0.0079, 0.0076, 0.0069], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 22:15:23,506 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.658e+02 4.112e+02 4.923e+02 6.330e+02 1.508e+03, threshold=9.845e+02, percent-clipped=2.0 +2023-03-28 22:15:54,979 INFO [train.py:892] (0/4) Epoch 28, batch 150, loss[loss=0.2648, simple_loss=0.3378, pruned_loss=0.09584, over 19443.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2463, pruned_loss=0.04704, over 2094457.40 frames. ], batch size: 396, lr: 5.64e-03, grad_scale: 32.0 +2023-03-28 22:16:49,883 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50258.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 22:17:57,283 INFO [train.py:892] (0/4) Epoch 28, batch 200, loss[loss=0.1593, simple_loss=0.2479, pruned_loss=0.03533, over 19805.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2455, pruned_loss=0.04629, over 2505553.41 frames. 
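Annotation: the printed lr decays with both batch and epoch count (5.77e-03 late in epoch 27, 5.65e-03 at the start of epoch 28, drifting to 5.64e-03 here). These values are reproduced by an Eden-style schedule using the run's configuration (base_lr=0.05, lr_batches=5000, lr_epochs=3.5) if the epoch factor counts completed epochs; treat the formula below as an inference from the logged numbers, not a quote of the code:

```python
def eden_lr(base_lr: float, batch: float, epoch: float,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    # Both factors decay smoothly toward batch**-0.5 / epoch**-0.5 behavior.
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor


# Start of epoch 28 (batch_count ~50088, 27 completed epochs):
print(f"{eden_lr(0.05, batch=50088, epoch=27):.2e}")  # 5.65e-03, as logged
```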
], batch size: 65, lr: 5.64e-03, grad_scale: 32.0 +2023-03-28 22:18:22,236 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8506, 4.0843, 4.2314, 5.0191, 3.2453, 3.6529, 3.1580, 2.9736], + device='cuda:0'), covar=tensor([0.0456, 0.2041, 0.0811, 0.0303, 0.2119, 0.1054, 0.1276, 0.1767], + device='cuda:0'), in_proj_covar=tensor([0.0239, 0.0332, 0.0245, 0.0197, 0.0247, 0.0204, 0.0214, 0.0216], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 22:19:26,387 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.556e+02 4.059e+02 4.750e+02 5.729e+02 1.085e+03, threshold=9.500e+02, percent-clipped=1.0 +2023-03-28 22:19:43,846 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2198, 2.9713, 3.2828, 2.8450, 3.4551, 3.3720, 4.0638, 4.5479], + device='cuda:0'), covar=tensor([0.0538, 0.1621, 0.1496, 0.2210, 0.1623, 0.1459, 0.0574, 0.0485], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0240, 0.0265, 0.0255, 0.0295, 0.0255, 0.0231, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 22:20:01,048 INFO [train.py:892] (0/4) Epoch 28, batch 250, loss[loss=0.1476, simple_loss=0.2207, pruned_loss=0.03725, over 19805.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.246, pruned_loss=0.04629, over 2825130.67 frames. ], batch size: 67, lr: 5.64e-03, grad_scale: 16.0 +2023-03-28 22:21:44,440 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4886, 2.6711, 4.4219, 3.8810, 4.2363, 4.3404, 4.2451, 4.0713], + device='cuda:0'), covar=tensor([0.0474, 0.0898, 0.0099, 0.0737, 0.0129, 0.0229, 0.0172, 0.0167], + device='cuda:0'), in_proj_covar=tensor([0.0097, 0.0102, 0.0085, 0.0152, 0.0082, 0.0096, 0.0088, 0.0084], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 22:22:07,515 INFO [train.py:892] (0/4) Epoch 28, batch 300, loss[loss=0.1895, simple_loss=0.2544, pruned_loss=0.06232, over 19809.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.246, pruned_loss=0.04609, over 3072850.29 frames. ], batch size: 202, lr: 5.63e-03, grad_scale: 16.0 +2023-03-28 22:23:39,149 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50422.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:23:40,966 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.722e+02 4.064e+02 4.778e+02 6.064e+02 1.211e+03, threshold=9.555e+02, percent-clipped=3.0 +2023-03-28 22:24:15,468 INFO [train.py:892] (0/4) Epoch 28, batch 350, loss[loss=0.1484, simple_loss=0.233, pruned_loss=0.03188, over 19846.00 frames. ], tot_loss[loss=0.1675, simple_loss=0.2445, pruned_loss=0.04525, over 3269095.13 frames. ], batch size: 59, lr: 5.63e-03, grad_scale: 16.0 +2023-03-28 22:25:38,135 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50470.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:26:16,866 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50484.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:26:20,294 INFO [train.py:892] (0/4) Epoch 28, batch 400, loss[loss=0.1604, simple_loss=0.2371, pruned_loss=0.04185, over 19816.00 frames. ], tot_loss[loss=0.1675, simple_loss=0.2445, pruned_loss=0.04528, over 3421136.58 frames. 
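Annotation: the [zipformer.py:1454] attn_weights_entropy dumps print eight values per tensor, matching the configured nhead=8, alongside covariance diagnostics. A self-contained illustration of a per-head entropy statistic in that spirit; the shapes and reduction are assumptions, not the module's actual code:

```python
import torch


def attn_weights_entropy(attn: torch.Tensor) -> torch.Tensor:
    """attn: (num_heads, query_len, key_len), each row summing to 1.
    Returns the mean entropy of each head's attention distribution."""
    ent = -(attn * (attn + 1e-20).log()).sum(dim=-1)  # entropy per query
    return ent.mean(dim=-1)  # one value per head, as in the dumps


attn = torch.softmax(torch.randn(8, 50, 50), dim=-1)
print(attn_weights_entropy(attn))  # near log(50) ~= 3.9 for diffuse heads
```

Low entropy for a head means sharply peaked attention; values near log(key_len) mean near-uniform attention, so the dumps track how focused each head has become.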
], batch size: 181, lr: 5.63e-03, grad_scale: 16.0 +2023-03-28 22:27:29,035 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6684, 2.9972, 3.2154, 3.5454, 2.5256, 3.0470, 2.3181, 2.3692], + device='cuda:0'), covar=tensor([0.0557, 0.1791, 0.1032, 0.0476, 0.2149, 0.0856, 0.1453, 0.1756], + device='cuda:0'), in_proj_covar=tensor([0.0241, 0.0335, 0.0247, 0.0198, 0.0248, 0.0207, 0.0216, 0.0218], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 22:27:37,386 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7750, 4.4177, 4.5358, 4.2645, 4.7060, 3.1874, 3.9804, 2.2560], + device='cuda:0'), covar=tensor([0.0161, 0.0219, 0.0126, 0.0178, 0.0124, 0.0965, 0.0697, 0.1459], + device='cuda:0'), in_proj_covar=tensor([0.0103, 0.0143, 0.0112, 0.0132, 0.0117, 0.0133, 0.0139, 0.0126], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 22:27:49,848 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.616e+02 4.023e+02 4.684e+02 5.835e+02 1.086e+03, threshold=9.368e+02, percent-clipped=2.0 +2023-03-28 22:28:11,255 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9077, 3.1111, 3.1755, 3.1670, 2.9986, 3.1326, 2.9152, 3.3227], + device='cuda:0'), covar=tensor([0.0271, 0.0308, 0.0304, 0.0202, 0.0321, 0.0250, 0.0397, 0.0277], + device='cuda:0'), in_proj_covar=tensor([0.0081, 0.0075, 0.0078, 0.0071, 0.0085, 0.0078, 0.0096, 0.0068], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 22:28:16,117 INFO [train.py:892] (0/4) Epoch 28, batch 450, loss[loss=0.1458, simple_loss=0.2261, pruned_loss=0.03281, over 19857.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2447, pruned_loss=0.04554, over 3539188.07 frames. ], batch size: 118, lr: 5.63e-03, grad_scale: 16.0 +2023-03-28 22:28:42,763 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50546.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:28:45,030 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0271, 2.7314, 3.1615, 2.7531, 3.2631, 3.1869, 3.8187, 4.2555], + device='cuda:0'), covar=tensor([0.0580, 0.1841, 0.1615, 0.2202, 0.1813, 0.1706, 0.0629, 0.0604], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0242, 0.0267, 0.0256, 0.0297, 0.0257, 0.0232, 0.0255], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 22:29:01,473 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50553.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 22:29:57,214 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-03-28 22:30:25,710 INFO [train.py:892] (0/4) Epoch 28, batch 500, loss[loss=0.1675, simple_loss=0.2348, pruned_loss=0.05007, over 19794.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.2445, pruned_loss=0.04556, over 3631239.97 frames. ], batch size: 172, lr: 5.62e-03, grad_scale: 16.0 +2023-03-28 22:31:18,853 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50607.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:31:40,083 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.39 vs. 
limit=5.0 +2023-03-28 22:31:59,939 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.484e+02 3.842e+02 4.586e+02 5.688e+02 1.000e+03, threshold=9.173e+02, percent-clipped=1.0 +2023-03-28 22:32:20,171 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8146, 2.7004, 4.7550, 4.0652, 4.5187, 4.6936, 4.5887, 4.3478], + device='cuda:0'), covar=tensor([0.0442, 0.0961, 0.0097, 0.0809, 0.0131, 0.0182, 0.0141, 0.0148], + device='cuda:0'), in_proj_covar=tensor([0.0098, 0.0102, 0.0086, 0.0152, 0.0083, 0.0096, 0.0088, 0.0084], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 22:32:30,993 INFO [train.py:892] (0/4) Epoch 28, batch 550, loss[loss=0.1637, simple_loss=0.2384, pruned_loss=0.04448, over 19631.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2452, pruned_loss=0.04594, over 3701765.75 frames. ], batch size: 68, lr: 5.62e-03, grad_scale: 16.0 +2023-03-28 22:34:36,333 INFO [train.py:892] (0/4) Epoch 28, batch 600, loss[loss=0.1905, simple_loss=0.2619, pruned_loss=0.05959, over 19850.00 frames. ], tot_loss[loss=0.1675, simple_loss=0.2443, pruned_loss=0.04537, over 3757220.90 frames. ], batch size: 197, lr: 5.62e-03, grad_scale: 16.0 +2023-03-28 22:34:52,964 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9430, 2.4446, 3.8307, 3.3166, 3.7602, 3.9079, 3.7274, 3.6029], + device='cuda:0'), covar=tensor([0.0588, 0.0945, 0.0125, 0.0614, 0.0169, 0.0222, 0.0173, 0.0186], + device='cuda:0'), in_proj_covar=tensor([0.0098, 0.0102, 0.0086, 0.0153, 0.0083, 0.0096, 0.0089, 0.0085], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 22:36:08,580 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.507e+02 3.618e+02 4.238e+02 5.077e+02 1.172e+03, threshold=8.476e+02, percent-clipped=2.0 +2023-03-28 22:36:41,995 INFO [train.py:892] (0/4) Epoch 28, batch 650, loss[loss=0.2471, simple_loss=0.3154, pruned_loss=0.08936, over 19457.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2447, pruned_loss=0.04584, over 3798052.90 frames. ], batch size: 396, lr: 5.61e-03, grad_scale: 16.0 +2023-03-28 22:38:38,019 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50782.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:38:42,894 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50784.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:38:46,704 INFO [train.py:892] (0/4) Epoch 28, batch 700, loss[loss=0.1814, simple_loss=0.2602, pruned_loss=0.05129, over 19821.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2464, pruned_loss=0.04651, over 3832263.88 frames. ], batch size: 50, lr: 5.61e-03, grad_scale: 16.0 +2023-03-28 22:39:41,643 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.57 vs. 
limit=5.0 +2023-03-28 22:39:44,960 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50808.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 22:40:20,257 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.436e+02 3.963e+02 4.473e+02 5.641e+02 1.140e+03, threshold=8.946e+02, percent-clipped=3.0 +2023-03-28 22:40:43,849 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50832.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:40:51,130 INFO [train.py:892] (0/4) Epoch 28, batch 750, loss[loss=0.1779, simple_loss=0.2601, pruned_loss=0.04788, over 19807.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.246, pruned_loss=0.04611, over 3859058.21 frames. ], batch size: 224, lr: 5.61e-03, grad_scale: 16.0 +2023-03-28 22:41:08,943 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50843.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:41:32,811 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50853.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 22:42:13,268 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50869.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 22:42:55,246 INFO [train.py:892] (0/4) Epoch 28, batch 800, loss[loss=0.1487, simple_loss=0.2162, pruned_loss=0.0406, over 19866.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2449, pruned_loss=0.04558, over 3879771.96 frames. ], batch size: 129, lr: 5.61e-03, grad_scale: 16.0 +2023-03-28 22:43:31,259 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50901.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 22:43:33,263 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50902.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:44:24,107 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.705e+02 4.018e+02 4.822e+02 5.652e+02 1.134e+03, threshold=9.644e+02, percent-clipped=5.0 +2023-03-28 22:44:54,704 INFO [train.py:892] (0/4) Epoch 28, batch 850, loss[loss=0.1419, simple_loss=0.2208, pruned_loss=0.03149, over 19738.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2448, pruned_loss=0.04568, over 3895405.49 frames. ], batch size: 106, lr: 5.60e-03, grad_scale: 16.0 +2023-03-28 22:45:20,245 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50946.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:46:58,577 INFO [train.py:892] (0/4) Epoch 28, batch 900, loss[loss=0.1518, simple_loss=0.2262, pruned_loss=0.03872, over 19843.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.245, pruned_loss=0.04599, over 3907256.40 frames. ], batch size: 142, lr: 5.60e-03, grad_scale: 16.0 +2023-03-28 22:47:51,192 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51007.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:48:08,674 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. 
limit=2.0 +2023-03-28 22:48:31,172 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.571e+02 3.452e+02 4.322e+02 5.014e+02 9.389e+02, threshold=8.643e+02, percent-clipped=0.0 +2023-03-28 22:48:54,411 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4851, 3.8357, 3.8388, 4.5833, 3.0496, 3.2628, 2.9453, 2.7355], + device='cuda:0'), covar=tensor([0.0532, 0.1985, 0.0938, 0.0373, 0.2129, 0.1131, 0.1319, 0.1738], + device='cuda:0'), in_proj_covar=tensor([0.0238, 0.0331, 0.0245, 0.0197, 0.0246, 0.0205, 0.0215, 0.0214], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 22:49:04,813 INFO [train.py:892] (0/4) Epoch 28, batch 950, loss[loss=0.1569, simple_loss=0.2203, pruned_loss=0.04678, over 19830.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2452, pruned_loss=0.04581, over 3918044.65 frames. ], batch size: 166, lr: 5.60e-03, grad_scale: 16.0 +2023-03-28 22:49:09,157 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.67 vs. limit=5.0 +2023-03-28 22:50:10,896 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51062.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:51:04,959 INFO [train.py:892] (0/4) Epoch 28, batch 1000, loss[loss=0.1844, simple_loss=0.2612, pruned_loss=0.05378, over 19763.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2452, pruned_loss=0.04566, over 3925687.12 frames. ], batch size: 253, lr: 5.60e-03, grad_scale: 16.0 +2023-03-28 22:52:34,540 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.807e+02 3.916e+02 4.760e+02 5.853e+02 1.174e+03, threshold=9.520e+02, percent-clipped=7.0 +2023-03-28 22:52:35,800 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51123.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:52:53,998 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5971, 3.1450, 3.5382, 3.1493, 3.7719, 3.7909, 4.4506, 4.8833], + device='cuda:0'), covar=tensor([0.0567, 0.1751, 0.1612, 0.2187, 0.1672, 0.1364, 0.0562, 0.0593], + device='cuda:0'), in_proj_covar=tensor([0.0249, 0.0239, 0.0263, 0.0252, 0.0294, 0.0252, 0.0228, 0.0249], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 22:53:05,987 INFO [train.py:892] (0/4) Epoch 28, batch 1050, loss[loss=0.1533, simple_loss=0.2278, pruned_loss=0.03937, over 19838.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2452, pruned_loss=0.04582, over 3931971.83 frames. ], batch size: 115, lr: 5.59e-03, grad_scale: 16.0 +2023-03-28 22:53:11,864 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51138.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:54:08,771 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51164.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 22:54:25,316 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51172.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 22:55:00,670 INFO [train.py:892] (0/4) Epoch 28, batch 1100, loss[loss=0.1484, simple_loss=0.2335, pruned_loss=0.03163, over 19597.00 frames. ], tot_loss[loss=0.1675, simple_loss=0.2445, pruned_loss=0.04532, over 3937430.84 frames. 
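Annotation: [scaling.py:679] periodically compares a per-group whitening metric against a fixed limit (1.44 vs. 2.0 and 4.67 vs. 5.0 above), with a constraint presumably applied only when the metric exceeds the limit. One plausible form of the statistic, which equals 1.0 for perfectly white (isotropic) features, is sketched below; the exact formula in scaling.py may differ:

```python
import torch


def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
    """x: (num_frames, num_channels); channels split into equal groups.
    Ratio of mean squared covariance eigenvalue to squared mean
    eigenvalue, averaged over groups; 1.0 means fully white."""
    n, c = x.shape
    g = x.reshape(n, num_groups, c // num_groups).transpose(0, 1)
    g = g - g.mean(dim=1, keepdim=True)
    cov = g.transpose(1, 2) @ g / n            # (groups, c/g, c/g)
    eigs = torch.linalg.eigvalsh(cov)          # (groups, c/g)
    ratio = (eigs ** 2).mean(dim=1) / eigs.mean(dim=1).clamp(min=1e-20) ** 2
    return ratio.mean().item()


x = torch.randn(2000, 96)       # already-white input
print(whitening_metric(x, 8))   # ~1.0, comfortably under limit=2.0
```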
], batch size: 44, lr: 5.59e-03, grad_scale: 16.0 +2023-03-28 22:55:39,917 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51202.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:56:23,853 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-03-28 22:56:34,345 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.745e+02 3.704e+02 4.632e+02 5.921e+02 1.286e+03, threshold=9.265e+02, percent-clipped=1.0 +2023-03-28 22:56:57,489 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51233.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 22:57:02,657 INFO [train.py:892] (0/4) Epoch 28, batch 1150, loss[loss=0.1726, simple_loss=0.2457, pruned_loss=0.04976, over 19737.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2437, pruned_loss=0.04514, over 3941039.13 frames. ], batch size: 76, lr: 5.59e-03, grad_scale: 16.0 +2023-03-28 22:57:42,615 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51250.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 22:59:16,802 INFO [train.py:892] (0/4) Epoch 28, batch 1200, loss[loss=0.1947, simple_loss=0.2652, pruned_loss=0.06207, over 19801.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2453, pruned_loss=0.04586, over 3940015.07 frames. ], batch size: 288, lr: 5.58e-03, grad_scale: 16.0 +2023-03-28 22:59:31,332 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.81 vs. limit=5.0 +2023-03-28 22:59:56,132 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51302.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:00:45,086 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.551e+02 3.659e+02 4.255e+02 5.414e+02 8.940e+02, threshold=8.510e+02, percent-clipped=0.0 +2023-03-28 23:01:16,078 INFO [train.py:892] (0/4) Epoch 28, batch 1250, loss[loss=0.1473, simple_loss=0.2188, pruned_loss=0.03789, over 19800.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.245, pruned_loss=0.04568, over 3942285.02 frames. ], batch size: 150, lr: 5.58e-03, grad_scale: 16.0 +2023-03-28 23:02:50,945 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4254, 2.5043, 2.5090, 1.8290, 2.6053, 2.1697, 2.5766, 2.4346], + device='cuda:0'), covar=tensor([0.0516, 0.0446, 0.0515, 0.1058, 0.0405, 0.0534, 0.0428, 0.0392], + device='cuda:0'), in_proj_covar=tensor([0.0075, 0.0082, 0.0081, 0.0108, 0.0076, 0.0079, 0.0076, 0.0069], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 23:03:06,371 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51382.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:03:13,701 INFO [train.py:892] (0/4) Epoch 28, batch 1300, loss[loss=0.1605, simple_loss=0.2268, pruned_loss=0.04707, over 19605.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2453, pruned_loss=0.04588, over 3944236.17 frames. 
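Annotation: the [zipformer.py:625] lines trace stochastic layer skipping. Each encoder stack has a warmup window in batch counts (e.g. warmup_begin=1333.3, warmup_end=2000.0), yet drops such as num_to_drop=1, layers_to_drop={3} still occur at batch_count=51233, far past every window, so a small baseline drop probability evidently remains after warmup. A sketch with illustrative probabilities; the actual rates and schedule shape are assumptions:

```python
import random


def pick_layers_to_drop(num_layers, batch_count, warmup_begin, warmup_end,
                        warmup_prob=0.5,      # assumed in-warmup rate
                        baseline_prob=0.075,  # assumed post-warmup rate
                        rng=random):
    """Independently drop each layer; drop more aggressively in warmup."""
    in_warmup = warmup_begin <= batch_count < warmup_end
    p = warmup_prob if in_warmup else baseline_prob
    return {i for i in range(num_layers) if rng.random() < p}


drop = pick_layers_to_drop(num_layers=4, batch_count=51233.0,
                           warmup_begin=666.7, warmup_end=1333.3)
print(f"num_to_drop={len(drop)}, layers_to_drop={drop}")
```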
], batch size: 46, lr: 5.58e-03, grad_scale: 16.0 +2023-03-28 23:03:30,039 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51392.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:04:35,493 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51418.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:04:45,653 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.672e+02 4.378e+02 5.169e+02 6.239e+02 1.127e+03, threshold=1.034e+03, percent-clipped=3.0 +2023-03-28 23:05:20,067 INFO [train.py:892] (0/4) Epoch 28, batch 1350, loss[loss=0.1455, simple_loss=0.2196, pruned_loss=0.03563, over 19639.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2448, pruned_loss=0.04556, over 3946831.27 frames. ], batch size: 47, lr: 5.58e-03, grad_scale: 16.0 +2023-03-28 23:05:25,239 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51438.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:05:36,591 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51443.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:05:59,130 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51453.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:06:25,372 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51464.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 23:07:15,811 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-03-28 23:07:22,855 INFO [train.py:892] (0/4) Epoch 28, batch 1400, loss[loss=0.1902, simple_loss=0.2669, pruned_loss=0.05676, over 19732.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2469, pruned_loss=0.04696, over 3946188.31 frames. ], batch size: 310, lr: 5.57e-03, grad_scale: 16.0 +2023-03-28 23:07:23,693 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51486.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:07:44,030 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51495.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:08:23,135 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51512.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 23:08:48,583 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.195e+02 3.889e+02 4.620e+02 5.536e+02 9.901e+02, threshold=9.240e+02, percent-clipped=0.0 +2023-03-28 23:09:01,327 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51528.0, num_to_drop=1, layers_to_drop={3} +2023-03-28 23:09:20,616 INFO [train.py:892] (0/4) Epoch 28, batch 1450, loss[loss=0.1735, simple_loss=0.2576, pruned_loss=0.04472, over 19885.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2463, pruned_loss=0.04634, over 3947407.08 frames. ], batch size: 84, lr: 5.57e-03, grad_scale: 16.0 +2023-03-28 23:09:29,972 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1918, 4.0650, 4.4969, 4.0797, 3.8420, 4.3324, 4.1786, 4.5473], + device='cuda:0'), covar=tensor([0.0820, 0.0393, 0.0400, 0.0433, 0.1013, 0.0596, 0.0474, 0.0384], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0220, 0.0220, 0.0233, 0.0205, 0.0238, 0.0231, 0.0213], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 23:10:03,014 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. 
limit=2.0 +2023-03-28 23:10:13,833 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51556.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:10:51,427 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-03-28 23:11:27,585 INFO [train.py:892] (0/4) Epoch 28, batch 1500, loss[loss=0.1543, simple_loss=0.2373, pruned_loss=0.03563, over 19774.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2458, pruned_loss=0.04625, over 3949015.55 frames. ], batch size: 52, lr: 5.57e-03, grad_scale: 16.0 +2023-03-28 23:12:06,532 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51602.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:13:00,842 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.657e+02 3.781e+02 4.719e+02 5.439e+02 9.463e+02, threshold=9.439e+02, percent-clipped=1.0 +2023-03-28 23:13:30,302 INFO [train.py:892] (0/4) Epoch 28, batch 1550, loss[loss=0.1882, simple_loss=0.2709, pruned_loss=0.05273, over 19764.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2451, pruned_loss=0.0459, over 3949428.15 frames. ], batch size: 49, lr: 5.57e-03, grad_scale: 16.0 +2023-03-28 23:14:07,327 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51650.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:14:38,586 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4651, 4.1881, 4.2024, 4.4709, 4.1613, 4.5516, 4.5596, 4.7469], + device='cuda:0'), covar=tensor([0.0614, 0.0397, 0.0531, 0.0401, 0.0710, 0.0439, 0.0413, 0.0304], + device='cuda:0'), in_proj_covar=tensor([0.0147, 0.0172, 0.0198, 0.0171, 0.0168, 0.0153, 0.0146, 0.0194], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 23:15:34,972 INFO [train.py:892] (0/4) Epoch 28, batch 1600, loss[loss=0.1627, simple_loss=0.2398, pruned_loss=0.04281, over 19810.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2443, pruned_loss=0.04549, over 3950903.10 frames. ], batch size: 72, lr: 5.56e-03, grad_scale: 16.0 +2023-03-28 23:16:51,836 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51718.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:17:03,720 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.853e+02 3.747e+02 4.417e+02 5.191e+02 1.106e+03, threshold=8.834e+02, percent-clipped=1.0 +2023-03-28 23:17:34,753 INFO [train.py:892] (0/4) Epoch 28, batch 1650, loss[loss=0.1546, simple_loss=0.2253, pruned_loss=0.04199, over 19838.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2442, pruned_loss=0.04532, over 3951126.52 frames. ], batch size: 144, lr: 5.56e-03, grad_scale: 16.0 +2023-03-28 23:17:40,414 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51738.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:18:00,955 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51747.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:18:03,177 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51748.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:18:50,691 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51766.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:19:37,685 INFO [train.py:892] (0/4) Epoch 28, batch 1700, loss[loss=0.1792, simple_loss=0.2567, pruned_loss=0.05087, over 19735.00 frames. 
], tot_loss[loss=0.1682, simple_loss=0.2456, pruned_loss=0.04546, over 3948954.13 frames. ], batch size: 77, lr: 5.56e-03, grad_scale: 16.0 +2023-03-28 23:20:13,505 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5625, 4.3132, 4.3001, 4.5907, 4.3052, 4.7075, 4.6729, 4.8298], + device='cuda:0'), covar=tensor([0.0581, 0.0376, 0.0485, 0.0359, 0.0671, 0.0417, 0.0447, 0.0304], + device='cuda:0'), in_proj_covar=tensor([0.0148, 0.0173, 0.0199, 0.0172, 0.0170, 0.0155, 0.0147, 0.0195], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-28 23:20:27,273 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-28 23:20:35,779 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51808.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:20:55,298 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3870, 4.8880, 4.9835, 4.6925, 5.2319, 3.1738, 4.0977, 2.9219], + device='cuda:0'), covar=tensor([0.0139, 0.0190, 0.0121, 0.0187, 0.0130, 0.1000, 0.1046, 0.1314], + device='cuda:0'), in_proj_covar=tensor([0.0105, 0.0144, 0.0113, 0.0134, 0.0119, 0.0135, 0.0142, 0.0128], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 23:21:06,662 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.935e+02 3.915e+02 4.350e+02 5.709e+02 1.199e+03, threshold=8.700e+02, percent-clipped=1.0 +2023-03-28 23:21:17,015 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51828.0, num_to_drop=1, layers_to_drop={2} +2023-03-28 23:21:33,039 INFO [train.py:892] (0/4) Epoch 28, batch 1750, loss[loss=0.1567, simple_loss=0.2258, pruned_loss=0.04379, over 19804.00 frames. ], tot_loss[loss=0.168, simple_loss=0.245, pruned_loss=0.04545, over 3949837.82 frames. ], batch size: 126, lr: 5.55e-03, grad_scale: 16.0 +2023-03-28 23:22:05,843 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51851.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:22:22,771 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7357, 2.8187, 4.0932, 3.2055, 3.4608, 3.2407, 2.3233, 2.4882], + device='cuda:0'), covar=tensor([0.0977, 0.2987, 0.0595, 0.1056, 0.1616, 0.1419, 0.2490, 0.2749], + device='cuda:0'), in_proj_covar=tensor([0.0351, 0.0385, 0.0344, 0.0283, 0.0371, 0.0370, 0.0369, 0.0338], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 23:22:53,471 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51876.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 23:23:13,398 INFO [train.py:892] (0/4) Epoch 28, batch 1800, loss[loss=0.1716, simple_loss=0.2525, pruned_loss=0.0453, over 19718.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2449, pruned_loss=0.04544, over 3948187.87 frames. ], batch size: 62, lr: 5.55e-03, grad_scale: 16.0 +2023-03-28 23:23:18,928 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.83 vs. 
limit=5.0 +2023-03-28 23:23:41,625 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.8305, 1.8112, 1.9413, 1.8339, 1.8309, 1.9476, 1.8220, 1.8604], + device='cuda:0'), covar=tensor([0.0340, 0.0316, 0.0307, 0.0333, 0.0460, 0.0298, 0.0441, 0.0308], + device='cuda:0'), in_proj_covar=tensor([0.0082, 0.0076, 0.0079, 0.0073, 0.0087, 0.0079, 0.0096, 0.0069], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 23:23:50,307 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8574, 3.1565, 3.3649, 3.8203, 2.6122, 3.1712, 2.4112, 2.4169], + device='cuda:0'), covar=tensor([0.0605, 0.1902, 0.0926, 0.0444, 0.2067, 0.0850, 0.1423, 0.1751], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0332, 0.0246, 0.0199, 0.0246, 0.0206, 0.0215, 0.0215], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 23:24:26,510 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.458e+02 3.828e+02 4.368e+02 5.479e+02 1.522e+03, threshold=8.737e+02, percent-clipped=1.0 +2023-03-28 23:24:33,059 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3069, 4.7861, 4.8862, 4.6396, 5.1821, 3.4143, 4.1514, 2.6767], + device='cuda:0'), covar=tensor([0.0184, 0.0208, 0.0149, 0.0202, 0.0146, 0.0836, 0.1001, 0.1564], + device='cuda:0'), in_proj_covar=tensor([0.0105, 0.0144, 0.0113, 0.0134, 0.0119, 0.0134, 0.0142, 0.0128], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 23:24:52,034 INFO [train.py:892] (0/4) Epoch 28, batch 1850, loss[loss=0.1667, simple_loss=0.263, pruned_loss=0.03524, over 19822.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2473, pruned_loss=0.0459, over 3946881.97 frames. ], batch size: 57, lr: 5.55e-03, grad_scale: 16.0 +2023-03-28 23:24:59,910 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-28.pt +2023-03-28 23:25:59,226 INFO [train.py:892] (0/4) Epoch 29, batch 0, loss[loss=0.1679, simple_loss=0.2428, pruned_loss=0.04645, over 19800.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2428, pruned_loss=0.04645, over 19800.00 frames. ], batch size: 200, lr: 5.45e-03, grad_scale: 16.0 +2023-03-28 23:25:59,228 INFO [train.py:917] (0/4) Computing validation loss +2023-03-28 23:26:37,599 INFO [train.py:926] (0/4) Epoch 29, validation: loss=0.1782, simple_loss=0.2489, pruned_loss=0.05378, over 2883724.00 frames. +2023-03-28 23:26:37,602 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-28 23:28:41,181 INFO [train.py:892] (0/4) Epoch 29, batch 50, loss[loss=0.1577, simple_loss=0.2393, pruned_loss=0.03808, over 19763.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2346, pruned_loss=0.04135, over 891989.70 frames. ], batch size: 88, lr: 5.45e-03, grad_scale: 16.0 +2023-03-28 23:29:03,653 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-52000.pt +2023-03-28 23:29:49,302 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.65 vs. 
limit=5.0 +2023-03-28 23:30:05,631 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.398e+02 3.680e+02 4.499e+02 5.217e+02 8.656e+02, threshold=8.998e+02, percent-clipped=0.0 +2023-03-28 23:30:16,598 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5995, 2.1008, 2.5689, 2.8712, 3.3117, 3.3619, 3.3677, 3.3320], + device='cuda:0'), covar=tensor([0.1102, 0.1801, 0.1307, 0.0751, 0.0520, 0.0378, 0.0463, 0.0513], + device='cuda:0'), in_proj_covar=tensor([0.0159, 0.0171, 0.0178, 0.0151, 0.0135, 0.0130, 0.0122, 0.0115], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-28 23:30:44,485 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52038.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:30:48,782 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52040.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:30:50,315 INFO [train.py:892] (0/4) Epoch 29, batch 100, loss[loss=0.1395, simple_loss=0.2148, pruned_loss=0.03213, over 19866.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.2371, pruned_loss=0.04203, over 1570711.68 frames. ], batch size: 154, lr: 5.45e-03, grad_scale: 16.0 +2023-03-28 23:31:09,916 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52048.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:32:05,859 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2306, 3.0990, 4.9799, 4.2206, 4.7541, 4.9854, 4.9414, 4.5131], + device='cuda:0'), covar=tensor([0.0404, 0.0830, 0.0100, 0.0963, 0.0127, 0.0154, 0.0120, 0.0143], + device='cuda:0'), in_proj_covar=tensor([0.0099, 0.0102, 0.0086, 0.0153, 0.0084, 0.0097, 0.0090, 0.0086], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 23:32:43,469 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52086.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:32:54,794 INFO [train.py:892] (0/4) Epoch 29, batch 150, loss[loss=0.1724, simple_loss=0.2462, pruned_loss=0.04925, over 19724.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.239, pruned_loss=0.04338, over 2097800.14 frames. ], batch size: 63, lr: 5.44e-03, grad_scale: 16.0 +2023-03-28 23:33:09,918 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52096.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:33:21,668 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52101.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:33:26,994 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52103.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:34:15,960 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.333e+02 4.011e+02 4.815e+02 5.907e+02 9.716e+02, threshold=9.631e+02, percent-clipped=1.0 +2023-03-28 23:34:59,434 INFO [train.py:892] (0/4) Epoch 29, batch 200, loss[loss=0.1814, simple_loss=0.2553, pruned_loss=0.05371, over 19670.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2419, pruned_loss=0.04429, over 2507719.77 frames. ], batch size: 73, lr: 5.44e-03, grad_scale: 16.0 +2023-03-28 23:35:24,468 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52151.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:35:25,014 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.73 vs. 
limit=5.0 +2023-03-28 23:36:59,112 INFO [train.py:892] (0/4) Epoch 29, batch 250, loss[loss=0.1561, simple_loss=0.2317, pruned_loss=0.04024, over 19885.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2428, pruned_loss=0.04435, over 2826949.98 frames. ], batch size: 52, lr: 5.44e-03, grad_scale: 16.0 +2023-03-28 23:37:05,199 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52193.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:37:17,473 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52199.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:38:17,973 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.564e+02 3.825e+02 4.477e+02 5.151e+02 8.477e+02, threshold=8.954e+02, percent-clipped=0.0 +2023-03-28 23:39:01,939 INFO [train.py:892] (0/4) Epoch 29, batch 300, loss[loss=0.1601, simple_loss=0.2425, pruned_loss=0.03891, over 19859.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2434, pruned_loss=0.04397, over 3076414.71 frames. ], batch size: 118, lr: 5.44e-03, grad_scale: 16.0 +2023-03-28 23:39:35,410 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52254.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:40:39,094 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-03-28 23:41:06,895 INFO [train.py:892] (0/4) Epoch 29, batch 350, loss[loss=0.1521, simple_loss=0.217, pruned_loss=0.04357, over 19788.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.2428, pruned_loss=0.04345, over 3270709.07 frames. ], batch size: 120, lr: 5.43e-03, grad_scale: 8.0 +2023-03-28 23:42:02,299 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52312.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:42:30,756 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.445e+02 3.965e+02 4.571e+02 5.589e+02 1.011e+03, threshold=9.142e+02, percent-clipped=1.0 +2023-03-28 23:43:07,775 INFO [train.py:892] (0/4) Epoch 29, batch 400, loss[loss=0.1364, simple_loss=0.2138, pruned_loss=0.02952, over 19721.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.243, pruned_loss=0.044, over 3421782.96 frames. ], batch size: 104, lr: 5.43e-03, grad_scale: 8.0 +2023-03-28 23:44:27,276 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52373.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:45:13,530 INFO [train.py:892] (0/4) Epoch 29, batch 450, loss[loss=0.1809, simple_loss=0.2547, pruned_loss=0.05354, over 19822.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2447, pruned_loss=0.04441, over 3537020.35 frames. 
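Annotation: the [checkpoint.py:75] saves of checkpoint-50000.pt and checkpoint-52000.pt line up with the run's save_every_n=2000 setting, alongside the epoch-27.pt and epoch-28.pt saves at epoch boundaries. A minimal sketch of that batch-count policy; the helper and its payload are illustrative, not checkpoint.py's actual interface:

```python
from pathlib import Path

import torch


def maybe_save_checkpoint(model: torch.nn.Module, batch_idx_train: int,
                          exp_dir: str, save_every_n: int = 2000) -> None:
    """Save checkpoint-<N>.pt every save_every_n training batches."""
    if batch_idx_train > 0 and batch_idx_train % save_every_n == 0:
        out_dir = Path(exp_dir)
        out_dir.mkdir(parents=True, exist_ok=True)
        path = out_dir / f"checkpoint-{batch_idx_train}.pt"
        torch.save({"model": model.state_dict(),
                    "batch_idx_train": batch_idx_train}, path)


maybe_save_checkpoint(torch.nn.Linear(4, 4), batch_idx_train=52000,
                      exp_dir="pruned_transducer_stateless7_bbpe/exp")
```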
], batch size: 231, lr: 5.43e-03, grad_scale: 8.0 +2023-03-28 23:45:28,303 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52396.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:45:46,389 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52403.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:46:29,392 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7537, 3.6523, 3.6453, 3.4122, 3.7265, 2.7565, 3.0989, 1.7915], + device='cuda:0'), covar=tensor([0.0208, 0.0241, 0.0150, 0.0206, 0.0168, 0.1146, 0.0678, 0.1711], + device='cuda:0'), in_proj_covar=tensor([0.0104, 0.0144, 0.0113, 0.0133, 0.0119, 0.0134, 0.0142, 0.0127], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 23:46:38,511 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.695e+02 3.819e+02 4.668e+02 5.658e+02 1.148e+03, threshold=9.335e+02, percent-clipped=4.0 +2023-03-28 23:46:42,083 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. limit=2.0 +2023-03-28 23:47:02,430 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7518, 2.2483, 3.7183, 3.1640, 3.6721, 3.7302, 3.5271, 3.5278], + device='cuda:0'), covar=tensor([0.0669, 0.1061, 0.0119, 0.0531, 0.0156, 0.0237, 0.0200, 0.0181], + device='cuda:0'), in_proj_covar=tensor([0.0099, 0.0102, 0.0087, 0.0154, 0.0084, 0.0098, 0.0090, 0.0086], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 23:47:22,352 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1249, 4.1241, 2.3926, 4.4305, 4.5529, 1.9880, 3.7856, 3.3884], + device='cuda:0'), covar=tensor([0.0639, 0.0844, 0.2941, 0.0805, 0.0539, 0.2913, 0.0980, 0.0835], + device='cuda:0'), in_proj_covar=tensor([0.0232, 0.0256, 0.0229, 0.0272, 0.0252, 0.0203, 0.0238, 0.0195], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 23:47:23,496 INFO [train.py:892] (0/4) Epoch 29, batch 500, loss[loss=0.1438, simple_loss=0.2185, pruned_loss=0.0346, over 19757.00 frames. ], tot_loss[loss=0.167, simple_loss=0.245, pruned_loss=0.04452, over 3628705.00 frames. ], batch size: 88, lr: 5.43e-03, grad_scale: 8.0 +2023-03-28 23:47:50,406 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52451.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:49:23,605 INFO [train.py:892] (0/4) Epoch 29, batch 550, loss[loss=0.1579, simple_loss=0.2384, pruned_loss=0.03867, over 19835.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2459, pruned_loss=0.04521, over 3698024.65 frames. 
], batch size: 90, lr: 5.42e-03, grad_scale: 8.0 +2023-03-28 23:50:29,341 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0953, 4.1696, 2.4233, 4.4222, 4.6086, 1.9189, 3.7199, 3.3277], + device='cuda:0'), covar=tensor([0.0682, 0.0783, 0.2896, 0.0867, 0.0526, 0.3072, 0.1070, 0.0945], + device='cuda:0'), in_proj_covar=tensor([0.0233, 0.0255, 0.0230, 0.0273, 0.0252, 0.0204, 0.0238, 0.0196], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-28 23:50:40,437 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3247, 3.2073, 3.5945, 3.2556, 3.1139, 3.5149, 3.4106, 3.6540], + device='cuda:0'), covar=tensor([0.0886, 0.0431, 0.0411, 0.0471, 0.1675, 0.0615, 0.0499, 0.0418], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0221, 0.0220, 0.0233, 0.0206, 0.0238, 0.0230, 0.0212], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-28 23:50:45,580 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.600e+02 4.376e+02 5.079e+02 6.009e+02 1.236e+03, threshold=1.016e+03, percent-clipped=4.0 +2023-03-28 23:51:25,982 INFO [train.py:892] (0/4) Epoch 29, batch 600, loss[loss=0.1442, simple_loss=0.2201, pruned_loss=0.03418, over 19833.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2455, pruned_loss=0.04514, over 3754618.90 frames. ], batch size: 127, lr: 5.42e-03, grad_scale: 8.0 +2023-03-28 23:51:36,139 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-03-28 23:51:48,291 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52549.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:52:04,832 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52555.0, num_to_drop=1, layers_to_drop={0} +2023-03-28 23:53:25,786 INFO [train.py:892] (0/4) Epoch 29, batch 650, loss[loss=0.1796, simple_loss=0.2546, pruned_loss=0.05235, over 19747.00 frames. ], tot_loss[loss=0.1673, simple_loss=0.2446, pruned_loss=0.04504, over 3796410.48 frames. ], batch size: 77, lr: 5.42e-03, grad_scale: 8.0 +2023-03-28 23:54:27,083 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52616.0, num_to_drop=1, layers_to_drop={1} +2023-03-28 23:54:45,458 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.294e+02 3.664e+02 4.679e+02 5.983e+02 1.020e+03, threshold=9.358e+02, percent-clipped=1.0 +2023-03-28 23:55:31,361 INFO [train.py:892] (0/4) Epoch 29, batch 700, loss[loss=0.1474, simple_loss=0.2262, pruned_loss=0.03435, over 19753.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2446, pruned_loss=0.04473, over 3829451.03 frames. ], batch size: 84, lr: 5.41e-03, grad_scale: 8.0 +2023-03-28 23:55:55,961 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.63 vs. limit=5.0 +2023-03-28 23:56:36,978 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-03-28 23:56:40,005 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52668.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:57:32,117 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.55 vs. limit=5.0 +2023-03-28 23:57:34,778 INFO [train.py:892] (0/4) Epoch 29, batch 750, loss[loss=0.1494, simple_loss=0.2291, pruned_loss=0.03481, over 19872.00 frames. 
], tot_loss[loss=0.1665, simple_loss=0.2442, pruned_loss=0.04439, over 3855415.21 frames. ], batch size: 136, lr: 5.41e-03, grad_scale: 8.0 +2023-03-28 23:57:46,800 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52696.0, num_to_drop=0, layers_to_drop=set() +2023-03-28 23:58:51,484 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.283e+02 3.891e+02 4.308e+02 5.320e+02 9.738e+02, threshold=8.617e+02, percent-clipped=1.0 +2023-03-28 23:59:16,915 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-28 23:59:30,913 INFO [train.py:892] (0/4) Epoch 29, batch 800, loss[loss=0.1506, simple_loss=0.2338, pruned_loss=0.03369, over 19853.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2443, pruned_loss=0.04414, over 3876547.51 frames. ], batch size: 59, lr: 5.41e-03, grad_scale: 8.0 +2023-03-28 23:59:38,515 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52744.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:00:01,300 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52752.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:01:37,829 INFO [train.py:892] (0/4) Epoch 29, batch 850, loss[loss=0.2301, simple_loss=0.3044, pruned_loss=0.07786, over 19627.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2445, pruned_loss=0.0441, over 3892595.71 frames. ], batch size: 359, lr: 5.41e-03, grad_scale: 8.0 +2023-03-29 00:01:46,412 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8764, 2.3930, 2.7844, 3.1545, 3.6229, 3.7641, 3.7323, 3.7868], + device='cuda:0'), covar=tensor([0.0874, 0.1583, 0.1192, 0.0641, 0.0415, 0.0276, 0.0382, 0.0391], + device='cuda:0'), in_proj_covar=tensor([0.0156, 0.0168, 0.0175, 0.0148, 0.0131, 0.0128, 0.0120, 0.0112], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 00:02:05,907 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-03-29 00:02:27,090 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-29 00:02:31,947 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9701, 3.8957, 4.2651, 3.8769, 3.6644, 4.1207, 3.9681, 4.3277], + device='cuda:0'), covar=tensor([0.0789, 0.0341, 0.0334, 0.0402, 0.1075, 0.0544, 0.0473, 0.0347], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0220, 0.0220, 0.0232, 0.0206, 0.0239, 0.0230, 0.0212], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 00:02:32,077 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52813.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:02:58,925 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.946e+02 3.447e+02 4.095e+02 4.667e+02 7.447e+02, threshold=8.190e+02, percent-clipped=0.0 +2023-03-29 00:03:38,957 INFO [train.py:892] (0/4) Epoch 29, batch 900, loss[loss=0.1557, simple_loss=0.2306, pruned_loss=0.04043, over 19795.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2449, pruned_loss=0.044, over 3905963.39 frames. 
], batch size: 149, lr: 5.40e-03, grad_scale: 8.0 +2023-03-29 00:03:58,230 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52849.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:04:06,772 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8443, 5.1304, 5.1947, 5.0831, 4.8422, 5.1562, 4.6156, 4.6912], + device='cuda:0'), covar=tensor([0.0459, 0.0442, 0.0459, 0.0419, 0.0562, 0.0484, 0.0697, 0.1006], + device='cuda:0'), in_proj_covar=tensor([0.0263, 0.0280, 0.0292, 0.0254, 0.0258, 0.0244, 0.0263, 0.0307], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 00:05:35,926 INFO [train.py:892] (0/4) Epoch 29, batch 950, loss[loss=0.1937, simple_loss=0.2599, pruned_loss=0.06379, over 19807.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2437, pruned_loss=0.04373, over 3916579.52 frames. ], batch size: 181, lr: 5.40e-03, grad_scale: 8.0 +2023-03-29 00:05:50,932 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52897.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:06:27,916 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52911.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 00:06:58,641 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.525e+02 3.679e+02 4.624e+02 5.272e+02 1.115e+03, threshold=9.248e+02, percent-clipped=1.0 +2023-03-29 00:07:29,032 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9928, 3.9929, 4.3444, 4.0961, 4.3256, 3.8681, 4.1066, 3.8561], + device='cuda:0'), covar=tensor([0.1540, 0.1780, 0.1026, 0.1414, 0.1030, 0.1137, 0.1915, 0.2347], + device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0326, 0.0367, 0.0295, 0.0273, 0.0275, 0.0355, 0.0388], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 00:07:41,661 INFO [train.py:892] (0/4) Epoch 29, batch 1000, loss[loss=0.1606, simple_loss=0.2412, pruned_loss=0.03998, over 19822.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2445, pruned_loss=0.04403, over 3923005.49 frames. ], batch size: 72, lr: 5.40e-03, grad_scale: 8.0 +2023-03-29 00:07:42,932 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.47 vs. limit=5.0 +2023-03-29 00:08:46,238 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52968.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:09:39,915 INFO [train.py:892] (0/4) Epoch 29, batch 1050, loss[loss=0.1579, simple_loss=0.2383, pruned_loss=0.03875, over 19649.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.245, pruned_loss=0.04446, over 3927506.45 frames. 
], batch size: 47, lr: 5.40e-03, grad_scale: 8.0 +2023-03-29 00:10:16,712 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0377, 2.8310, 3.1531, 2.7615, 3.2913, 3.2460, 3.8687, 4.2513], + device='cuda:0'), covar=tensor([0.0619, 0.1759, 0.1685, 0.2272, 0.1776, 0.1566, 0.0687, 0.0645], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0240, 0.0267, 0.0254, 0.0296, 0.0256, 0.0231, 0.0253], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 00:10:42,371 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53016.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:10:42,522 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7262, 4.4868, 4.4744, 4.2374, 4.7426, 3.1119, 3.7443, 2.2490], + device='cuda:0'), covar=tensor([0.0239, 0.0238, 0.0192, 0.0225, 0.0182, 0.0989, 0.0990, 0.1722], + device='cuda:0'), in_proj_covar=tensor([0.0106, 0.0145, 0.0114, 0.0134, 0.0119, 0.0135, 0.0142, 0.0128], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 00:10:59,359 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.507e+02 3.902e+02 4.763e+02 5.659e+02 9.296e+02, threshold=9.526e+02, percent-clipped=1.0 +2023-03-29 00:11:39,411 INFO [train.py:892] (0/4) Epoch 29, batch 1100, loss[loss=0.1745, simple_loss=0.2563, pruned_loss=0.0463, over 19896.00 frames. ], tot_loss[loss=0.1661, simple_loss=0.2445, pruned_loss=0.04387, over 3931795.57 frames. ], batch size: 71, lr: 5.39e-03, grad_scale: 8.0 +2023-03-29 00:12:17,907 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53056.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:12:20,273 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8450, 2.7238, 2.9704, 2.7210, 3.2080, 3.1035, 3.6725, 4.0699], + device='cuda:0'), covar=tensor([0.0637, 0.1818, 0.1697, 0.2163, 0.1578, 0.1595, 0.0718, 0.0631], + device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0239, 0.0266, 0.0254, 0.0295, 0.0255, 0.0230, 0.0252], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 00:13:40,595 INFO [train.py:892] (0/4) Epoch 29, batch 1150, loss[loss=0.169, simple_loss=0.247, pruned_loss=0.04553, over 19803.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2434, pruned_loss=0.04377, over 3935648.92 frames. ], batch size: 126, lr: 5.39e-03, grad_scale: 8.0 +2023-03-29 00:14:25,695 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53108.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:14:42,020 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.29 vs. limit=5.0 +2023-03-29 00:14:47,986 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53117.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:15:03,920 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.770e+02 4.130e+02 4.907e+02 5.867e+02 8.841e+02, threshold=9.813e+02, percent-clipped=0.0 +2023-03-29 00:15:41,713 INFO [train.py:892] (0/4) Epoch 29, batch 1200, loss[loss=0.1622, simple_loss=0.2359, pruned_loss=0.04423, over 19764.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2443, pruned_loss=0.04375, over 3938113.53 frames. 
], batch size: 226, lr: 5.39e-03, grad_scale: 8.0 +2023-03-29 00:17:41,362 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-29 00:17:48,590 INFO [train.py:892] (0/4) Epoch 29, batch 1250, loss[loss=0.1624, simple_loss=0.2389, pruned_loss=0.04297, over 19779.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2435, pruned_loss=0.04372, over 3941309.85 frames. ], batch size: 113, lr: 5.39e-03, grad_scale: 8.0 +2023-03-29 00:18:19,059 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-29 00:18:38,589 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53211.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 00:18:43,644 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0764, 2.7839, 3.1074, 2.8065, 3.3181, 3.2814, 3.9115, 4.3146], + device='cuda:0'), covar=tensor([0.0594, 0.1823, 0.1555, 0.2152, 0.1613, 0.1441, 0.0598, 0.0573], + device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0237, 0.0263, 0.0251, 0.0292, 0.0253, 0.0228, 0.0249], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 00:19:09,954 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.614e+02 3.724e+02 4.127e+02 5.069e+02 9.146e+02, threshold=8.253e+02, percent-clipped=0.0 +2023-03-29 00:19:52,117 INFO [train.py:892] (0/4) Epoch 29, batch 1300, loss[loss=0.1601, simple_loss=0.2368, pruned_loss=0.04169, over 19713.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2431, pruned_loss=0.04346, over 3943974.01 frames. ], batch size: 60, lr: 5.38e-03, grad_scale: 8.0 +2023-03-29 00:19:53,012 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53241.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:20:38,616 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53259.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 00:21:58,524 INFO [train.py:892] (0/4) Epoch 29, batch 1350, loss[loss=0.2012, simple_loss=0.2674, pruned_loss=0.06748, over 19807.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2428, pruned_loss=0.04359, over 3946116.75 frames. ], batch size: 288, lr: 5.38e-03, grad_scale: 8.0 +2023-03-29 00:22:25,965 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53302.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:22:30,245 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53304.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:22:58,869 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53315.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:23:19,497 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.794e+02 3.937e+02 4.567e+02 5.405e+02 8.872e+02, threshold=9.134e+02, percent-clipped=4.0 +2023-03-29 00:24:04,121 INFO [train.py:892] (0/4) Epoch 29, batch 1400, loss[loss=0.1553, simple_loss=0.2342, pruned_loss=0.03814, over 19737.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2415, pruned_loss=0.04316, over 3946865.19 frames. 
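The [zipformer.py:625] entries track stochastic layer skipping per encoder stack: each stack has its own (warmup_begin, warmup_end) window in batches, and even at batch_count > 50000 an occasional layer is still dropped, so some residual probability must remain after warm-up. A sketch of a schedule with that shape; the actual probabilities are assumptions inferred only from how often drops appear in the log:

```python
import random

def pick_layers_to_drop(
    batch_count: float,
    warmup_begin: float,
    warmup_end: float,
    num_layers: int,
    initial_p: float = 0.5,  # assumed in-warm-up drop probability
    final_p: float = 0.05,   # assumed residual probability after warm-up
) -> set:
    # Inside the stack's warm-up window layers are dropped aggressively to
    # stabilize training; afterwards a small residual probability remains,
    # which is why num_to_drop is still occasionally 1 at batch_count > 50000.
    if batch_count < warmup_begin:
        p = initial_p
    elif batch_count < warmup_end:
        frac = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
        p = initial_p + frac * (final_p - initial_p)
    else:
        p = final_p
    return {i for i in range(num_layers) if random.random() < p}
```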
], batch size: 134, lr: 5.38e-03, grad_scale: 8.0 +2023-03-29 00:24:37,197 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.7507, 6.0557, 6.0934, 5.9881, 5.7923, 6.0870, 5.4073, 5.4597], + device='cuda:0'), covar=tensor([0.0414, 0.0428, 0.0522, 0.0426, 0.0573, 0.0470, 0.0681, 0.0978], + device='cuda:0'), in_proj_covar=tensor([0.0265, 0.0283, 0.0292, 0.0257, 0.0260, 0.0246, 0.0264, 0.0309], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 00:25:04,321 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53365.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:25:09,102 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53367.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:25:30,824 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53376.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:26:04,691 INFO [train.py:892] (0/4) Epoch 29, batch 1450, loss[loss=0.1846, simple_loss=0.2555, pruned_loss=0.05688, over 19789.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2418, pruned_loss=0.04317, over 3949631.80 frames. ], batch size: 211, lr: 5.38e-03, grad_scale: 8.0 +2023-03-29 00:26:48,380 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53408.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:26:51,069 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5199, 4.2091, 4.3028, 4.5269, 4.2858, 4.6567, 4.6075, 4.7946], + device='cuda:0'), covar=tensor([0.0606, 0.0395, 0.0476, 0.0336, 0.0640, 0.0402, 0.0388, 0.0299], + device='cuda:0'), in_proj_covar=tensor([0.0149, 0.0173, 0.0199, 0.0173, 0.0170, 0.0155, 0.0149, 0.0195], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 00:26:58,574 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53412.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:27:27,174 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.651e+02 3.715e+02 4.371e+02 5.322e+02 9.816e+02, threshold=8.743e+02, percent-clipped=2.0 +2023-03-29 00:27:38,614 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53428.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 00:28:09,464 INFO [train.py:892] (0/4) Epoch 29, batch 1500, loss[loss=0.1803, simple_loss=0.2561, pruned_loss=0.05227, over 19746.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2413, pruned_loss=0.04311, over 3950284.10 frames. ], batch size: 273, lr: 5.37e-03, grad_scale: 8.0 +2023-03-29 00:28:49,028 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53456.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:29:35,102 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.73 vs. limit=5.0 +2023-03-29 00:30:14,157 INFO [train.py:892] (0/4) Epoch 29, batch 1550, loss[loss=0.1422, simple_loss=0.2179, pruned_loss=0.03325, over 19803.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.242, pruned_loss=0.04387, over 3950242.83 frames. 
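The 'Whitening: ... metric=X vs. limit=Y' entries come from a diagnostic in scaling.py that measures how far the channel covariance, computed over num_groups groups of channels, is from a multiple of the identity, with a penalty presumably engaged only when the metric exceeds the limit. The exact formula is not recoverable from the log; one plausible reconstruction that equals 1.0 for perfectly white activations and grows with anisotropy:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    # x: (..., num_channels); split channels into groups, estimate each
    # group's covariance, and compare its eigenvalue spread to the isotropic
    # case: mean(eig**2) / mean(eig)**2 == 1.0 iff all eigenvalues are equal.
    num_frames, num_channels = x.reshape(-1, x.shape[-1]).shape
    x = x.reshape(num_frames, num_groups, num_channels // num_groups)
    x = x - x.mean(dim=0, keepdim=True)
    cov = torch.einsum("ngc,ngd->gcd", x, x) / num_frames
    eigs = torch.linalg.eigvalsh(cov)  # real: cov is symmetric
    return (eigs ** 2).mean() / (eigs.mean() ** 2)
```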
], batch size: 117, lr: 5.37e-03, grad_scale: 8.0 +2023-03-29 00:31:28,849 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.518e+02 3.925e+02 4.450e+02 5.196e+02 1.158e+03, threshold=8.900e+02, percent-clipped=2.0 +2023-03-29 00:32:15,027 INFO [train.py:892] (0/4) Epoch 29, batch 1600, loss[loss=0.1795, simple_loss=0.2519, pruned_loss=0.05354, over 19734.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.242, pruned_loss=0.04363, over 3950553.58 frames. ], batch size: 269, lr: 5.37e-03, grad_scale: 8.0 +2023-03-29 00:33:31,743 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4147, 2.0727, 3.3769, 2.7902, 3.3784, 3.4308, 3.1443, 3.2864], + device='cuda:0'), covar=tensor([0.0824, 0.1187, 0.0130, 0.0460, 0.0158, 0.0247, 0.0237, 0.0193], + device='cuda:0'), in_proj_covar=tensor([0.0098, 0.0102, 0.0087, 0.0154, 0.0085, 0.0097, 0.0090, 0.0085], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 00:34:19,709 INFO [train.py:892] (0/4) Epoch 29, batch 1650, loss[loss=0.1799, simple_loss=0.2517, pruned_loss=0.05407, over 19772.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2415, pruned_loss=0.04316, over 3950503.35 frames. ], batch size: 66, lr: 5.37e-03, grad_scale: 8.0 +2023-03-29 00:34:36,223 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53597.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:34:38,504 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4730, 3.3874, 3.7453, 3.4532, 3.2663, 3.6870, 3.5236, 3.8072], + device='cuda:0'), covar=tensor([0.0881, 0.0396, 0.0429, 0.0453, 0.1348, 0.0617, 0.0522, 0.0427], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0224, 0.0224, 0.0236, 0.0209, 0.0242, 0.0233, 0.0215], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 00:35:41,047 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.655e+02 3.870e+02 4.535e+02 5.487e+02 1.255e+03, threshold=9.070e+02, percent-clipped=2.0 +2023-03-29 00:36:23,480 INFO [train.py:892] (0/4) Epoch 29, batch 1700, loss[loss=0.182, simple_loss=0.2737, pruned_loss=0.04517, over 19524.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2432, pruned_loss=0.04372, over 3949625.77 frames. ], batch size: 54, lr: 5.36e-03, grad_scale: 8.0 +2023-03-29 00:37:02,994 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-29 00:37:11,562 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53660.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:37:26,689 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-03-29 00:37:40,124 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53671.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:38:27,048 INFO [train.py:892] (0/4) Epoch 29, batch 1750, loss[loss=0.1558, simple_loss=0.2342, pruned_loss=0.0387, over 19847.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.2441, pruned_loss=0.04414, over 3949459.85 frames. 
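Each [zipformer.py:1454] dump begins with what appears to be one entropy value per attention head (eight values, matching nhead=8), followed by covariance summaries of the attention projections produced by a separate diagnostics helper and not reproduced here. The entropy part is straightforward to reconstruct:

```python
import torch

def attention_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
    # attn_weights: (num_heads, query_len, key_len), rows already softmaxed.
    # Returns one entropy per head, averaged over query positions: values near
    # log(key_len) mean nearly uniform attention; low values mean sharply
    # peaked attention.
    eps = 1.0e-20
    ent = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
    return ent.mean(dim=-1)
```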
], batch size: 57, lr: 5.36e-03, grad_scale: 8.0 +2023-03-29 00:39:14,587 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53712.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:39:37,570 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53723.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 00:39:38,984 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.549e+02 3.643e+02 4.593e+02 5.639e+02 1.111e+03, threshold=9.186e+02, percent-clipped=1.0 +2023-03-29 00:39:43,968 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8105, 3.0148, 2.6968, 2.1520, 2.6561, 2.9262, 2.8811, 2.9276], + device='cuda:0'), covar=tensor([0.0284, 0.0301, 0.0254, 0.0537, 0.0340, 0.0295, 0.0241, 0.0228], + device='cuda:0'), in_proj_covar=tensor([0.0101, 0.0095, 0.0098, 0.0101, 0.0104, 0.0085, 0.0084, 0.0086], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 00:40:13,012 INFO [train.py:892] (0/4) Epoch 29, batch 1800, loss[loss=0.1464, simple_loss=0.2279, pruned_loss=0.0324, over 19845.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2436, pruned_loss=0.04361, over 3946519.51 frames. ], batch size: 104, lr: 5.36e-03, grad_scale: 8.0 +2023-03-29 00:40:53,722 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53760.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:41:55,155 INFO [train.py:892] (0/4) Epoch 29, batch 1850, loss[loss=0.1939, simple_loss=0.2843, pruned_loss=0.05169, over 19584.00 frames. ], tot_loss[loss=0.1661, simple_loss=0.2449, pruned_loss=0.04366, over 3945946.38 frames. ], batch size: 53, lr: 5.36e-03, grad_scale: 8.0 +2023-03-29 00:42:02,967 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-29.pt +2023-03-29 00:42:59,946 INFO [train.py:892] (0/4) Epoch 30, batch 0, loss[loss=0.152, simple_loss=0.2303, pruned_loss=0.03689, over 19743.00 frames. ], tot_loss[loss=0.152, simple_loss=0.2303, pruned_loss=0.03689, over 19743.00 frames. ], batch size: 44, lr: 5.27e-03, grad_scale: 8.0 +2023-03-29 00:42:59,947 INFO [train.py:917] (0/4) Computing validation loss +2023-03-29 00:43:13,149 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0544, 2.9205, 4.5449, 3.3769, 3.7620, 3.3951, 2.4537, 2.5787], + device='cuda:0'), covar=tensor([0.1078, 0.3689, 0.0559, 0.1095, 0.1845, 0.1711, 0.2881, 0.2883], + device='cuda:0'), in_proj_covar=tensor([0.0352, 0.0388, 0.0348, 0.0285, 0.0373, 0.0374, 0.0371, 0.0340], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 00:43:32,679 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2069, 2.8176, 2.9062, 3.0856, 3.0633, 2.8163, 4.2224, 4.5272], + device='cuda:0'), covar=tensor([0.1241, 0.1635, 0.1564, 0.2146, 0.2071, 0.2044, 0.0575, 0.0347], + device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0238, 0.0266, 0.0252, 0.0295, 0.0256, 0.0232, 0.0252], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 00:43:35,880 INFO [train.py:926] (0/4) Epoch 30, validation: loss=0.1794, simple_loss=0.2489, pruned_loss=0.05491, over 2883724.00 frames. 
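The validation block that opens epoch 30 ends, just below, with a memory report. That figure is PyTorch's allocator high-water mark since startup, which is why it never decreases across the log:

```python
import torch

def log_peak_memory(device: torch.device = torch.device("cuda:0")) -> None:
    # Reproduces the 'Maximum memory allocated so far is NNNNNMB' entries:
    # torch.cuda.max_memory_allocated returns the high-water mark of
    # allocator usage in bytes, converted here to MB.
    mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    print(f"Maximum memory allocated so far is {mb}MB")
```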
+2023-03-29 00:43:35,881 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-29 00:43:50,058 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2433, 4.2931, 2.7220, 4.5040, 4.7683, 2.1401, 4.0697, 3.5900], + device='cuda:0'), covar=tensor([0.0762, 0.0850, 0.2566, 0.1115, 0.0630, 0.2734, 0.0925, 0.0896], + device='cuda:0'), in_proj_covar=tensor([0.0233, 0.0255, 0.0231, 0.0274, 0.0253, 0.0204, 0.0240, 0.0197], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 00:44:13,141 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2688, 3.9065, 3.9861, 4.2313, 4.0572, 4.2745, 4.2615, 4.5412], + device='cuda:0'), covar=tensor([0.0623, 0.0463, 0.0562, 0.0355, 0.0652, 0.0510, 0.0474, 0.0300], + device='cuda:0'), in_proj_covar=tensor([0.0151, 0.0175, 0.0202, 0.0174, 0.0172, 0.0157, 0.0150, 0.0197], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 00:44:26,753 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-03-29 00:44:48,485 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.508e+02 3.762e+02 4.456e+02 5.146e+02 7.887e+02, threshold=8.911e+02, percent-clipped=0.0 +2023-03-29 00:45:19,502 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8490, 2.4076, 2.6723, 3.0996, 3.5502, 3.7757, 3.6016, 3.7340], + device='cuda:0'), covar=tensor([0.0916, 0.1463, 0.1285, 0.0652, 0.0409, 0.0284, 0.0425, 0.0378], + device='cuda:0'), in_proj_covar=tensor([0.0158, 0.0170, 0.0178, 0.0151, 0.0134, 0.0131, 0.0122, 0.0114], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 00:45:28,733 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3494, 3.4460, 2.1722, 3.5480, 3.6432, 1.7714, 3.0382, 2.8694], + device='cuda:0'), covar=tensor([0.0816, 0.0934, 0.2723, 0.0836, 0.0643, 0.2680, 0.1126, 0.0866], + device='cuda:0'), in_proj_covar=tensor([0.0232, 0.0254, 0.0230, 0.0274, 0.0252, 0.0204, 0.0239, 0.0197], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 00:45:46,649 INFO [train.py:892] (0/4) Epoch 30, batch 50, loss[loss=0.1523, simple_loss=0.2328, pruned_loss=0.03586, over 19849.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2421, pruned_loss=0.04448, over 889753.24 frames. ], batch size: 81, lr: 5.26e-03, grad_scale: 8.0 +2023-03-29 00:47:42,287 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2701, 3.4461, 3.0535, 2.5916, 2.9562, 3.2578, 3.3766, 3.3552], + device='cuda:0'), covar=tensor([0.0240, 0.0286, 0.0257, 0.0477, 0.0322, 0.0315, 0.0208, 0.0227], + device='cuda:0'), in_proj_covar=tensor([0.0102, 0.0096, 0.0099, 0.0101, 0.0105, 0.0085, 0.0085, 0.0086], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 00:47:46,005 INFO [train.py:892] (0/4) Epoch 30, batch 100, loss[loss=0.1701, simple_loss=0.2472, pruned_loss=0.04649, over 19575.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2412, pruned_loss=0.04253, over 1568550.32 frames. 
], batch size: 42, lr: 5.26e-03, grad_scale: 8.0 +2023-03-29 00:47:49,328 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53897.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:48:56,408 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.692e+02 3.728e+02 4.256e+02 5.465e+02 1.327e+03, threshold=8.513e+02, percent-clipped=4.0 +2023-03-29 00:49:35,529 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8572, 3.1130, 3.1400, 3.1842, 3.0197, 3.1763, 3.0448, 3.3261], + device='cuda:0'), covar=tensor([0.0307, 0.0357, 0.0281, 0.0265, 0.0359, 0.0307, 0.0338, 0.0253], + device='cuda:0'), in_proj_covar=tensor([0.0085, 0.0079, 0.0082, 0.0075, 0.0089, 0.0081, 0.0098, 0.0071], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 00:49:45,883 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3114, 4.2997, 2.5806, 4.6221, 4.8680, 2.1472, 4.0482, 3.6510], + device='cuda:0'), covar=tensor([0.0630, 0.0894, 0.2783, 0.0721, 0.0500, 0.2844, 0.0990, 0.0825], + device='cuda:0'), in_proj_covar=tensor([0.0234, 0.0257, 0.0232, 0.0276, 0.0254, 0.0205, 0.0241, 0.0198], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 00:49:49,064 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53945.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:49:51,899 INFO [train.py:892] (0/4) Epoch 30, batch 150, loss[loss=0.148, simple_loss=0.2219, pruned_loss=0.03706, over 19833.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2401, pruned_loss=0.04228, over 2094709.24 frames. ], batch size: 128, lr: 5.26e-03, grad_scale: 8.0 +2023-03-29 00:50:27,087 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53960.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:50:54,491 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53971.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:51:57,043 INFO [train.py:892] (0/4) Epoch 30, batch 200, loss[loss=0.1575, simple_loss=0.2343, pruned_loss=0.04041, over 19717.00 frames. ], tot_loss[loss=0.163, simple_loss=0.2407, pruned_loss=0.04261, over 2506330.56 frames. 
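The tot_loss[... over N frames] aggregates grow through an epoch, but the frame counts are fractional (1568550.32 above), which points to an exponentially decayed frame-weighted average rather than a plain running sum. A sketch, assuming the decay constant is tied to the reset_interval in the startup config:

```python
class RunningLoss:
    def __init__(self, reset_interval: int = 200):
        self.decay = 1.0 - 1.0 / reset_interval
        self.loss_sum = 0.0  # decayed sum of per-frame loss * frames
        self.frames = 0.0    # decayed frame count (hence the fractional totals)

    def update(self, loss_per_frame: float, num_frames: int) -> float:
        # Each batch, previous totals fade slightly before the new batch's
        # frame-weighted loss is added; the returned ratio is the tot_loss
        # value that gets logged.
        self.loss_sum = self.loss_sum * self.decay + loss_per_frame * num_frames
        self.frames = self.frames * self.decay + num_frames
        return self.loss_sum / self.frames
```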
], batch size: 109, lr: 5.26e-03, grad_scale: 8.0 +2023-03-29 00:52:06,626 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-54000.pt +2023-03-29 00:52:31,889 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=54008.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:52:58,707 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=54019.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:53:09,724 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=54023.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 00:53:11,543 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.594e+02 3.930e+02 4.416e+02 5.090e+02 1.215e+03, threshold=8.832e+02, percent-clipped=2.0 +2023-03-29 00:53:25,823 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8822, 4.5739, 4.6224, 4.3098, 4.8447, 3.2561, 3.9378, 2.4491], + device='cuda:0'), covar=tensor([0.0180, 0.0207, 0.0138, 0.0189, 0.0131, 0.0879, 0.0844, 0.1458], + device='cuda:0'), in_proj_covar=tensor([0.0105, 0.0146, 0.0114, 0.0134, 0.0120, 0.0135, 0.0144, 0.0128], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 00:54:01,460 INFO [train.py:892] (0/4) Epoch 30, batch 250, loss[loss=0.2233, simple_loss=0.2897, pruned_loss=0.07843, over 19719.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2409, pruned_loss=0.04258, over 2827443.02 frames. ], batch size: 310, lr: 5.25e-03, grad_scale: 8.0 +2023-03-29 00:54:14,139 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6716, 4.6896, 2.8326, 4.9930, 5.1681, 2.3046, 4.3959, 3.6582], + device='cuda:0'), covar=tensor([0.0574, 0.0649, 0.2461, 0.0552, 0.0362, 0.2713, 0.0846, 0.0898], + device='cuda:0'), in_proj_covar=tensor([0.0236, 0.0258, 0.0233, 0.0277, 0.0255, 0.0206, 0.0241, 0.0199], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 00:54:59,828 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.8975, 1.9016, 1.9653, 2.0005, 1.9156, 2.0322, 1.9092, 2.0193], + device='cuda:0'), covar=tensor([0.0369, 0.0325, 0.0343, 0.0313, 0.0433, 0.0324, 0.0425, 0.0346], + device='cuda:0'), in_proj_covar=tensor([0.0085, 0.0078, 0.0082, 0.0075, 0.0089, 0.0081, 0.0098, 0.0071], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 00:55:04,689 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=54071.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 00:56:07,032 INFO [train.py:892] (0/4) Epoch 30, batch 300, loss[loss=0.16, simple_loss=0.2242, pruned_loss=0.04787, over 19765.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2423, pruned_loss=0.04307, over 3074690.35 frames. 
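checkpoint-54000.pt here and checkpoint-56000.pt further down are exactly 2000 batches apart, i.e. batch-level checkpoints fire on a fixed interval, while the epoch-NN.pt files are written at epoch boundaries. A sketch of the batch-level rule; the real recipe also saves optimizer, scheduler, and sampler state, omitted here:

```python
from pathlib import Path

import torch

def maybe_save_checkpoint(
    model: torch.nn.Module,
    batch_idx_train: int,
    exp_dir: Path,
    save_every_n: int = 2000,  # interval implied by the 54000/56000 saves
) -> None:
    # Fires whenever the global batch counter crosses a multiple of
    # save_every_n, producing files like checkpoint-54000.pt.
    if batch_idx_train > 0 and batch_idx_train % save_every_n == 0:
        path = exp_dir / f"checkpoint-{batch_idx_train}.pt"
        torch.save({"model": model.state_dict()}, path)
```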
], batch size: 125, lr: 5.25e-03, grad_scale: 8.0 +2023-03-29 00:56:08,012 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.2301, 2.5546, 2.3161, 1.6933, 2.3123, 2.4265, 2.4598, 2.5322], + device='cuda:0'), covar=tensor([0.0401, 0.0297, 0.0325, 0.0640, 0.0401, 0.0320, 0.0281, 0.0239], + device='cuda:0'), in_proj_covar=tensor([0.0101, 0.0096, 0.0098, 0.0101, 0.0104, 0.0085, 0.0085, 0.0086], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 00:56:28,192 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-29 00:57:14,795 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.600e+02 3.558e+02 4.130e+02 5.342e+02 1.010e+03, threshold=8.261e+02, percent-clipped=1.0 +2023-03-29 00:58:16,904 INFO [train.py:892] (0/4) Epoch 30, batch 350, loss[loss=0.2527, simple_loss=0.3251, pruned_loss=0.0901, over 19429.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.243, pruned_loss=0.04341, over 3268602.32 frames. ], batch size: 412, lr: 5.25e-03, grad_scale: 8.0 +2023-03-29 00:58:53,184 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4918, 2.5409, 2.6527, 2.0915, 2.7750, 2.3183, 2.6484, 2.5678], + device='cuda:0'), covar=tensor([0.0567, 0.0488, 0.0461, 0.0832, 0.0329, 0.0482, 0.0446, 0.0376], + device='cuda:0'), in_proj_covar=tensor([0.0077, 0.0084, 0.0082, 0.0109, 0.0078, 0.0080, 0.0078, 0.0070], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 00:59:06,560 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2983, 4.2016, 4.1321, 3.8733, 4.3245, 3.0157, 3.5598, 1.9438], + device='cuda:0'), covar=tensor([0.0277, 0.0245, 0.0205, 0.0252, 0.0226, 0.1152, 0.0899, 0.1986], + device='cuda:0'), in_proj_covar=tensor([0.0104, 0.0145, 0.0114, 0.0134, 0.0119, 0.0134, 0.0143, 0.0128], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 01:00:25,807 INFO [train.py:892] (0/4) Epoch 30, batch 400, loss[loss=0.1566, simple_loss=0.2307, pruned_loss=0.04126, over 19823.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.242, pruned_loss=0.04293, over 3420853.09 frames. ], batch size: 143, lr: 5.25e-03, grad_scale: 8.0 +2023-03-29 01:00:35,444 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-29 01:00:41,402 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7789, 3.6644, 4.0618, 3.7076, 3.4961, 3.9736, 3.7861, 4.1244], + device='cuda:0'), covar=tensor([0.0824, 0.0390, 0.0373, 0.0423, 0.1242, 0.0606, 0.0508, 0.0372], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0221, 0.0223, 0.0234, 0.0208, 0.0241, 0.0230, 0.0214], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 01:01:24,870 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. limit=2.0 +2023-03-29 01:01:38,160 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.477e+02 3.958e+02 4.510e+02 5.368e+02 1.015e+03, threshold=9.019e+02, percent-clipped=3.0 +2023-03-29 01:02:36,033 INFO [train.py:892] (0/4) Epoch 30, batch 450, loss[loss=0.15, simple_loss=0.2328, pruned_loss=0.03361, over 19853.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2422, pruned_loss=0.04303, over 3538262.08 frames. 
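The slow decay of lr through these entries (5.40e-03 early in epoch 29 down to 5.19e-03 late in epoch 30) matches the Eden schedule with the base_lr, lr_batches, and lr_epochs values from the startup config. A sketch that reproduces the logged numbers:

```python
def eden_lr(
    base_lr: float,
    batch: int,
    epoch: int,
    lr_batches: float = 5000.0,
    lr_epochs: float = 3.5,
) -> float:
    # eden_lr(0.05, 52900, 28) ~= 5.40e-03, matching the 'lr: 5.40e-03'
    # entries above (epochs counted from 0, so epoch 29 in the log is
    # epoch index 28 here).
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor
```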
], batch size: 85, lr: 5.24e-03, grad_scale: 8.0 +2023-03-29 01:04:30,032 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4680, 2.4651, 1.7066, 2.7547, 2.6207, 2.6445, 2.7848, 2.2473], + device='cuda:0'), covar=tensor([0.0709, 0.0803, 0.1416, 0.0648, 0.0673, 0.0641, 0.0673, 0.0999], + device='cuda:0'), in_proj_covar=tensor([0.0143, 0.0143, 0.0143, 0.0152, 0.0133, 0.0134, 0.0146, 0.0145], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 01:04:31,210 INFO [train.py:892] (0/4) Epoch 30, batch 500, loss[loss=0.183, simple_loss=0.2643, pruned_loss=0.05089, over 19710.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2418, pruned_loss=0.04318, over 3630893.39 frames. ], batch size: 310, lr: 5.24e-03, grad_scale: 16.0 +2023-03-29 01:05:11,654 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-29 01:05:41,307 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.522e+02 3.919e+02 4.339e+02 5.578e+02 1.318e+03, threshold=8.679e+02, percent-clipped=2.0 +2023-03-29 01:06:34,649 INFO [train.py:892] (0/4) Epoch 30, batch 550, loss[loss=0.1643, simple_loss=0.2489, pruned_loss=0.03985, over 19671.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2417, pruned_loss=0.04341, over 3701648.23 frames. ], batch size: 55, lr: 5.24e-03, grad_scale: 16.0 +2023-03-29 01:07:39,080 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7553, 4.0503, 4.1651, 4.8718, 3.2685, 3.5394, 3.2190, 2.9656], + device='cuda:0'), covar=tensor([0.0451, 0.1851, 0.0802, 0.0317, 0.1867, 0.1102, 0.1121, 0.1598], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0328, 0.0246, 0.0200, 0.0245, 0.0206, 0.0215, 0.0214], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 01:08:37,644 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2388, 4.9700, 4.9218, 5.2833, 4.9759, 5.5147, 5.4081, 5.6234], + device='cuda:0'), covar=tensor([0.0617, 0.0332, 0.0421, 0.0270, 0.0641, 0.0302, 0.0380, 0.0253], + device='cuda:0'), in_proj_covar=tensor([0.0147, 0.0171, 0.0198, 0.0171, 0.0168, 0.0154, 0.0147, 0.0194], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 01:08:41,072 INFO [train.py:892] (0/4) Epoch 30, batch 600, loss[loss=0.1667, simple_loss=0.2481, pruned_loss=0.04267, over 19611.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.2428, pruned_loss=0.04369, over 3756507.09 frames. ], batch size: 46, lr: 5.24e-03, grad_scale: 16.0 +2023-03-29 01:09:49,428 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.185e+02 3.804e+02 4.477e+02 5.601e+02 8.237e+02, threshold=8.953e+02, percent-clipped=0.0 +2023-03-29 01:09:53,066 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-03-29 01:10:46,706 INFO [train.py:892] (0/4) Epoch 30, batch 650, loss[loss=0.1689, simple_loss=0.236, pruned_loss=0.05086, over 19851.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.2427, pruned_loss=0.04358, over 3799371.03 frames. ], batch size: 124, lr: 5.23e-03, grad_scale: 16.0 +2023-03-29 01:11:10,212 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. 
limit=2.0 +2023-03-29 01:12:01,957 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2594, 3.5712, 3.6598, 4.2340, 2.8690, 3.3389, 2.5933, 2.5488], + device='cuda:0'), covar=tensor([0.0511, 0.1973, 0.0886, 0.0374, 0.1978, 0.0937, 0.1414, 0.1694], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0329, 0.0246, 0.0200, 0.0246, 0.0207, 0.0215, 0.0215], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 01:12:06,927 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1608, 4.2923, 2.4456, 4.5195, 4.6817, 1.9706, 3.8998, 3.3905], + device='cuda:0'), covar=tensor([0.0706, 0.0689, 0.2899, 0.0656, 0.0520, 0.2966, 0.1037, 0.0909], + device='cuda:0'), in_proj_covar=tensor([0.0230, 0.0252, 0.0228, 0.0271, 0.0250, 0.0201, 0.0236, 0.0195], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 01:12:28,330 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-03-29 01:12:35,643 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7236, 2.2237, 2.6657, 2.9706, 3.4402, 3.6920, 3.5791, 3.5559], + device='cuda:0'), covar=tensor([0.1048, 0.1733, 0.1308, 0.0746, 0.0424, 0.0266, 0.0391, 0.0452], + device='cuda:0'), in_proj_covar=tensor([0.0160, 0.0172, 0.0178, 0.0152, 0.0135, 0.0132, 0.0123, 0.0116], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 01:12:48,856 INFO [train.py:892] (0/4) Epoch 30, batch 700, loss[loss=0.1488, simple_loss=0.2373, pruned_loss=0.03021, over 19760.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2435, pruned_loss=0.04355, over 3831021.90 frames. ], batch size: 100, lr: 5.23e-03, grad_scale: 16.0 +2023-03-29 01:13:56,006 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3542, 5.6212, 5.6480, 5.5574, 5.3114, 5.6055, 5.0951, 5.1189], + device='cuda:0'), covar=tensor([0.0385, 0.0397, 0.0451, 0.0377, 0.0492, 0.0498, 0.0648, 0.0922], + device='cuda:0'), in_proj_covar=tensor([0.0266, 0.0283, 0.0293, 0.0257, 0.0261, 0.0247, 0.0266, 0.0310], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 01:13:59,623 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.972e+02 4.007e+02 4.588e+02 5.401e+02 1.198e+03, threshold=9.175e+02, percent-clipped=2.0 +2023-03-29 01:14:32,821 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9849, 3.0129, 4.5158, 3.4958, 3.7193, 3.5408, 2.4695, 2.6871], + device='cuda:0'), covar=tensor([0.1025, 0.2890, 0.0503, 0.0981, 0.1666, 0.1351, 0.2521, 0.2610], + device='cuda:0'), in_proj_covar=tensor([0.0352, 0.0388, 0.0348, 0.0284, 0.0372, 0.0372, 0.0371, 0.0340], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 01:14:53,548 INFO [train.py:892] (0/4) Epoch 30, batch 750, loss[loss=0.1388, simple_loss=0.2159, pruned_loss=0.03086, over 19852.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2435, pruned_loss=0.04354, over 3857293.16 frames. ], batch size: 165, lr: 5.23e-03, grad_scale: 16.0 +2023-03-29 01:17:01,213 INFO [train.py:892] (0/4) Epoch 30, batch 800, loss[loss=0.178, simple_loss=0.2682, pruned_loss=0.04387, over 19852.00 frames. 
], tot_loss[loss=0.1656, simple_loss=0.2436, pruned_loss=0.04381, over 3878947.89 frames. ], batch size: 58, lr: 5.23e-03, grad_scale: 16.0 +2023-03-29 01:18:07,888 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.698e+02 3.768e+02 4.525e+02 5.531e+02 9.208e+02, threshold=9.049e+02, percent-clipped=1.0 +2023-03-29 01:19:02,290 INFO [train.py:892] (0/4) Epoch 30, batch 850, loss[loss=0.161, simple_loss=0.2366, pruned_loss=0.04274, over 19765.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2439, pruned_loss=0.04383, over 3894218.68 frames. ], batch size: 217, lr: 5.22e-03, grad_scale: 16.0 +2023-03-29 01:19:48,267 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54664.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 01:20:19,724 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8552, 2.3841, 2.8791, 3.0979, 3.5824, 3.8681, 3.7323, 3.7833], + device='cuda:0'), covar=tensor([0.1028, 0.1671, 0.1262, 0.0698, 0.0423, 0.0288, 0.0406, 0.0435], + device='cuda:0'), in_proj_covar=tensor([0.0158, 0.0169, 0.0176, 0.0150, 0.0134, 0.0130, 0.0121, 0.0114], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 01:21:04,086 INFO [train.py:892] (0/4) Epoch 30, batch 900, loss[loss=0.2125, simple_loss=0.3205, pruned_loss=0.05227, over 18004.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.2431, pruned_loss=0.04331, over 3905608.38 frames. ], batch size: 633, lr: 5.22e-03, grad_scale: 16.0 +2023-03-29 01:21:23,562 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9376, 2.8634, 3.2342, 2.4866, 3.2843, 2.6885, 3.0263, 3.1283], + device='cuda:0'), covar=tensor([0.0666, 0.0553, 0.0519, 0.0825, 0.0413, 0.0502, 0.0480, 0.0354], + device='cuda:0'), in_proj_covar=tensor([0.0078, 0.0085, 0.0083, 0.0111, 0.0079, 0.0082, 0.0080, 0.0071], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 01:22:16,864 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.427e+02 3.830e+02 4.433e+02 5.548e+02 1.086e+03, threshold=8.866e+02, percent-clipped=1.0 +2023-03-29 01:22:22,574 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54725.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 01:23:12,082 INFO [train.py:892] (0/4) Epoch 30, batch 950, loss[loss=0.1465, simple_loss=0.2256, pruned_loss=0.03366, over 19869.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2434, pruned_loss=0.04332, over 3914844.58 frames. ], batch size: 92, lr: 5.22e-03, grad_scale: 16.0 +2023-03-29 01:24:22,237 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9127, 2.9291, 3.2056, 2.5415, 3.2896, 2.7527, 3.0982, 3.1994], + device='cuda:0'), covar=tensor([0.0622, 0.0509, 0.0582, 0.0781, 0.0408, 0.0472, 0.0443, 0.0295], + device='cuda:0'), in_proj_covar=tensor([0.0078, 0.0085, 0.0083, 0.0111, 0.0079, 0.0082, 0.0080, 0.0071], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 01:25:12,162 INFO [train.py:892] (0/4) Epoch 30, batch 1000, loss[loss=0.1677, simple_loss=0.2595, pruned_loss=0.03799, over 19532.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.2432, pruned_loss=0.04321, over 3922133.81 frames. 
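grad_scale steps from 8.0 to 16.0 within epoch 30 and reaches 32.0 in epoch 31; this is the usual dynamic loss-scaling behaviour of fp16 training, where the scale doubles after a long run of overflow-free steps and is halved whenever an inf/nan gradient appears. A minimal sketch with torch.cuda.amp; the growth interval is an assumption, and the recipe may manage the scale itself rather than through GradScaler:

```python
import torch

scaler = torch.cuda.amp.GradScaler(
    init_scale=8.0,       # the grad_scale seen through most of epoch 29
    growth_factor=2.0,    # 8.0 -> 16.0 -> 32.0 steps seen in the log
    backoff_factor=0.5,   # halved on overflow
    growth_interval=2000, # assumed number of clean steps before doubling
)

def training_step(model, optimizer, batch, criterion):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = criterion(model(batch))
    scaler.scale(loss).backward()
    scaler.step(optimizer)  # skips the update if gradients overflowed
    scaler.update()         # grows or backs off the scale
```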
], batch size: 54, lr: 5.22e-03, grad_scale: 16.0 +2023-03-29 01:26:22,823 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.928e+02 3.838e+02 4.641e+02 5.585e+02 1.320e+03, threshold=9.281e+02, percent-clipped=3.0 +2023-03-29 01:27:18,745 INFO [train.py:892] (0/4) Epoch 30, batch 1050, loss[loss=0.179, simple_loss=0.2526, pruned_loss=0.0527, over 19794.00 frames. ], tot_loss[loss=0.1664, simple_loss=0.2449, pruned_loss=0.04396, over 3928031.25 frames. ], batch size: 68, lr: 5.21e-03, grad_scale: 16.0 +2023-03-29 01:27:59,453 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54862.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:29:25,258 INFO [train.py:892] (0/4) Epoch 30, batch 1100, loss[loss=0.1609, simple_loss=0.2472, pruned_loss=0.03725, over 19657.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2428, pruned_loss=0.04331, over 3933767.37 frames. ], batch size: 58, lr: 5.21e-03, grad_scale: 16.0 +2023-03-29 01:30:36,679 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54923.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 01:30:38,517 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.418e+02 3.807e+02 4.386e+02 5.197e+02 8.292e+02, threshold=8.772e+02, percent-clipped=0.0 +2023-03-29 01:31:31,549 INFO [train.py:892] (0/4) Epoch 30, batch 1150, loss[loss=0.1467, simple_loss=0.2222, pruned_loss=0.03558, over 19839.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.243, pruned_loss=0.04365, over 3936903.33 frames. ], batch size: 142, lr: 5.21e-03, grad_scale: 16.0 +2023-03-29 01:31:43,015 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5134, 2.6675, 3.8587, 3.1026, 3.2909, 3.1035, 2.2527, 2.3943], + device='cuda:0'), covar=tensor([0.1125, 0.3161, 0.0673, 0.1053, 0.1661, 0.1533, 0.2629, 0.2760], + device='cuda:0'), in_proj_covar=tensor([0.0352, 0.0387, 0.0348, 0.0285, 0.0372, 0.0374, 0.0372, 0.0340], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 01:32:31,225 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1352, 2.4203, 2.2448, 1.6352, 2.2214, 2.3823, 2.2720, 2.3669], + device='cuda:0'), covar=tensor([0.0477, 0.0345, 0.0320, 0.0630, 0.0397, 0.0327, 0.0316, 0.0295], + device='cuda:0'), in_proj_covar=tensor([0.0101, 0.0096, 0.0098, 0.0101, 0.0104, 0.0086, 0.0085, 0.0086], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 01:33:13,007 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1582, 4.8937, 4.8576, 5.2705, 4.9720, 5.5774, 5.2601, 5.5230], + device='cuda:0'), covar=tensor([0.0795, 0.0512, 0.0621, 0.0481, 0.0734, 0.0374, 0.0762, 0.0453], + device='cuda:0'), in_proj_covar=tensor([0.0150, 0.0175, 0.0202, 0.0174, 0.0172, 0.0157, 0.0149, 0.0198], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 01:33:31,565 INFO [train.py:892] (0/4) Epoch 30, batch 1200, loss[loss=0.1545, simple_loss=0.238, pruned_loss=0.0355, over 19741.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2421, pruned_loss=0.04337, over 3940422.54 frames. 
], batch size: 80, lr: 5.21e-03, grad_scale: 16.0 +2023-03-29 01:34:33,497 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55020.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 01:34:43,798 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.409e+02 3.740e+02 4.299e+02 5.341e+02 1.989e+03, threshold=8.597e+02, percent-clipped=2.0 +2023-03-29 01:35:37,498 INFO [train.py:892] (0/4) Epoch 30, batch 1250, loss[loss=0.1725, simple_loss=0.2427, pruned_loss=0.0511, over 19873.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2412, pruned_loss=0.04325, over 3943519.54 frames. ], batch size: 165, lr: 5.21e-03, grad_scale: 16.0 +2023-03-29 01:35:42,198 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6533, 3.5370, 3.9255, 3.6180, 3.3762, 3.8340, 3.6796, 3.9854], + device='cuda:0'), covar=tensor([0.0865, 0.0437, 0.0400, 0.0408, 0.1277, 0.0557, 0.0508, 0.0406], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0225, 0.0224, 0.0235, 0.0210, 0.0243, 0.0232, 0.0217], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 01:37:26,026 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3352, 3.3328, 5.1023, 3.7276, 4.0983, 3.8458, 2.8322, 2.9454], + device='cuda:0'), covar=tensor([0.0881, 0.2871, 0.0387, 0.0940, 0.1528, 0.1221, 0.2282, 0.2433], + device='cuda:0'), in_proj_covar=tensor([0.0349, 0.0384, 0.0344, 0.0282, 0.0369, 0.0371, 0.0369, 0.0337], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 01:37:48,024 INFO [train.py:892] (0/4) Epoch 30, batch 1300, loss[loss=0.1576, simple_loss=0.2387, pruned_loss=0.03825, over 19640.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2424, pruned_loss=0.04339, over 3942182.72 frames. ], batch size: 79, lr: 5.20e-03, grad_scale: 16.0 +2023-03-29 01:38:55,047 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.487e+02 3.571e+02 4.407e+02 5.506e+02 1.177e+03, threshold=8.814e+02, percent-clipped=1.0 +2023-03-29 01:39:32,219 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.90 vs. limit=5.0 +2023-03-29 01:39:52,086 INFO [train.py:892] (0/4) Epoch 30, batch 1350, loss[loss=0.1587, simple_loss=0.2417, pruned_loss=0.0379, over 19721.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.2427, pruned_loss=0.04341, over 3945401.65 frames. 
], batch size: 54, lr: 5.20e-03, grad_scale: 16.0 +2023-03-29 01:39:57,714 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55148.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:39:57,837 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4365, 2.7669, 2.9183, 3.3294, 2.3866, 2.9703, 2.1764, 2.2066], + device='cuda:0'), covar=tensor([0.0581, 0.1525, 0.1066, 0.0472, 0.2058, 0.0772, 0.1364, 0.1622], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0331, 0.0247, 0.0201, 0.0247, 0.0208, 0.0216, 0.0218], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 01:41:04,294 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3598, 3.3786, 4.9845, 3.8373, 4.0225, 3.7121, 2.6984, 2.8593], + device='cuda:0'), covar=tensor([0.0860, 0.2787, 0.0379, 0.0861, 0.1501, 0.1390, 0.2501, 0.2478], + device='cuda:0'), in_proj_covar=tensor([0.0352, 0.0388, 0.0346, 0.0284, 0.0372, 0.0374, 0.0372, 0.0340], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 01:41:51,206 INFO [train.py:892] (0/4) Epoch 30, batch 1400, loss[loss=0.2431, simple_loss=0.3173, pruned_loss=0.08445, over 19414.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2424, pruned_loss=0.04326, over 3947933.70 frames. ], batch size: 412, lr: 5.20e-03, grad_scale: 16.0 +2023-03-29 01:42:27,848 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55209.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:42:34,470 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55211.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:42:51,450 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55218.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 01:43:04,802 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.260e+02 3.753e+02 4.356e+02 5.536e+02 1.082e+03, threshold=8.713e+02, percent-clipped=2.0 +2023-03-29 01:44:02,196 INFO [train.py:892] (0/4) Epoch 30, batch 1450, loss[loss=0.142, simple_loss=0.2227, pruned_loss=0.0306, over 19557.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2432, pruned_loss=0.04337, over 3945466.66 frames. ], batch size: 60, lr: 5.20e-03, grad_scale: 16.0 +2023-03-29 01:44:55,330 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8687, 3.3495, 3.6605, 3.2654, 3.9599, 4.0427, 4.7184, 5.0944], + device='cuda:0'), covar=tensor([0.0455, 0.1467, 0.1370, 0.2042, 0.1593, 0.1299, 0.0486, 0.0563], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0238, 0.0266, 0.0252, 0.0294, 0.0257, 0.0231, 0.0254], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 01:45:06,662 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55272.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:46:06,664 INFO [train.py:892] (0/4) Epoch 30, batch 1500, loss[loss=0.1477, simple_loss=0.2252, pruned_loss=0.03505, over 19859.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.242, pruned_loss=0.04319, over 3946990.53 frames. 
], batch size: 104, lr: 5.19e-03, grad_scale: 16.0 +2023-03-29 01:47:07,588 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55320.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 01:47:16,425 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.567e+02 3.622e+02 4.364e+02 5.637e+02 1.011e+03, threshold=8.727e+02, percent-clipped=2.0 +2023-03-29 01:47:21,443 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3548, 5.6787, 5.8454, 5.6330, 5.5136, 5.5011, 5.4555, 5.3892], + device='cuda:0'), covar=tensor([0.1335, 0.1223, 0.1013, 0.1310, 0.0678, 0.0846, 0.2100, 0.2023], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0328, 0.0367, 0.0296, 0.0273, 0.0278, 0.0356, 0.0387], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 01:47:38,896 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.3232, 1.8026, 2.1083, 2.5437, 2.7871, 2.8983, 2.8000, 2.7980], + device='cuda:0'), covar=tensor([0.1096, 0.1941, 0.1528, 0.0825, 0.0574, 0.0415, 0.0475, 0.0627], + device='cuda:0'), in_proj_covar=tensor([0.0159, 0.0170, 0.0177, 0.0152, 0.0136, 0.0131, 0.0123, 0.0116], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 01:47:52,592 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0214, 3.8506, 3.8446, 3.6352, 3.9987, 2.8386, 3.2950, 1.8788], + device='cuda:0'), covar=tensor([0.0213, 0.0242, 0.0159, 0.0205, 0.0157, 0.1132, 0.0744, 0.1721], + device='cuda:0'), in_proj_covar=tensor([0.0104, 0.0145, 0.0113, 0.0133, 0.0118, 0.0134, 0.0143, 0.0127], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 01:48:09,059 INFO [train.py:892] (0/4) Epoch 30, batch 1550, loss[loss=0.1589, simple_loss=0.2372, pruned_loss=0.04026, over 19711.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2428, pruned_loss=0.04354, over 3947329.72 frames. ], batch size: 62, lr: 5.19e-03, grad_scale: 16.0 +2023-03-29 01:48:26,196 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.49 vs. limit=5.0 +2023-03-29 01:49:02,300 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55368.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 01:50:11,366 INFO [train.py:892] (0/4) Epoch 30, batch 1600, loss[loss=0.144, simple_loss=0.2198, pruned_loss=0.03412, over 19861.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2425, pruned_loss=0.04332, over 3946868.66 frames. ], batch size: 104, lr: 5.19e-03, grad_scale: 16.0 +2023-03-29 01:51:20,595 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.706e+02 3.762e+02 4.518e+02 5.538e+02 9.560e+02, threshold=9.036e+02, percent-clipped=2.0 +2023-03-29 01:52:18,740 INFO [train.py:892] (0/4) Epoch 30, batch 1650, loss[loss=0.1661, simple_loss=0.2443, pruned_loss=0.04394, over 19810.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.242, pruned_loss=0.04281, over 3948203.94 frames. ], batch size: 98, lr: 5.19e-03, grad_scale: 16.0 +2023-03-29 01:54:24,822 INFO [train.py:892] (0/4) Epoch 30, batch 1700, loss[loss=0.159, simple_loss=0.2327, pruned_loss=0.04266, over 19799.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.242, pruned_loss=0.04284, over 3949621.95 frames. 
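'batch size' swings between roughly 40 and 600 across these entries while per-batch frame counts stay near 19-20k: batches are assembled by total audio duration rather than by count, so many short cuts or a few long ones fill one batch. A duration-capped sketch (the real DynamicBucketingSampler also groups cuts of similar length first; the cap is the max_duration from the startup config, and cuts are assumed to expose a .duration attribute in seconds):

```python
def duration_batches(cuts, max_duration: float = 800.0):
    # Greedily packs cuts until the duration budget is exhausted, which is
    # why batch sizes vary so widely for a fixed compute budget per step.
    batch, total = [], 0.0
    for cut in cuts:
        if batch and total + cut.duration > max_duration:
            yield batch
            batch, total = [], 0.0
        batch.append(cut)
        total += cut.duration
    if batch:
        yield batch
```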
], batch size: 107, lr: 5.18e-03, grad_scale: 16.0 +2023-03-29 01:54:44,137 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55504.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:55:19,202 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55518.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 01:55:22,535 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-29 01:55:32,304 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.784e+02 3.668e+02 4.195e+02 5.159e+02 8.554e+02, threshold=8.391e+02, percent-clipped=0.0 +2023-03-29 01:56:20,114 INFO [train.py:892] (0/4) Epoch 30, batch 1750, loss[loss=0.1627, simple_loss=0.2301, pruned_loss=0.04771, over 19875.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2417, pruned_loss=0.04294, over 3950383.60 frames. ], batch size: 125, lr: 5.18e-03, grad_scale: 16.0 +2023-03-29 01:57:01,857 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55566.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:57:03,789 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55567.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 01:58:06,245 INFO [train.py:892] (0/4) Epoch 30, batch 1800, loss[loss=0.2113, simple_loss=0.2878, pruned_loss=0.06738, over 19656.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2438, pruned_loss=0.04363, over 3947567.69 frames. ], batch size: 57, lr: 5.18e-03, grad_scale: 16.0 +2023-03-29 01:59:02,429 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.794e+02 4.015e+02 4.867e+02 6.116e+02 1.314e+03, threshold=9.734e+02, percent-clipped=8.0 +2023-03-29 01:59:44,975 INFO [train.py:892] (0/4) Epoch 30, batch 1850, loss[loss=0.1641, simple_loss=0.2471, pruned_loss=0.04053, over 19829.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.245, pruned_loss=0.04335, over 3947596.48 frames. ], batch size: 58, lr: 5.18e-03, grad_scale: 16.0 +2023-03-29 01:59:52,685 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-30.pt +2023-03-29 02:00:52,130 INFO [train.py:892] (0/4) Epoch 31, batch 0, loss[loss=0.1444, simple_loss=0.2231, pruned_loss=0.03282, over 19683.00 frames. ], tot_loss[loss=0.1444, simple_loss=0.2231, pruned_loss=0.03282, over 19683.00 frames. ], batch size: 45, lr: 5.09e-03, grad_scale: 16.0 +2023-03-29 02:00:52,132 INFO [train.py:917] (0/4) Computing validation loss +2023-03-29 02:01:29,197 INFO [train.py:926] (0/4) Epoch 31, validation: loss=0.1803, simple_loss=0.2493, pruned_loss=0.05567, over 2883724.00 frames. +2023-03-29 02:01:29,198 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-29 02:02:49,640 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1262, 3.7886, 3.9623, 4.1852, 3.8533, 4.1761, 4.2218, 4.4517], + device='cuda:0'), covar=tensor([0.0702, 0.0483, 0.0567, 0.0388, 0.0752, 0.0545, 0.0464, 0.0348], + device='cuda:0'), in_proj_covar=tensor([0.0151, 0.0176, 0.0201, 0.0174, 0.0172, 0.0157, 0.0148, 0.0197], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 02:03:38,840 INFO [train.py:892] (0/4) Epoch 31, batch 50, loss[loss=0.1523, simple_loss=0.2306, pruned_loss=0.037, over 19837.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.2307, pruned_loss=0.03918, over 892881.74 frames. 
], batch size: 239, lr: 5.09e-03, grad_scale: 16.0 +2023-03-29 02:04:41,988 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.130e+02 3.666e+02 4.158e+02 5.108e+02 9.085e+02, threshold=8.317e+02, percent-clipped=0.0 +2023-03-29 02:05:06,719 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3394, 2.3350, 4.6322, 3.9767, 4.4718, 4.4751, 4.2737, 4.4409], + device='cuda:0'), covar=tensor([0.0808, 0.1451, 0.0140, 0.1035, 0.0180, 0.0274, 0.0237, 0.0167], + device='cuda:0'), in_proj_covar=tensor([0.0100, 0.0104, 0.0089, 0.0155, 0.0086, 0.0098, 0.0092, 0.0087], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 02:05:38,193 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55748.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:05:43,135 INFO [train.py:892] (0/4) Epoch 31, batch 100, loss[loss=0.1675, simple_loss=0.2445, pruned_loss=0.04522, over 19712.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2379, pruned_loss=0.04258, over 1570466.08 frames. ], batch size: 291, lr: 5.09e-03, grad_scale: 16.0 +2023-03-29 02:06:40,670 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8819, 2.9402, 4.2797, 3.3283, 3.5413, 3.4086, 2.4683, 2.6164], + device='cuda:0'), covar=tensor([0.1003, 0.3006, 0.0566, 0.1122, 0.1701, 0.1460, 0.2528, 0.2550], + device='cuda:0'), in_proj_covar=tensor([0.0352, 0.0388, 0.0347, 0.0285, 0.0372, 0.0375, 0.0373, 0.0340], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 02:06:48,075 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6669, 4.3933, 4.4647, 4.6978, 4.3195, 4.8443, 4.8188, 4.9856], + device='cuda:0'), covar=tensor([0.0662, 0.0379, 0.0446, 0.0371, 0.0683, 0.0421, 0.0365, 0.0307], + device='cuda:0'), in_proj_covar=tensor([0.0151, 0.0176, 0.0202, 0.0175, 0.0173, 0.0158, 0.0149, 0.0197], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 02:07:54,495 INFO [train.py:892] (0/4) Epoch 31, batch 150, loss[loss=0.1593, simple_loss=0.2385, pruned_loss=0.04011, over 19906.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2422, pruned_loss=0.04433, over 2097386.03 frames. ], batch size: 50, lr: 5.08e-03, grad_scale: 16.0 +2023-03-29 02:08:04,025 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55804.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:08:16,415 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55809.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:08:43,186 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5462, 4.3207, 4.3687, 4.1224, 4.5544, 3.1690, 3.7672, 2.5361], + device='cuda:0'), covar=tensor([0.0187, 0.0209, 0.0142, 0.0179, 0.0132, 0.0909, 0.0712, 0.1284], + device='cuda:0'), in_proj_covar=tensor([0.0104, 0.0146, 0.0113, 0.0134, 0.0119, 0.0135, 0.0143, 0.0127], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 02:08:48,563 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. 
limit=2.0 +2023-03-29 02:08:54,010 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.568e+02 3.733e+02 4.638e+02 5.672e+02 1.192e+03, threshold=9.276e+02, percent-clipped=1.0 +2023-03-29 02:10:02,249 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55848.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:10:09,554 INFO [train.py:892] (0/4) Epoch 31, batch 200, loss[loss=0.1452, simple_loss=0.2191, pruned_loss=0.03565, over 19832.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2415, pruned_loss=0.04312, over 2508775.75 frames. ], batch size: 93, lr: 5.08e-03, grad_scale: 16.0 +2023-03-29 02:10:12,905 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55852.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:10:39,605 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9733, 2.9645, 3.1382, 2.3949, 3.1972, 2.7148, 3.0654, 3.0629], + device='cuda:0'), covar=tensor([0.0605, 0.0444, 0.0467, 0.0843, 0.0369, 0.0455, 0.0473, 0.0353], + device='cuda:0'), in_proj_covar=tensor([0.0078, 0.0085, 0.0083, 0.0110, 0.0079, 0.0081, 0.0079, 0.0071], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 02:10:53,182 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55867.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:11:20,225 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-03-29 02:12:23,369 INFO [train.py:892] (0/4) Epoch 31, batch 250, loss[loss=0.1275, simple_loss=0.2102, pruned_loss=0.02238, over 19738.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2413, pruned_loss=0.04263, over 2828030.45 frames. ], batch size: 106, lr: 5.08e-03, grad_scale: 16.0 +2023-03-29 02:12:44,113 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55909.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:12:58,911 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55915.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:13:02,147 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7683, 3.5626, 4.0604, 2.9078, 4.1947, 3.3509, 3.5344, 4.0050], + device='cuda:0'), covar=tensor([0.0690, 0.0349, 0.0550, 0.0842, 0.0338, 0.0415, 0.0639, 0.0270], + device='cuda:0'), in_proj_covar=tensor([0.0078, 0.0085, 0.0083, 0.0111, 0.0079, 0.0081, 0.0080, 0.0072], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 02:13:23,200 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.896e+02 3.622e+02 4.377e+02 5.144e+02 8.817e+02, threshold=8.755e+02, percent-clipped=0.0 +2023-03-29 02:14:32,046 INFO [train.py:892] (0/4) Epoch 31, batch 300, loss[loss=0.1605, simple_loss=0.2446, pruned_loss=0.03824, over 19642.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2401, pruned_loss=0.04179, over 3077675.03 frames. ], batch size: 72, lr: 5.08e-03, grad_scale: 16.0 +2023-03-29 02:16:38,843 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-56000.pt +2023-03-29 02:16:45,169 INFO [train.py:892] (0/4) Epoch 31, batch 350, loss[loss=0.1413, simple_loss=0.2079, pruned_loss=0.03731, over 19839.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2395, pruned_loss=0.04077, over 3270138.49 frames. 
], batch size: 144, lr: 5.08e-03, grad_scale: 16.0 +2023-03-29 02:17:11,607 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3792, 4.1859, 4.2076, 3.9530, 4.3927, 3.0731, 3.6198, 2.2077], + device='cuda:0'), covar=tensor([0.0191, 0.0210, 0.0148, 0.0207, 0.0138, 0.0930, 0.0713, 0.1415], + device='cuda:0'), in_proj_covar=tensor([0.0103, 0.0145, 0.0112, 0.0134, 0.0118, 0.0134, 0.0142, 0.0126], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 02:17:36,475 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8400, 2.7741, 1.6975, 3.3392, 3.0031, 3.2145, 3.3266, 2.6588], + device='cuda:0'), covar=tensor([0.0703, 0.0808, 0.1875, 0.0637, 0.0660, 0.0510, 0.0670, 0.0905], + device='cuda:0'), in_proj_covar=tensor([0.0144, 0.0144, 0.0144, 0.0152, 0.0132, 0.0135, 0.0147, 0.0145], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 02:17:41,467 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.448e+02 3.652e+02 4.141e+02 5.135e+02 1.534e+03, threshold=8.281e+02, percent-clipped=2.0 +2023-03-29 02:17:46,301 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6184, 3.7458, 2.2215, 3.8648, 4.0079, 1.8181, 3.3148, 3.0809], + device='cuda:0'), covar=tensor([0.0779, 0.0870, 0.2832, 0.0854, 0.0604, 0.2760, 0.1107, 0.0902], + device='cuda:0'), in_proj_covar=tensor([0.0232, 0.0255, 0.0228, 0.0272, 0.0252, 0.0203, 0.0239, 0.0197], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 02:18:47,913 INFO [train.py:892] (0/4) Epoch 31, batch 400, loss[loss=0.2712, simple_loss=0.3356, pruned_loss=0.1033, over 19456.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2398, pruned_loss=0.04116, over 3419799.75 frames. ], batch size: 431, lr: 5.07e-03, grad_scale: 16.0 +2023-03-29 02:20:53,720 INFO [train.py:892] (0/4) Epoch 31, batch 450, loss[loss=0.2583, simple_loss=0.3375, pruned_loss=0.08953, over 19251.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.241, pruned_loss=0.04184, over 3537613.75 frames. ], batch size: 483, lr: 5.07e-03, grad_scale: 16.0 +2023-03-29 02:21:03,335 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56104.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:21:56,006 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.311e+02 3.637e+02 4.331e+02 5.341e+02 9.053e+02, threshold=8.662e+02, percent-clipped=1.0 +2023-03-29 02:22:46,918 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56145.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:22:54,138 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5069, 3.4074, 3.7461, 3.4678, 3.2559, 3.6829, 3.5668, 3.8123], + device='cuda:0'), covar=tensor([0.0773, 0.0348, 0.0354, 0.0391, 0.1320, 0.0537, 0.0444, 0.0371], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0221, 0.0221, 0.0232, 0.0206, 0.0241, 0.0230, 0.0216], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 02:23:00,004 INFO [train.py:892] (0/4) Epoch 31, batch 500, loss[loss=0.1522, simple_loss=0.2222, pruned_loss=0.04104, over 19779.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2408, pruned_loss=0.04217, over 3630551.66 frames. 
], batch size: 163, lr: 5.07e-03, grad_scale: 16.0 +2023-03-29 02:23:38,396 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7921, 3.0393, 2.6376, 2.2403, 2.7555, 3.0359, 2.9530, 3.0552], + device='cuda:0'), covar=tensor([0.0320, 0.0310, 0.0329, 0.0559, 0.0379, 0.0286, 0.0249, 0.0221], + device='cuda:0'), in_proj_covar=tensor([0.0101, 0.0094, 0.0098, 0.0100, 0.0103, 0.0085, 0.0085, 0.0085], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 02:24:42,817 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4588, 4.5159, 4.8346, 4.6080, 4.7266, 4.3549, 4.5699, 4.3971], + device='cuda:0'), covar=tensor([0.1433, 0.1731, 0.0931, 0.1336, 0.0886, 0.1042, 0.1764, 0.2018], + device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0331, 0.0367, 0.0300, 0.0274, 0.0281, 0.0358, 0.0387], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 02:25:07,710 INFO [train.py:892] (0/4) Epoch 31, batch 550, loss[loss=0.1534, simple_loss=0.2273, pruned_loss=0.03972, over 19875.00 frames. ], tot_loss[loss=0.163, simple_loss=0.241, pruned_loss=0.0425, over 3700065.95 frames. ], batch size: 125, lr: 5.07e-03, grad_scale: 16.0 +2023-03-29 02:25:15,842 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56204.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:25:20,466 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56206.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:26:05,153 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.698e+02 3.907e+02 4.783e+02 5.520e+02 2.240e+03, threshold=9.567e+02, percent-clipped=4.0 +2023-03-29 02:27:16,238 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5321, 2.5614, 1.6460, 2.8955, 2.6516, 2.8100, 2.9387, 2.4089], + device='cuda:0'), covar=tensor([0.0718, 0.0812, 0.1628, 0.0743, 0.0751, 0.0583, 0.0644, 0.0871], + device='cuda:0'), in_proj_covar=tensor([0.0143, 0.0143, 0.0143, 0.0151, 0.0132, 0.0135, 0.0146, 0.0144], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 02:27:17,919 INFO [train.py:892] (0/4) Epoch 31, batch 600, loss[loss=0.1669, simple_loss=0.2461, pruned_loss=0.04382, over 19736.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2409, pruned_loss=0.04222, over 3755122.45 frames. ], batch size: 259, lr: 5.06e-03, grad_scale: 16.0 +2023-03-29 02:29:20,417 INFO [train.py:892] (0/4) Epoch 31, batch 650, loss[loss=0.1746, simple_loss=0.2422, pruned_loss=0.05352, over 19781.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.24, pruned_loss=0.04172, over 3799777.88 frames. ], batch size: 131, lr: 5.06e-03, grad_scale: 32.0 +2023-03-29 02:30:21,238 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.457e+02 3.714e+02 4.450e+02 5.010e+02 8.599e+02, threshold=8.900e+02, percent-clipped=0.0 +2023-03-29 02:31:30,806 INFO [train.py:892] (0/4) Epoch 31, batch 700, loss[loss=0.1445, simple_loss=0.2203, pruned_loss=0.03437, over 19887.00 frames. ], tot_loss[loss=0.1625, simple_loss=0.2405, pruned_loss=0.04223, over 3833448.29 frames. 
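The learning rate in these entries decays smoothly within the epoch (5.07e-03 down to 5.06e-03 across a few hundred batches) and steps down again at each epoch boundary. One schedule with exactly this shape is an Eden-style rule of the kind used in icefall recipes, sketched here with assumed constants lr_batches=5000 and lr_epochs=3.5; treat both the functional form and the numbers as an assumption rather than a read-out of this run:

def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    # Smooth power-law decay in both the batch count and the epoch count;
    # all constants here are assumptions, not values read from this log.
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

With an assumed base_lr of 0.05 this evaluates to roughly 5.0e-03 at epoch 31 and batch counts near 56000, the same order as the logged values.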
], batch size: 92, lr: 5.06e-03, grad_scale: 32.0 +2023-03-29 02:32:43,516 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5635, 4.3053, 4.3760, 4.6087, 4.2331, 4.6951, 4.7024, 4.8880], + device='cuda:0'), covar=tensor([0.0663, 0.0400, 0.0472, 0.0350, 0.0730, 0.0444, 0.0379, 0.0295], + device='cuda:0'), in_proj_covar=tensor([0.0152, 0.0177, 0.0201, 0.0175, 0.0173, 0.0158, 0.0151, 0.0198], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 02:33:34,594 INFO [train.py:892] (0/4) Epoch 31, batch 750, loss[loss=0.1574, simple_loss=0.2437, pruned_loss=0.03551, over 19734.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2407, pruned_loss=0.04276, over 3859730.98 frames. ], batch size: 47, lr: 5.06e-03, grad_scale: 32.0 +2023-03-29 02:33:43,313 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56404.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:34:25,116 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.63 vs. limit=5.0 +2023-03-29 02:34:36,548 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 3.075e+02 3.994e+02 4.748e+02 5.714e+02 1.014e+03, threshold=9.496e+02, percent-clipped=1.0 +2023-03-29 02:35:40,564 INFO [train.py:892] (0/4) Epoch 31, batch 800, loss[loss=0.1708, simple_loss=0.2498, pruned_loss=0.04593, over 19787.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2414, pruned_loss=0.04312, over 3880079.49 frames. ], batch size: 45, lr: 5.06e-03, grad_scale: 32.0 +2023-03-29 02:35:43,791 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56452.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:37:14,352 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-03-29 02:37:49,406 INFO [train.py:892] (0/4) Epoch 31, batch 850, loss[loss=0.1581, simple_loss=0.2424, pruned_loss=0.0369, over 19567.00 frames. ], tot_loss[loss=0.1637, simple_loss=0.2421, pruned_loss=0.0427, over 3893855.69 frames. ], batch size: 53, lr: 5.05e-03, grad_scale: 32.0 +2023-03-29 02:37:50,265 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56501.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:37:57,961 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56504.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:38:41,448 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56523.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 02:38:42,535 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.680e+02 3.782e+02 4.394e+02 5.442e+02 8.928e+02, threshold=8.788e+02, percent-clipped=0.0 +2023-03-29 02:39:38,027 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-03-29 02:39:52,789 INFO [train.py:892] (0/4) Epoch 31, batch 900, loss[loss=0.1608, simple_loss=0.2522, pruned_loss=0.03472, over 19957.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2419, pruned_loss=0.04265, over 3906035.07 frames. 
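At this stage the zipformer.py:625 entries almost always log num_to_drop=0, with only the occasional num_to_drop=1, layers_to_drop={0}. That pattern is consistent with a stochastic per-batch layer dropout whose probability is large while batch_count sits inside a stack's [warmup_begin, warmup_end) window and decays to a small residual value afterwards. A sketch under that assumption; the probabilities are invented for illustration and are not the run's actual settings:

import random

def pick_layers_to_drop(batch_count: float, num_layers: int,
                        warmup_begin: float, warmup_end: float,
                        initial_p: float = 0.5, final_p: float = 0.05):
    # Drop probability: high before/through warmup, small afterwards.
    if batch_count < warmup_begin:
        p = initial_p
    elif batch_count < warmup_end:
        frac = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
        p = initial_p + frac * (final_p - initial_p)   # linear decay
    else:
        p = final_p
    layers_to_drop = {i for i in range(num_layers) if random.random() < p}
    return len(layers_to_drop), layers_to_drop         # num_to_drop, layers_to_drop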
], batch size: 53, lr: 5.05e-03, grad_scale: 32.0 +2023-03-29 02:39:55,999 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56552.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:40:49,364 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9406, 5.0892, 5.3638, 5.1591, 5.1966, 4.9055, 5.1319, 4.9249], + device='cuda:0'), covar=tensor([0.1343, 0.1229, 0.0773, 0.1169, 0.0725, 0.0867, 0.1532, 0.1873], + device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0330, 0.0368, 0.0298, 0.0274, 0.0281, 0.0358, 0.0391], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 02:41:03,223 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8423, 5.0979, 5.1672, 5.0612, 4.7523, 5.1165, 4.5798, 4.6565], + device='cuda:0'), covar=tensor([0.0430, 0.0440, 0.0412, 0.0403, 0.0606, 0.0498, 0.0693, 0.0977], + device='cuda:0'), in_proj_covar=tensor([0.0269, 0.0286, 0.0294, 0.0258, 0.0264, 0.0248, 0.0267, 0.0314], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 02:41:18,509 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56584.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 02:42:00,142 INFO [train.py:892] (0/4) Epoch 31, batch 950, loss[loss=0.1898, simple_loss=0.2719, pruned_loss=0.05385, over 19718.00 frames. ], tot_loss[loss=0.1635, simple_loss=0.2418, pruned_loss=0.04255, over 3915110.82 frames. ], batch size: 305, lr: 5.05e-03, grad_scale: 32.0 +2023-03-29 02:42:56,020 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-29 02:43:01,367 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.678e+02 3.807e+02 4.389e+02 5.044e+02 8.722e+02, threshold=8.778e+02, percent-clipped=0.0 +2023-03-29 02:44:14,701 INFO [train.py:892] (0/4) Epoch 31, batch 1000, loss[loss=0.1661, simple_loss=0.2366, pruned_loss=0.04777, over 19822.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2408, pruned_loss=0.04167, over 3921737.96 frames. ], batch size: 187, lr: 5.05e-03, grad_scale: 32.0 +2023-03-29 02:45:21,237 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2620, 4.5214, 4.9590, 4.4670, 4.1622, 4.7789, 4.5595, 5.1060], + device='cuda:0'), covar=tensor([0.1213, 0.0381, 0.0454, 0.0450, 0.0913, 0.0540, 0.0589, 0.0426], + device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0223, 0.0223, 0.0235, 0.0208, 0.0245, 0.0233, 0.0220], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 02:46:17,942 INFO [train.py:892] (0/4) Epoch 31, batch 1050, loss[loss=0.1895, simple_loss=0.2691, pruned_loss=0.05497, over 19590.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2406, pruned_loss=0.04155, over 3927035.38 frames. ], batch size: 44, lr: 5.04e-03, grad_scale: 32.0 +2023-03-29 02:46:33,758 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. 
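The zipformer.py:1454 dumps above track attn_weights_entropy per attention head, together with running covariance statistics for the projections. As a diagnostic it is the Shannon entropy of each head's attention distribution, averaged over query positions: values near log(num_keys) mean a head attends almost uniformly, values near 0 mean it locks onto a few keys. A minimal sketch, assuming weights of shape (num_heads, num_queries, num_keys) that already sum to 1 over the key axis:

import torch

def attn_weights_entropy(attn_weights: torch.Tensor,
                         eps: float = 1e-20) -> torch.Tensor:
    # attn_weights: (num_heads, num_queries, num_keys), softmax-normalized.
    ent = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
    return ent.mean(dim=-1)   # average over queries -> one entropy per head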
limit=2.0 +2023-03-29 02:47:17,645 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.497e+02 3.728e+02 4.373e+02 5.189e+02 1.039e+03, threshold=8.746e+02, percent-clipped=2.0 +2023-03-29 02:47:20,721 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.3033, 1.8445, 1.9918, 2.6337, 2.8861, 2.9630, 2.8782, 2.9326], + device='cuda:0'), covar=tensor([0.1173, 0.1835, 0.1655, 0.0800, 0.0565, 0.0405, 0.0481, 0.0435], + device='cuda:0'), in_proj_covar=tensor([0.0161, 0.0170, 0.0178, 0.0151, 0.0137, 0.0132, 0.0124, 0.0117], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 02:48:25,858 INFO [train.py:892] (0/4) Epoch 31, batch 1100, loss[loss=0.1936, simple_loss=0.2606, pruned_loss=0.06335, over 19872.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2402, pruned_loss=0.04143, over 3932621.07 frames. ], batch size: 138, lr: 5.04e-03, grad_scale: 32.0 +2023-03-29 02:49:39,987 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5323, 2.5483, 1.6554, 2.9178, 2.6930, 2.7734, 2.9375, 2.2937], + device='cuda:0'), covar=tensor([0.0746, 0.0791, 0.1625, 0.0600, 0.0687, 0.0582, 0.0586, 0.0921], + device='cuda:0'), in_proj_covar=tensor([0.0144, 0.0144, 0.0143, 0.0152, 0.0132, 0.0135, 0.0146, 0.0144], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 02:50:26,115 INFO [train.py:892] (0/4) Epoch 31, batch 1150, loss[loss=0.1834, simple_loss=0.2652, pruned_loss=0.05079, over 19737.00 frames. ], tot_loss[loss=0.1614, simple_loss=0.2399, pruned_loss=0.04145, over 3935635.57 frames. ], batch size: 276, lr: 5.04e-03, grad_scale: 32.0 +2023-03-29 02:50:26,972 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56801.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:50:31,723 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56803.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:51:22,619 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.381e+02 3.820e+02 4.662e+02 5.416e+02 9.191e+02, threshold=9.324e+02, percent-clipped=1.0 +2023-03-29 02:52:23,482 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5183, 2.6001, 2.6691, 2.2044, 2.7728, 2.3799, 2.7308, 2.6535], + device='cuda:0'), covar=tensor([0.0509, 0.0511, 0.0469, 0.0842, 0.0359, 0.0504, 0.0443, 0.0393], + device='cuda:0'), in_proj_covar=tensor([0.0078, 0.0085, 0.0084, 0.0110, 0.0079, 0.0081, 0.0079, 0.0072], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 02:52:27,244 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56849.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:52:31,087 INFO [train.py:892] (0/4) Epoch 31, batch 1200, loss[loss=0.1374, simple_loss=0.2208, pruned_loss=0.02701, over 19750.00 frames. ], tot_loss[loss=0.1621, simple_loss=0.2405, pruned_loss=0.04182, over 3939057.57 frames. 
], batch size: 110, lr: 5.04e-03, grad_scale: 32.0 +2023-03-29 02:53:06,167 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56864.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:53:44,388 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56879.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 02:54:41,989 INFO [train.py:892] (0/4) Epoch 31, batch 1250, loss[loss=0.1839, simple_loss=0.2632, pruned_loss=0.05235, over 19720.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2407, pruned_loss=0.04195, over 3939907.03 frames. ], batch size: 310, lr: 5.04e-03, grad_scale: 32.0 +2023-03-29 02:55:02,821 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56908.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:55:43,170 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.950e+02 3.644e+02 4.260e+02 5.333e+02 1.198e+03, threshold=8.519e+02, percent-clipped=0.0 +2023-03-29 02:56:00,161 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1137, 5.3878, 5.4090, 5.3272, 5.0374, 5.3919, 4.8697, 4.8350], + device='cuda:0'), covar=tensor([0.0422, 0.0457, 0.0457, 0.0411, 0.0603, 0.0466, 0.0701, 0.0991], + device='cuda:0'), in_proj_covar=tensor([0.0266, 0.0283, 0.0291, 0.0255, 0.0261, 0.0246, 0.0264, 0.0310], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 02:56:45,904 INFO [train.py:892] (0/4) Epoch 31, batch 1300, loss[loss=0.1733, simple_loss=0.2405, pruned_loss=0.053, over 19762.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2422, pruned_loss=0.04245, over 3937227.89 frames. ], batch size: 49, lr: 5.03e-03, grad_scale: 32.0 +2023-03-29 02:57:28,947 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56969.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:58:26,981 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56993.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:58:31,593 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56995.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 02:58:45,954 INFO [train.py:892] (0/4) Epoch 31, batch 1350, loss[loss=0.1465, simple_loss=0.2277, pruned_loss=0.03265, over 19877.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2413, pruned_loss=0.04216, over 3941529.48 frames. 
], batch size: 84, lr: 5.03e-03, grad_scale: 32.0 +2023-03-29 02:59:37,591 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.342e+02 3.868e+02 4.711e+02 5.636e+02 9.457e+02, threshold=9.422e+02, percent-clipped=6.0 +2023-03-29 03:00:14,678 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0869, 2.8183, 3.1736, 3.3231, 3.8119, 4.3491, 4.1207, 4.2356], + device='cuda:0'), covar=tensor([0.0911, 0.1521, 0.1329, 0.0668, 0.0395, 0.0215, 0.0376, 0.0329], + device='cuda:0'), in_proj_covar=tensor([0.0159, 0.0168, 0.0177, 0.0150, 0.0136, 0.0131, 0.0123, 0.0115], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 03:00:18,959 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8980, 3.2934, 3.4178, 3.8883, 2.6005, 3.1973, 2.4595, 2.3890], + device='cuda:0'), covar=tensor([0.0603, 0.1697, 0.0949, 0.0455, 0.2080, 0.0911, 0.1352, 0.1685], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0333, 0.0249, 0.0205, 0.0250, 0.0210, 0.0219, 0.0217], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 03:00:41,602 INFO [train.py:892] (0/4) Epoch 31, batch 1400, loss[loss=0.1382, simple_loss=0.2156, pruned_loss=0.0304, over 19746.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2407, pruned_loss=0.04203, over 3944550.52 frames. ], batch size: 118, lr: 5.03e-03, grad_scale: 32.0 +2023-03-29 03:00:50,083 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57054.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:00:54,591 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57056.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:02:44,884 INFO [train.py:892] (0/4) Epoch 31, batch 1450, loss[loss=0.144, simple_loss=0.233, pruned_loss=0.02757, over 19739.00 frames. ], tot_loss[loss=0.1625, simple_loss=0.2407, pruned_loss=0.04213, over 3945933.58 frames. ], batch size: 99, lr: 5.03e-03, grad_scale: 32.0 +2023-03-29 03:03:15,490 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-29 03:03:41,327 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.443e+02 3.544e+02 4.145e+02 5.196e+02 9.297e+02, threshold=8.291e+02, percent-clipped=0.0 +2023-03-29 03:04:44,666 INFO [train.py:892] (0/4) Epoch 31, batch 1500, loss[loss=0.217, simple_loss=0.2869, pruned_loss=0.07352, over 19649.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2402, pruned_loss=0.04218, over 3947309.20 frames. 
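In the train.py:892 entries, loss[...] describes the current batch while tot_loss[...] is a running, frame-weighted summary; the fractional frame counts (e.g. "over 3944550.52 frames") show the totals are decayed sums rather than plain sums of integer frame counts. A sketch of that bookkeeping, with the decay constant assumed for illustration rather than taken from the run:

class RunningLoss:
    # Exponentially decayed, frame-weighted loss average (assumed scheme).
    def __init__(self, decay: float = 0.995):
        self.decay = decay
        self.loss_sum = 0.0   # decayed sum of per-frame loss
        self.frames = 0.0     # matching decayed frame count

    def update(self, loss_per_frame: float, num_frames: float) -> None:
        self.loss_sum = self.decay * self.loss_sum + loss_per_frame * num_frames
        self.frames = self.decay * self.frames + num_frames

    @property
    def value(self) -> float:
        return self.loss_sum / max(self.frames, 1.0)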
], batch size: 330, lr: 5.02e-03, grad_scale: 32.0 +2023-03-29 03:04:57,681 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1277, 3.0963, 4.8822, 3.6095, 3.7621, 3.5346, 2.6139, 2.7742], + device='cuda:0'), covar=tensor([0.0973, 0.3256, 0.0437, 0.0979, 0.1812, 0.1384, 0.2478, 0.2511], + device='cuda:0'), in_proj_covar=tensor([0.0349, 0.0386, 0.0345, 0.0283, 0.0369, 0.0373, 0.0370, 0.0341], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 03:05:07,588 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57159.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:05:55,094 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57179.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 03:06:12,871 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6684, 3.8036, 2.5858, 4.4370, 3.9256, 4.3336, 4.3994, 3.4213], + device='cuda:0'), covar=tensor([0.0593, 0.0539, 0.1256, 0.0863, 0.0515, 0.0423, 0.0632, 0.0745], + device='cuda:0'), in_proj_covar=tensor([0.0144, 0.0144, 0.0143, 0.0152, 0.0133, 0.0136, 0.0147, 0.0145], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 03:06:12,876 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57186.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:06:41,393 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57197.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:06:49,032 INFO [train.py:892] (0/4) Epoch 31, batch 1550, loss[loss=0.1601, simple_loss=0.2411, pruned_loss=0.03956, over 19661.00 frames. ], tot_loss[loss=0.1621, simple_loss=0.2406, pruned_loss=0.04181, over 3948616.35 frames. ], batch size: 50, lr: 5.02e-03, grad_scale: 32.0 +2023-03-29 03:07:43,721 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.504e+02 3.735e+02 4.518e+02 5.755e+02 1.077e+03, threshold=9.035e+02, percent-clipped=5.0 +2023-03-29 03:07:52,528 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57227.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 03:08:30,310 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57244.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:08:38,465 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57247.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:08:46,359 INFO [train.py:892] (0/4) Epoch 31, batch 1600, loss[loss=0.1606, simple_loss=0.2333, pruned_loss=0.04391, over 19800.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2404, pruned_loss=0.04162, over 3949332.40 frames. ], batch size: 150, lr: 5.02e-03, grad_scale: 32.0 +2023-03-29 03:09:02,944 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57258.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 03:09:20,478 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57264.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:10:45,182 INFO [train.py:892] (0/4) Epoch 31, batch 1650, loss[loss=0.1484, simple_loss=0.2289, pruned_loss=0.03392, over 19775.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2407, pruned_loss=0.04202, over 3949454.04 frames. 
], batch size: 233, lr: 5.02e-03, grad_scale: 32.0 +2023-03-29 03:10:58,447 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57305.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:11:04,841 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4948, 4.5612, 2.7199, 4.8018, 5.0101, 2.2187, 4.3522, 3.7160], + device='cuda:0'), covar=tensor([0.0580, 0.0829, 0.2540, 0.0625, 0.0447, 0.2643, 0.0793, 0.0816], + device='cuda:0'), in_proj_covar=tensor([0.0232, 0.0257, 0.0230, 0.0274, 0.0254, 0.0203, 0.0239, 0.0197], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 03:11:38,982 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.517e+02 3.692e+02 4.492e+02 5.232e+02 9.843e+02, threshold=8.984e+02, percent-clipped=1.0 +2023-03-29 03:11:39,884 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5546, 4.4111, 4.8864, 4.4780, 4.0436, 4.6682, 4.5570, 5.0047], + device='cuda:0'), covar=tensor([0.0819, 0.0383, 0.0354, 0.0384, 0.0972, 0.0554, 0.0434, 0.0303], + device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0225, 0.0227, 0.0238, 0.0211, 0.0248, 0.0235, 0.0222], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 03:12:37,101 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57349.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:12:40,552 INFO [train.py:892] (0/4) Epoch 31, batch 1700, loss[loss=0.1488, simple_loss=0.2233, pruned_loss=0.03716, over 19846.00 frames. ], tot_loss[loss=0.1621, simple_loss=0.2404, pruned_loss=0.04185, over 3950238.41 frames. ], batch size: 104, lr: 5.02e-03, grad_scale: 32.0 +2023-03-29 03:12:41,503 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57351.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:13:50,849 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7261, 3.8426, 2.3655, 3.9931, 4.1148, 1.8770, 3.4006, 3.1991], + device='cuda:0'), covar=tensor([0.0796, 0.0883, 0.2744, 0.0868, 0.0629, 0.2921, 0.1137, 0.0867], + device='cuda:0'), in_proj_covar=tensor([0.0233, 0.0258, 0.0230, 0.0276, 0.0254, 0.0204, 0.0239, 0.0198], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 03:14:26,227 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57398.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:14:31,711 INFO [train.py:892] (0/4) Epoch 31, batch 1750, loss[loss=0.1475, simple_loss=0.2297, pruned_loss=0.03261, over 19780.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2401, pruned_loss=0.04176, over 3950515.71 frames. 
], batch size: 53, lr: 5.01e-03, grad_scale: 32.0 +2023-03-29 03:15:20,066 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.125e+02 3.770e+02 4.540e+02 5.122e+02 1.411e+03, threshold=9.080e+02, percent-clipped=2.0 +2023-03-29 03:15:34,154 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0782, 2.3727, 4.1767, 3.6391, 4.0259, 4.1628, 3.9843, 3.9400], + device='cuda:0'), covar=tensor([0.0586, 0.1011, 0.0106, 0.0570, 0.0140, 0.0205, 0.0170, 0.0167], + device='cuda:0'), in_proj_covar=tensor([0.0099, 0.0103, 0.0089, 0.0153, 0.0086, 0.0098, 0.0090, 0.0086], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 03:16:11,491 INFO [train.py:892] (0/4) Epoch 31, batch 1800, loss[loss=0.1913, simple_loss=0.273, pruned_loss=0.05484, over 19718.00 frames. ], tot_loss[loss=0.1616, simple_loss=0.2399, pruned_loss=0.04161, over 3951277.43 frames. ], batch size: 295, lr: 5.01e-03, grad_scale: 16.0 +2023-03-29 03:16:27,949 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57459.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:16:28,003 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57459.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:16:33,444 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57462.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:17:33,902 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57495.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:17:44,837 INFO [train.py:892] (0/4) Epoch 31, batch 1850, loss[loss=0.1578, simple_loss=0.2473, pruned_loss=0.03419, over 19828.00 frames. ], tot_loss[loss=0.1625, simple_loss=0.2416, pruned_loss=0.04173, over 3950720.55 frames. ], batch size: 57, lr: 5.01e-03, grad_scale: 16.0 +2023-03-29 03:17:48,925 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.17 vs. limit=5.0 +2023-03-29 03:17:52,844 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-31.pt +2023-03-29 03:18:57,344 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4558, 3.3718, 3.2837, 3.0447, 3.4522, 2.6534, 2.7196, 1.6161], + device='cuda:0'), covar=tensor([0.0246, 0.0253, 0.0180, 0.0232, 0.0185, 0.1349, 0.0718, 0.1895], + device='cuda:0'), in_proj_covar=tensor([0.0105, 0.0147, 0.0114, 0.0135, 0.0119, 0.0136, 0.0143, 0.0128], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 03:18:58,448 INFO [train.py:892] (0/4) Epoch 32, batch 0, loss[loss=0.2437, simple_loss=0.3174, pruned_loss=0.08496, over 19416.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.3174, pruned_loss=0.08496, over 19416.00 frames. ], batch size: 412, lr: 4.93e-03, grad_scale: 16.0 +2023-03-29 03:18:58,449 INFO [train.py:917] (0/4) Computing validation loss +2023-03-29 03:19:34,344 INFO [train.py:926] (0/4) Epoch 32, validation: loss=0.1821, simple_loss=0.2499, pruned_loss=0.05717, over 2883724.00 frames. 
+2023-03-29 03:19:34,346 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-29 03:19:37,504 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57507.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:20:17,631 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57523.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:20:21,037 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.254e+02 3.434e+02 4.255e+02 5.001e+02 8.278e+02, threshold=8.509e+02, percent-clipped=0.0 +2023-03-29 03:21:01,563 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57542.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:21:28,465 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57553.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 03:21:33,962 INFO [train.py:892] (0/4) Epoch 32, batch 50, loss[loss=0.1424, simple_loss=0.2248, pruned_loss=0.02998, over 19732.00 frames. ], tot_loss[loss=0.1557, simple_loss=0.2333, pruned_loss=0.03907, over 891043.97 frames. ], batch size: 80, lr: 4.93e-03, grad_scale: 16.0 +2023-03-29 03:21:35,033 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57556.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:21:53,834 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57564.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:21:58,212 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8663, 2.7858, 1.8512, 3.3079, 3.0417, 3.2532, 3.3304, 2.6420], + device='cuda:0'), covar=tensor([0.0642, 0.0723, 0.1668, 0.0701, 0.0649, 0.0483, 0.0623, 0.0840], + device='cuda:0'), in_proj_covar=tensor([0.0144, 0.0144, 0.0143, 0.0153, 0.0133, 0.0135, 0.0147, 0.0145], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 03:23:18,872 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57600.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:23:31,211 INFO [train.py:892] (0/4) Epoch 32, batch 100, loss[loss=0.1619, simple_loss=0.2389, pruned_loss=0.04245, over 19868.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.234, pruned_loss=0.03851, over 1568506.87 frames. 
], batch size: 122, lr: 4.92e-03, grad_scale: 16.0 +2023-03-29 03:23:47,409 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57612.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:24:16,915 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7593, 2.6859, 2.9681, 2.6575, 3.1319, 3.0758, 3.6789, 3.9335], + device='cuda:0'), covar=tensor([0.0633, 0.1736, 0.1532, 0.2143, 0.1648, 0.1516, 0.0611, 0.0628], + device='cuda:0'), in_proj_covar=tensor([0.0255, 0.0241, 0.0270, 0.0255, 0.0298, 0.0259, 0.0232, 0.0258], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 03:24:17,736 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.599e+02 3.672e+02 4.484e+02 5.551e+02 1.135e+03, threshold=8.969e+02, percent-clipped=1.0 +2023-03-29 03:25:12,257 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57649.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:25:16,294 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57651.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:25:20,347 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9370, 4.7780, 5.3973, 4.9012, 4.2990, 5.1057, 5.0177, 5.5830], + device='cuda:0'), covar=tensor([0.0867, 0.0389, 0.0343, 0.0379, 0.0811, 0.0496, 0.0473, 0.0280], + device='cuda:0'), in_proj_covar=tensor([0.0288, 0.0223, 0.0224, 0.0235, 0.0209, 0.0245, 0.0232, 0.0219], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 03:25:27,048 INFO [train.py:892] (0/4) Epoch 32, batch 150, loss[loss=0.1443, simple_loss=0.2231, pruned_loss=0.03278, over 19872.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2358, pruned_loss=0.03966, over 2096308.67 frames. ], batch size: 108, lr: 4.92e-03, grad_scale: 16.0 +2023-03-29 03:26:16,736 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.61 vs. limit=5.0 +2023-03-29 03:27:01,601 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-03-29 03:27:07,420 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57697.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:27:11,776 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57699.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:27:27,825 INFO [train.py:892] (0/4) Epoch 32, batch 200, loss[loss=0.1559, simple_loss=0.2328, pruned_loss=0.03955, over 19877.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.238, pruned_loss=0.04013, over 2506299.55 frames. ], batch size: 139, lr: 4.92e-03, grad_scale: 16.0 +2023-03-29 03:28:10,868 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.596e+02 3.546e+02 4.264e+02 5.379e+02 8.670e+02, threshold=8.529e+02, percent-clipped=0.0 +2023-03-29 03:28:30,905 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57733.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:29:16,456 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57754.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:29:19,482 INFO [train.py:892] (0/4) Epoch 32, batch 250, loss[loss=0.1366, simple_loss=0.2119, pruned_loss=0.03069, over 19781.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2376, pruned_loss=0.0398, over 2826625.79 frames. 
], batch size: 131, lr: 4.92e-03, grad_scale: 16.0 +2023-03-29 03:29:47,835 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-29 03:30:45,538 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2264, 5.5460, 5.5783, 5.4568, 5.2447, 5.5401, 5.0121, 5.0174], + device='cuda:0'), covar=tensor([0.0422, 0.0420, 0.0418, 0.0404, 0.0538, 0.0450, 0.0633, 0.0920], + device='cuda:0'), in_proj_covar=tensor([0.0272, 0.0288, 0.0300, 0.0264, 0.0266, 0.0251, 0.0270, 0.0315], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 03:30:50,052 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57794.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:31:18,735 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.3605, 2.4325, 2.4868, 2.5574, 2.4140, 2.5210, 2.3711, 2.4951], + device='cuda:0'), covar=tensor([0.0368, 0.0344, 0.0339, 0.0281, 0.0415, 0.0349, 0.0409, 0.0328], + device='cuda:0'), in_proj_covar=tensor([0.0087, 0.0081, 0.0084, 0.0078, 0.0091, 0.0084, 0.0100, 0.0073], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 03:31:19,681 INFO [train.py:892] (0/4) Epoch 32, batch 300, loss[loss=0.2052, simple_loss=0.2722, pruned_loss=0.06908, over 19810.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2393, pruned_loss=0.04053, over 3075455.86 frames. ], batch size: 126, lr: 4.92e-03, grad_scale: 16.0 +2023-03-29 03:31:40,265 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-03-29 03:31:50,526 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57818.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:32:05,155 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.255e+02 3.451e+02 4.259e+02 5.251e+02 1.158e+03, threshold=8.517e+02, percent-clipped=3.0 +2023-03-29 03:32:47,918 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6550, 4.6597, 5.0143, 4.8318, 4.8902, 4.5116, 4.7748, 4.5820], + device='cuda:0'), covar=tensor([0.1502, 0.1607, 0.0862, 0.1261, 0.0865, 0.0914, 0.1768, 0.2175], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0328, 0.0365, 0.0295, 0.0273, 0.0280, 0.0355, 0.0387], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 03:32:48,042 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57842.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:33:09,052 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57851.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:33:13,461 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57853.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 03:33:19,328 INFO [train.py:892] (0/4) Epoch 32, batch 350, loss[loss=0.1497, simple_loss=0.2184, pruned_loss=0.04054, over 19777.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2395, pruned_loss=0.04107, over 3270114.73 frames. 
], batch size: 163, lr: 4.91e-03, grad_scale: 16.0 +2023-03-29 03:34:39,374 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57890.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:35:00,912 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57900.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:35:02,816 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57901.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:35:14,070 INFO [train.py:892] (0/4) Epoch 32, batch 400, loss[loss=0.1454, simple_loss=0.2139, pruned_loss=0.03842, over 19842.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2407, pruned_loss=0.04162, over 3420287.42 frames. ], batch size: 144, lr: 4.91e-03, grad_scale: 16.0 +2023-03-29 03:35:17,096 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2213, 4.9219, 4.9315, 5.2668, 4.9351, 5.4446, 5.3884, 5.5605], + device='cuda:0'), covar=tensor([0.0662, 0.0391, 0.0446, 0.0374, 0.0648, 0.0415, 0.0357, 0.0301], + device='cuda:0'), in_proj_covar=tensor([0.0152, 0.0177, 0.0202, 0.0175, 0.0174, 0.0159, 0.0150, 0.0198], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 03:35:22,105 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-03-29 03:35:59,242 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.499e+02 3.927e+02 4.614e+02 5.637e+02 8.834e+02, threshold=9.228e+02, percent-clipped=1.0 +2023-03-29 03:36:18,475 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-03-29 03:36:32,037 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.2353, 2.2418, 2.2972, 2.3668, 2.2678, 2.3491, 2.2778, 2.3730], + device='cuda:0'), covar=tensor([0.0428, 0.0375, 0.0357, 0.0328, 0.0500, 0.0334, 0.0502, 0.0360], + device='cuda:0'), in_proj_covar=tensor([0.0086, 0.0081, 0.0083, 0.0078, 0.0091, 0.0083, 0.0100, 0.0073], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 03:36:53,627 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57948.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:37:11,506 INFO [train.py:892] (0/4) Epoch 32, batch 450, loss[loss=0.1501, simple_loss=0.2344, pruned_loss=0.03286, over 19853.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2421, pruned_loss=0.04286, over 3538019.82 frames. ], batch size: 60, lr: 4.91e-03, grad_scale: 16.0 +2023-03-29 03:38:52,697 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-58000.pt +2023-03-29 03:39:11,306 INFO [train.py:892] (0/4) Epoch 32, batch 500, loss[loss=0.1514, simple_loss=0.2278, pruned_loss=0.03751, over 19651.00 frames. ], tot_loss[loss=0.1632, simple_loss=0.2412, pruned_loss=0.04261, over 3630516.05 frames. ], batch size: 67, lr: 4.91e-03, grad_scale: 16.0 +2023-03-29 03:39:55,699 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.669e+02 3.901e+02 4.425e+02 5.266e+02 9.447e+02, threshold=8.850e+02, percent-clipped=1.0 +2023-03-29 03:41:02,885 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58054.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:41:07,806 INFO [train.py:892] (0/4) Epoch 32, batch 550, loss[loss=0.1457, simple_loss=0.2174, pruned_loss=0.03697, over 19857.00 frames. 
], tot_loss[loss=0.1622, simple_loss=0.2403, pruned_loss=0.04201, over 3702357.64 frames. ], batch size: 158, lr: 4.91e-03, grad_scale: 16.0 +2023-03-29 03:41:26,328 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5559, 2.6968, 4.5132, 3.8368, 4.3246, 4.4898, 4.3329, 4.2317], + device='cuda:0'), covar=tensor([0.0519, 0.0982, 0.0105, 0.0842, 0.0147, 0.0219, 0.0154, 0.0159], + device='cuda:0'), in_proj_covar=tensor([0.0099, 0.0103, 0.0088, 0.0152, 0.0085, 0.0098, 0.0090, 0.0085], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 03:41:35,640 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8953, 5.2087, 5.2577, 5.2039, 4.8335, 5.2309, 4.7627, 4.7489], + device='cuda:0'), covar=tensor([0.0479, 0.0507, 0.0535, 0.0444, 0.0651, 0.0535, 0.0693, 0.1082], + device='cuda:0'), in_proj_covar=tensor([0.0271, 0.0287, 0.0300, 0.0262, 0.0266, 0.0251, 0.0269, 0.0315], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 03:42:24,159 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58089.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:42:56,941 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58102.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:43:03,378 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0686, 2.6393, 4.1363, 3.6747, 4.0002, 4.1403, 3.9618, 3.8907], + device='cuda:0'), covar=tensor([0.0568, 0.0902, 0.0104, 0.0602, 0.0153, 0.0227, 0.0171, 0.0169], + device='cuda:0'), in_proj_covar=tensor([0.0098, 0.0102, 0.0088, 0.0151, 0.0085, 0.0097, 0.0089, 0.0085], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 03:43:04,382 INFO [train.py:892] (0/4) Epoch 32, batch 600, loss[loss=0.1655, simple_loss=0.2381, pruned_loss=0.04649, over 19816.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2399, pruned_loss=0.04113, over 3756585.50 frames. 
], batch size: 132, lr: 4.90e-03, grad_scale: 16.0 +2023-03-29 03:43:34,198 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58118.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:43:48,666 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.610e+02 3.618e+02 4.223e+02 5.011e+02 6.745e+02, threshold=8.447e+02, percent-clipped=0.0 +2023-03-29 03:44:27,776 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58141.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:44:33,754 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58144.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:44:49,558 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58151.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:44:59,379 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9785, 3.3326, 2.9475, 2.4927, 2.8434, 3.3218, 3.2639, 3.2640], + device='cuda:0'), covar=tensor([0.0387, 0.0328, 0.0298, 0.0554, 0.0404, 0.0273, 0.0231, 0.0216], + device='cuda:0'), in_proj_covar=tensor([0.0105, 0.0099, 0.0101, 0.0104, 0.0107, 0.0089, 0.0089, 0.0089], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 03:45:00,227 INFO [train.py:892] (0/4) Epoch 32, batch 650, loss[loss=0.1624, simple_loss=0.2377, pruned_loss=0.04359, over 19706.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2396, pruned_loss=0.04114, over 3798938.21 frames. ], batch size: 60, lr: 4.90e-03, grad_scale: 16.0 +2023-03-29 03:45:21,019 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58166.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:46:26,152 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7776, 3.0210, 2.9369, 3.0799, 2.8583, 2.8986, 2.8491, 3.1360], + device='cuda:0'), covar=tensor([0.0358, 0.0299, 0.0306, 0.0207, 0.0393, 0.0306, 0.0350, 0.0369], + device='cuda:0'), in_proj_covar=tensor([0.0086, 0.0080, 0.0083, 0.0077, 0.0090, 0.0083, 0.0099, 0.0072], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 03:46:36,740 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58199.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:46:43,672 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58202.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:46:51,897 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58205.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:46:52,984 INFO [train.py:892] (0/4) Epoch 32, batch 700, loss[loss=0.1403, simple_loss=0.2195, pruned_loss=0.0305, over 19811.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2383, pruned_loss=0.04059, over 3834185.27 frames. ], batch size: 117, lr: 4.90e-03, grad_scale: 16.0 +2023-03-29 03:47:39,126 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.613e+02 3.735e+02 4.349e+02 5.105e+02 9.277e+02, threshold=8.697e+02, percent-clipped=1.0 +2023-03-29 03:47:49,567 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-03-29 03:48:54,101 INFO [train.py:892] (0/4) Epoch 32, batch 750, loss[loss=0.1561, simple_loss=0.242, pruned_loss=0.03507, over 19868.00 frames. ], tot_loss[loss=0.1612, simple_loss=0.2402, pruned_loss=0.04107, over 3857203.05 frames. 
], batch size: 89, lr: 4.90e-03, grad_scale: 16.0 +2023-03-29 03:49:26,687 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.9325, 2.1495, 1.9575, 1.3758, 2.0292, 2.1486, 2.0229, 2.0508], + device='cuda:0'), covar=tensor([0.0434, 0.0333, 0.0363, 0.0627, 0.0420, 0.0304, 0.0332, 0.0333], + device='cuda:0'), in_proj_covar=tensor([0.0105, 0.0098, 0.0101, 0.0104, 0.0107, 0.0089, 0.0089, 0.0088], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 03:50:01,008 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7369, 3.8356, 2.3921, 4.6538, 4.0172, 4.4437, 4.5373, 3.5467], + device='cuda:0'), covar=tensor([0.0574, 0.0565, 0.1604, 0.0412, 0.0524, 0.0381, 0.0400, 0.0719], + device='cuda:0'), in_proj_covar=tensor([0.0145, 0.0145, 0.0144, 0.0154, 0.0135, 0.0137, 0.0149, 0.0146], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 03:50:37,995 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=5.06 vs. limit=5.0 +2023-03-29 03:50:47,189 INFO [train.py:892] (0/4) Epoch 32, batch 800, loss[loss=0.1494, simple_loss=0.238, pruned_loss=0.03041, over 19661.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2397, pruned_loss=0.04093, over 3877289.66 frames. ], batch size: 50, lr: 4.90e-03, grad_scale: 16.0 +2023-03-29 03:51:31,607 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.700e+02 3.818e+02 4.452e+02 5.202e+02 1.002e+03, threshold=8.904e+02, percent-clipped=2.0 +2023-03-29 03:52:43,173 INFO [train.py:892] (0/4) Epoch 32, batch 850, loss[loss=0.1322, simple_loss=0.2128, pruned_loss=0.0258, over 19774.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2395, pruned_loss=0.04072, over 3892607.86 frames. 
], batch size: 108, lr: 4.89e-03, grad_scale: 16.0 +2023-03-29 03:52:55,558 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4489, 3.5163, 2.2421, 4.3466, 3.7974, 4.2063, 4.2698, 3.2815], + device='cuda:0'), covar=tensor([0.0603, 0.0578, 0.1491, 0.0487, 0.0586, 0.0445, 0.0539, 0.0806], + device='cuda:0'), in_proj_covar=tensor([0.0144, 0.0145, 0.0143, 0.0153, 0.0135, 0.0136, 0.0148, 0.0146], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 03:52:59,463 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9957, 5.2385, 5.3189, 5.2140, 4.9177, 5.2710, 4.7658, 4.7943], + device='cuda:0'), covar=tensor([0.0468, 0.0490, 0.0476, 0.0445, 0.0598, 0.0506, 0.0674, 0.0976], + device='cuda:0'), in_proj_covar=tensor([0.0269, 0.0286, 0.0299, 0.0260, 0.0264, 0.0249, 0.0268, 0.0313], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 03:52:59,633 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.3476, 2.3620, 2.4458, 2.4203, 2.4403, 2.4863, 2.5144, 2.5607], + device='cuda:0'), covar=tensor([0.0376, 0.0364, 0.0366, 0.0309, 0.0433, 0.0325, 0.0391, 0.0374], + device='cuda:0'), in_proj_covar=tensor([0.0087, 0.0081, 0.0084, 0.0078, 0.0091, 0.0083, 0.0100, 0.0073], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 03:53:47,062 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.0493, 1.9643, 3.1888, 2.6396, 3.2223, 3.2758, 3.0191, 3.1284], + device='cuda:0'), covar=tensor([0.1165, 0.1342, 0.0167, 0.0485, 0.0213, 0.0314, 0.0285, 0.0249], + device='cuda:0'), in_proj_covar=tensor([0.0099, 0.0104, 0.0088, 0.0153, 0.0086, 0.0098, 0.0090, 0.0086], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 03:53:59,994 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58389.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:54:31,651 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8013, 2.7831, 1.7980, 3.2675, 2.9527, 3.1642, 3.2701, 2.6088], + device='cuda:0'), covar=tensor([0.0737, 0.0845, 0.1710, 0.0764, 0.0762, 0.0559, 0.0635, 0.0933], + device='cuda:0'), in_proj_covar=tensor([0.0145, 0.0146, 0.0144, 0.0154, 0.0136, 0.0136, 0.0149, 0.0146], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 03:54:39,276 INFO [train.py:892] (0/4) Epoch 32, batch 900, loss[loss=0.1411, simple_loss=0.2208, pruned_loss=0.03071, over 19853.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.24, pruned_loss=0.04082, over 3904048.91 frames. 
], batch size: 106, lr: 4.89e-03, grad_scale: 16.0 +2023-03-29 03:55:16,545 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1992, 2.8897, 3.2462, 2.9042, 3.4246, 3.4212, 3.9765, 4.3846], + device='cuda:0'), covar=tensor([0.0541, 0.1647, 0.1613, 0.2076, 0.1637, 0.1381, 0.0646, 0.0555], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0241, 0.0270, 0.0255, 0.0298, 0.0258, 0.0234, 0.0258], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 03:55:21,926 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.766e+02 3.700e+02 4.222e+02 4.872e+02 9.847e+02, threshold=8.445e+02, percent-clipped=1.0 +2023-03-29 03:55:51,442 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58437.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:56:34,995 INFO [train.py:892] (0/4) Epoch 32, batch 950, loss[loss=0.143, simple_loss=0.2146, pruned_loss=0.03568, over 19865.00 frames. ], tot_loss[loss=0.1612, simple_loss=0.2404, pruned_loss=0.04099, over 3914619.50 frames. ], batch size: 157, lr: 4.89e-03, grad_scale: 16.0 +2023-03-29 03:58:05,377 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58497.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:58:11,645 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58500.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 03:58:24,183 INFO [train.py:892] (0/4) Epoch 32, batch 1000, loss[loss=0.1499, simple_loss=0.2214, pruned_loss=0.03927, over 19797.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.2395, pruned_loss=0.04094, over 3923530.86 frames. ], batch size: 168, lr: 4.89e-03, grad_scale: 16.0 +2023-03-29 03:59:09,106 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.374e+02 3.686e+02 4.298e+02 5.253e+02 9.198e+02, threshold=8.596e+02, percent-clipped=1.0 +2023-03-29 03:59:36,637 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4098, 2.4000, 2.5875, 2.4470, 2.4964, 2.5494, 2.4427, 2.5930], + device='cuda:0'), covar=tensor([0.0361, 0.0399, 0.0345, 0.0317, 0.0428, 0.0372, 0.0473, 0.0335], + device='cuda:0'), in_proj_covar=tensor([0.0087, 0.0081, 0.0083, 0.0079, 0.0090, 0.0083, 0.0100, 0.0073], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 04:00:00,879 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.82 vs. limit=5.0 +2023-03-29 04:00:20,490 INFO [train.py:892] (0/4) Epoch 32, batch 1050, loss[loss=0.166, simple_loss=0.242, pruned_loss=0.04502, over 19762.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2391, pruned_loss=0.04094, over 3929366.93 frames. ], batch size: 179, lr: 4.88e-03, grad_scale: 16.0 +2023-03-29 04:00:22,043 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-03-29 04:02:18,313 INFO [train.py:892] (0/4) Epoch 32, batch 1100, loss[loss=0.1594, simple_loss=0.2413, pruned_loss=0.03877, over 19645.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2397, pruned_loss=0.04098, over 3934431.27 frames. 
], batch size: 72, lr: 4.88e-03, grad_scale: 16.0 +2023-03-29 04:03:01,450 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.871e+02 3.808e+02 4.430e+02 5.351e+02 1.082e+03, threshold=8.860e+02, percent-clipped=2.0 +2023-03-29 04:04:08,055 INFO [train.py:892] (0/4) Epoch 32, batch 1150, loss[loss=0.1879, simple_loss=0.2673, pruned_loss=0.05424, over 19754.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.24, pruned_loss=0.04111, over 3937243.70 frames. ], batch size: 256, lr: 4.88e-03, grad_scale: 16.0 +2023-03-29 04:05:01,338 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4738, 1.9908, 2.2490, 2.7821, 3.0699, 3.2159, 3.0372, 3.1640], + device='cuda:0'), covar=tensor([0.1084, 0.1696, 0.1518, 0.0708, 0.0559, 0.0368, 0.0508, 0.0474], + device='cuda:0'), in_proj_covar=tensor([0.0161, 0.0168, 0.0178, 0.0151, 0.0137, 0.0133, 0.0125, 0.0117], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 04:05:05,346 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58681.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:05:38,887 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2625, 4.3328, 4.6315, 4.4436, 4.5853, 4.2076, 4.3929, 4.1675], + device='cuda:0'), covar=tensor([0.1483, 0.1778, 0.0998, 0.1291, 0.0882, 0.1084, 0.1927, 0.2225], + device='cuda:0'), in_proj_covar=tensor([0.0299, 0.0341, 0.0375, 0.0302, 0.0279, 0.0289, 0.0366, 0.0397], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 04:06:02,800 INFO [train.py:892] (0/4) Epoch 32, batch 1200, loss[loss=0.1543, simple_loss=0.2377, pruned_loss=0.03543, over 19593.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2398, pruned_loss=0.04091, over 3940242.03 frames. 
], batch size: 44, lr: 4.88e-03, grad_scale: 16.0 +2023-03-29 04:06:12,668 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3247, 3.5778, 3.1855, 2.6577, 3.2827, 3.5463, 3.4288, 3.5296], + device='cuda:0'), covar=tensor([0.0249, 0.0285, 0.0261, 0.0485, 0.0275, 0.0193, 0.0230, 0.0184], + device='cuda:0'), in_proj_covar=tensor([0.0105, 0.0098, 0.0100, 0.0103, 0.0106, 0.0088, 0.0088, 0.0088], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 04:06:40,758 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2636, 4.3498, 2.5620, 4.5783, 4.7728, 2.1218, 3.9580, 3.5650], + device='cuda:0'), covar=tensor([0.0619, 0.0791, 0.2711, 0.0748, 0.0488, 0.2722, 0.1074, 0.0823], + device='cuda:0'), in_proj_covar=tensor([0.0234, 0.0258, 0.0231, 0.0277, 0.0256, 0.0204, 0.0240, 0.0198], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 04:06:46,112 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.511e+02 3.575e+02 4.310e+02 5.528e+02 1.202e+03, threshold=8.619e+02, percent-clipped=2.0 +2023-03-29 04:07:29,248 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58742.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:07:54,600 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4160, 3.4726, 2.1623, 3.5854, 3.7362, 1.7536, 3.0342, 2.8938], + device='cuda:0'), covar=tensor([0.0802, 0.0923, 0.2748, 0.0876, 0.0586, 0.2676, 0.1193, 0.0945], + device='cuda:0'), in_proj_covar=tensor([0.0233, 0.0257, 0.0230, 0.0276, 0.0254, 0.0204, 0.0239, 0.0198], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 04:07:57,936 INFO [train.py:892] (0/4) Epoch 32, batch 1250, loss[loss=0.1397, simple_loss=0.2083, pruned_loss=0.03556, over 19866.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2395, pruned_loss=0.04102, over 3941467.46 frames. ], batch size: 154, lr: 4.88e-03, grad_scale: 16.0 +2023-03-29 04:08:45,733 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58775.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:09:34,871 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58797.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:09:41,966 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58800.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:09:55,330 INFO [train.py:892] (0/4) Epoch 32, batch 1300, loss[loss=0.2085, simple_loss=0.2875, pruned_loss=0.0648, over 19618.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2386, pruned_loss=0.04077, over 3944197.53 frames. 
], batch size: 387, lr: 4.87e-03, grad_scale: 16.0 +2023-03-29 04:10:37,439 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.259e+02 3.414e+02 4.354e+02 4.908e+02 8.205e+02, threshold=8.708e+02, percent-clipped=0.0 +2023-03-29 04:10:59,846 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58836.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:11:20,209 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58845.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:11:28,497 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58848.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:11:46,353 INFO [train.py:892] (0/4) Epoch 32, batch 1350, loss[loss=0.1439, simple_loss=0.2199, pruned_loss=0.03394, over 19840.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.2383, pruned_loss=0.04071, over 3946794.70 frames. ], batch size: 197, lr: 4.87e-03, grad_scale: 16.0 +2023-03-29 04:12:25,863 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-29 04:13:08,284 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9405, 4.7904, 5.3262, 4.8269, 4.3279, 5.1205, 4.9937, 5.5391], + device='cuda:0'), covar=tensor([0.0924, 0.0409, 0.0378, 0.0382, 0.0819, 0.0474, 0.0476, 0.0318], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0225, 0.0225, 0.0237, 0.0210, 0.0249, 0.0235, 0.0221], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 04:13:39,835 INFO [train.py:892] (0/4) Epoch 32, batch 1400, loss[loss=0.1769, simple_loss=0.2606, pruned_loss=0.04654, over 19797.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2389, pruned_loss=0.0409, over 3946851.98 frames. ], batch size: 51, lr: 4.87e-03, grad_scale: 16.0 +2023-03-29 04:13:40,773 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58906.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:14:27,791 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.427e+02 3.533e+02 4.416e+02 5.664e+02 1.235e+03, threshold=8.833e+02, percent-clipped=5.0 +2023-03-29 04:14:52,271 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.09 vs. limit=5.0 +2023-03-29 04:15:40,822 INFO [train.py:892] (0/4) Epoch 32, batch 1450, loss[loss=0.1553, simple_loss=0.2364, pruned_loss=0.03714, over 19795.00 frames. ], tot_loss[loss=0.1613, simple_loss=0.2401, pruned_loss=0.04125, over 3946596.10 frames. ], batch size: 74, lr: 4.87e-03, grad_scale: 16.0 +2023-03-29 04:16:06,647 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58967.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:17:32,724 INFO [train.py:892] (0/4) Epoch 32, batch 1500, loss[loss=0.1598, simple_loss=0.2384, pruned_loss=0.04062, over 19767.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2392, pruned_loss=0.04083, over 3949200.79 frames. 
], batch size: 198, lr: 4.87e-03, grad_scale: 16.0 +2023-03-29 04:17:42,601 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8274, 1.8497, 2.4239, 2.6140, 1.7891, 2.4485, 1.7540, 1.7647], + device='cuda:0'), covar=tensor([0.0784, 0.0854, 0.1155, 0.0647, 0.2404, 0.0818, 0.1504, 0.1680], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0329, 0.0249, 0.0204, 0.0248, 0.0210, 0.0219, 0.0217], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 04:18:18,937 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.783e+02 3.777e+02 4.436e+02 5.379e+02 1.000e+03, threshold=8.872e+02, percent-clipped=1.0 +2023-03-29 04:18:48,052 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59037.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:19:22,545 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-29 04:19:24,853 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.02 vs. limit=2.0 +2023-03-29 04:19:27,091 INFO [train.py:892] (0/4) Epoch 32, batch 1550, loss[loss=0.1334, simple_loss=0.2172, pruned_loss=0.02481, over 19793.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2386, pruned_loss=0.04039, over 3949339.28 frames. ], batch size: 105, lr: 4.86e-03, grad_scale: 16.0 +2023-03-29 04:20:03,759 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.93 vs. limit=5.0 +2023-03-29 04:21:25,975 INFO [train.py:892] (0/4) Epoch 32, batch 1600, loss[loss=0.1519, simple_loss=0.2345, pruned_loss=0.0347, over 19721.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2383, pruned_loss=0.04038, over 3950142.43 frames. ], batch size: 104, lr: 4.86e-03, grad_scale: 16.0 +2023-03-29 04:22:08,642 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.456e+02 3.560e+02 4.342e+02 5.121e+02 7.778e+02, threshold=8.685e+02, percent-clipped=0.0 +2023-03-29 04:22:23,441 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59131.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:22:42,715 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3418, 4.8813, 5.0083, 4.7381, 5.2693, 3.2611, 4.2586, 2.6171], + device='cuda:0'), covar=tensor([0.0178, 0.0185, 0.0141, 0.0171, 0.0127, 0.0998, 0.0878, 0.1514], + device='cuda:0'), in_proj_covar=tensor([0.0105, 0.0147, 0.0114, 0.0135, 0.0120, 0.0136, 0.0143, 0.0128], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 04:23:16,323 INFO [train.py:892] (0/4) Epoch 32, batch 1650, loss[loss=0.1582, simple_loss=0.2371, pruned_loss=0.03961, over 19891.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.238, pruned_loss=0.04014, over 3949954.93 frames. ], batch size: 63, lr: 4.86e-03, grad_scale: 16.0 +2023-03-29 04:23:33,915 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59164.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:25:14,211 INFO [train.py:892] (0/4) Epoch 32, batch 1700, loss[loss=0.156, simple_loss=0.2403, pruned_loss=0.03584, over 19852.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.2396, pruned_loss=0.04095, over 3949155.57 frames. 
], batch size: 78, lr: 4.86e-03, grad_scale: 16.0 +2023-03-29 04:25:21,701 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1553, 3.5007, 3.6775, 4.1046, 2.9792, 3.3022, 2.7324, 2.7080], + device='cuda:0'), covar=tensor([0.0501, 0.1854, 0.0879, 0.0372, 0.1748, 0.0847, 0.1328, 0.1653], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0330, 0.0250, 0.0205, 0.0249, 0.0210, 0.0220, 0.0218], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 04:25:29,237 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.3916, 2.4592, 2.6132, 2.4213, 2.4943, 2.5215, 2.4524, 2.5743], + device='cuda:0'), covar=tensor([0.0403, 0.0409, 0.0357, 0.0398, 0.0418, 0.0377, 0.0472, 0.0337], + device='cuda:0'), in_proj_covar=tensor([0.0088, 0.0081, 0.0085, 0.0080, 0.0092, 0.0085, 0.0102, 0.0074], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 04:25:50,047 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7256, 4.4984, 4.5408, 4.7607, 4.4493, 4.8796, 4.8374, 5.0572], + device='cuda:0'), covar=tensor([0.0703, 0.0429, 0.0484, 0.0370, 0.0656, 0.0455, 0.0406, 0.0332], + device='cuda:0'), in_proj_covar=tensor([0.0151, 0.0176, 0.0199, 0.0175, 0.0174, 0.0159, 0.0150, 0.0196], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 04:26:00,331 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.757e+02 3.824e+02 4.316e+02 5.380e+02 8.214e+02, threshold=8.632e+02, percent-clipped=0.0 +2023-03-29 04:26:01,291 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59225.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:27:06,703 INFO [train.py:892] (0/4) Epoch 32, batch 1750, loss[loss=0.1469, simple_loss=0.2243, pruned_loss=0.03472, over 19830.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2393, pruned_loss=0.04083, over 3949729.77 frames. ], batch size: 147, lr: 4.86e-03, grad_scale: 16.0 +2023-03-29 04:27:18,754 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59262.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:27:34,718 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5138, 3.3140, 3.5983, 2.6562, 3.8313, 3.1322, 3.2764, 3.6523], + device='cuda:0'), covar=tensor([0.0633, 0.0449, 0.0635, 0.0831, 0.0307, 0.0502, 0.0540, 0.0311], + device='cuda:0'), in_proj_covar=tensor([0.0080, 0.0088, 0.0085, 0.0113, 0.0081, 0.0084, 0.0081, 0.0075], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 04:28:46,845 INFO [train.py:892] (0/4) Epoch 32, batch 1800, loss[loss=0.1423, simple_loss=0.2206, pruned_loss=0.03204, over 19822.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2389, pruned_loss=0.0407, over 3949838.41 frames. ], batch size: 67, lr: 4.85e-03, grad_scale: 16.0 +2023-03-29 04:29:23,505 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.122e+02 3.692e+02 4.509e+02 5.287e+02 9.435e+02, threshold=9.017e+02, percent-clipped=2.0 +2023-03-29 04:29:45,394 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59337.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:29:47,918 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. 
limit=2.0 +2023-03-29 04:30:19,890 INFO [train.py:892] (0/4) Epoch 32, batch 1850, loss[loss=0.1729, simple_loss=0.2616, pruned_loss=0.04204, over 19843.00 frames. ], tot_loss[loss=0.1614, simple_loss=0.2412, pruned_loss=0.0408, over 3949836.12 frames. ], batch size: 58, lr: 4.85e-03, grad_scale: 16.0 +2023-03-29 04:30:28,197 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-32.pt +2023-03-29 04:31:27,301 INFO [train.py:892] (0/4) Epoch 33, batch 0, loss[loss=0.1471, simple_loss=0.2253, pruned_loss=0.0344, over 19837.00 frames. ], tot_loss[loss=0.1471, simple_loss=0.2253, pruned_loss=0.0344, over 19837.00 frames. ], batch size: 239, lr: 4.78e-03, grad_scale: 16.0 +2023-03-29 04:31:27,302 INFO [train.py:917] (0/4) Computing validation loss +2023-03-29 04:32:02,115 INFO [train.py:926] (0/4) Epoch 33, validation: loss=0.1828, simple_loss=0.2501, pruned_loss=0.05775, over 2883724.00 frames. +2023-03-29 04:32:02,117 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-29 04:32:58,844 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59385.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:34:00,944 INFO [train.py:892] (0/4) Epoch 33, batch 50, loss[loss=0.1471, simple_loss=0.2283, pruned_loss=0.03289, over 19687.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2389, pruned_loss=0.0403, over 888769.83 frames. ], batch size: 45, lr: 4.77e-03, grad_scale: 32.0 +2023-03-29 04:34:31,973 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.011e+02 3.734e+02 4.113e+02 4.881e+02 1.279e+03, threshold=8.226e+02, percent-clipped=1.0 +2023-03-29 04:34:45,611 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59431.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:35:56,494 INFO [train.py:892] (0/4) Epoch 33, batch 100, loss[loss=0.1471, simple_loss=0.2337, pruned_loss=0.0302, over 19804.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2388, pruned_loss=0.04014, over 1568190.91 frames. ], batch size: 47, lr: 4.77e-03, grad_scale: 32.0 +2023-03-29 04:36:10,204 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4579, 3.1652, 3.4945, 2.6228, 3.7583, 3.0327, 3.2783, 3.5729], + device='cuda:0'), covar=tensor([0.0614, 0.0500, 0.0547, 0.0874, 0.0336, 0.0526, 0.0533, 0.0343], + device='cuda:0'), in_proj_covar=tensor([0.0080, 0.0088, 0.0086, 0.0114, 0.0081, 0.0084, 0.0082, 0.0075], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 04:36:36,984 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59479.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:37:08,450 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3700, 3.7190, 3.8916, 4.3803, 3.0487, 3.3676, 2.7520, 2.7089], + device='cuda:0'), covar=tensor([0.0498, 0.1853, 0.0858, 0.0403, 0.1940, 0.1013, 0.1394, 0.1673], + device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0331, 0.0251, 0.0206, 0.0250, 0.0210, 0.0220, 0.0219], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 04:37:31,801 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59502.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:37:51,789 INFO [train.py:892] (0/4) Epoch 33, batch 150, loss[loss=0.1454, simple_loss=0.2326, pruned_loss=0.02914, over 19681.00 frames. 
], tot_loss[loss=0.1618, simple_loss=0.2402, pruned_loss=0.04173, over 2095479.47 frames. ], batch size: 56, lr: 4.77e-03, grad_scale: 32.0 +2023-03-29 04:38:07,228 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-03-29 04:38:12,537 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59520.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:38:22,510 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.374e+02 3.870e+02 4.435e+02 5.412e+02 1.132e+03, threshold=8.870e+02, percent-clipped=1.0 +2023-03-29 04:39:44,895 INFO [train.py:892] (0/4) Epoch 33, batch 200, loss[loss=0.1663, simple_loss=0.2401, pruned_loss=0.04626, over 19827.00 frames. ], tot_loss[loss=0.1613, simple_loss=0.2397, pruned_loss=0.0414, over 2507377.24 frames. ], batch size: 208, lr: 4.77e-03, grad_scale: 32.0 +2023-03-29 04:39:48,030 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59562.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:39:50,043 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59563.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:41:35,560 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59610.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:41:36,996 INFO [train.py:892] (0/4) Epoch 33, batch 250, loss[loss=0.1411, simple_loss=0.2128, pruned_loss=0.03468, over 19841.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.2382, pruned_loss=0.04084, over 2828280.91 frames. ], batch size: 143, lr: 4.77e-03, grad_scale: 32.0 +2023-03-29 04:41:48,402 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-29 04:42:08,469 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.374e+02 3.603e+02 4.122e+02 4.867e+02 8.945e+02, threshold=8.243e+02, percent-clipped=1.0 +2023-03-29 04:43:31,410 INFO [train.py:892] (0/4) Epoch 33, batch 300, loss[loss=0.1511, simple_loss=0.2347, pruned_loss=0.03373, over 19850.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2387, pruned_loss=0.04075, over 3077971.32 frames. ], batch size: 81, lr: 4.76e-03, grad_scale: 32.0 +2023-03-29 04:44:18,206 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3146, 3.0164, 3.3013, 2.9256, 3.5434, 3.5361, 4.0993, 4.5569], + device='cuda:0'), covar=tensor([0.0491, 0.1586, 0.1489, 0.2126, 0.1512, 0.1255, 0.0589, 0.0429], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0241, 0.0269, 0.0254, 0.0298, 0.0257, 0.0233, 0.0257], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 04:45:17,702 INFO [train.py:892] (0/4) Epoch 33, batch 350, loss[loss=0.1491, simple_loss=0.2216, pruned_loss=0.03832, over 19840.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2392, pruned_loss=0.04092, over 3270592.52 frames. ], batch size: 161, lr: 4.76e-03, grad_scale: 32.0 +2023-03-29 04:45:51,030 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.193e+02 3.458e+02 4.083e+02 4.831e+02 8.519e+02, threshold=8.167e+02, percent-clipped=1.0 +2023-03-29 04:47:10,195 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.12 vs. limit=5.0 +2023-03-29 04:47:13,245 INFO [train.py:892] (0/4) Epoch 33, batch 400, loss[loss=0.1765, simple_loss=0.2498, pruned_loss=0.05164, over 19861.00 frames. 
], tot_loss[loss=0.1596, simple_loss=0.2386, pruned_loss=0.04037, over 3421829.67 frames. ], batch size: 46, lr: 4.76e-03, grad_scale: 32.0 +2023-03-29 04:47:27,495 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6755, 2.4460, 4.9067, 4.1482, 4.7875, 4.8266, 4.7128, 4.5915], + device='cuda:0'), covar=tensor([0.0729, 0.1455, 0.0133, 0.1171, 0.0152, 0.0244, 0.0202, 0.0185], + device='cuda:0'), in_proj_covar=tensor([0.0100, 0.0105, 0.0089, 0.0155, 0.0087, 0.0100, 0.0092, 0.0087], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 04:47:37,937 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4961, 3.7759, 4.0116, 4.5787, 3.1354, 3.4457, 3.0080, 2.8754], + device='cuda:0'), covar=tensor([0.0485, 0.2023, 0.0842, 0.0357, 0.1900, 0.0998, 0.1271, 0.1588], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0328, 0.0250, 0.0204, 0.0248, 0.0209, 0.0219, 0.0216], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 04:48:21,547 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1188, 4.8343, 4.8582, 5.1432, 4.9105, 5.3837, 5.3006, 5.5046], + device='cuda:0'), covar=tensor([0.0731, 0.0389, 0.0430, 0.0322, 0.0565, 0.0357, 0.0418, 0.0277], + device='cuda:0'), in_proj_covar=tensor([0.0153, 0.0176, 0.0200, 0.0176, 0.0174, 0.0159, 0.0150, 0.0197], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 04:49:08,562 INFO [train.py:892] (0/4) Epoch 33, batch 450, loss[loss=0.125, simple_loss=0.1991, pruned_loss=0.02551, over 19894.00 frames. ], tot_loss[loss=0.1613, simple_loss=0.2402, pruned_loss=0.04114, over 3537798.54 frames. ], batch size: 47, lr: 4.76e-03, grad_scale: 32.0 +2023-03-29 04:49:29,036 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59820.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:49:39,970 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.419e+02 3.507e+02 4.283e+02 5.159e+02 1.029e+03, threshold=8.565e+02, percent-clipped=3.0 +2023-03-29 04:50:19,057 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.61 vs. limit=2.0 +2023-03-29 04:50:33,093 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59848.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:50:50,571 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.39 vs. limit=5.0 +2023-03-29 04:50:53,693 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59858.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:50:59,524 INFO [train.py:892] (0/4) Epoch 33, batch 500, loss[loss=0.1409, simple_loss=0.2156, pruned_loss=0.03309, over 19866.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2397, pruned_loss=0.04106, over 3629947.19 frames. 
], batch size: 129, lr: 4.76e-03, grad_scale: 32.0 +2023-03-29 04:51:19,520 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59868.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:52:00,906 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.7669, 6.0561, 6.1152, 5.9953, 5.8221, 6.1040, 5.4349, 5.5131], + device='cuda:0'), covar=tensor([0.0389, 0.0454, 0.0425, 0.0387, 0.0495, 0.0425, 0.0610, 0.0827], + device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0290, 0.0303, 0.0265, 0.0270, 0.0255, 0.0271, 0.0318], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 04:52:06,932 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59889.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:52:36,586 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7634, 2.3273, 2.6902, 3.0177, 3.3837, 3.6098, 3.4821, 3.5522], + device='cuda:0'), covar=tensor([0.1075, 0.1740, 0.1345, 0.0763, 0.0546, 0.0322, 0.0462, 0.0472], + device='cuda:0'), in_proj_covar=tensor([0.0162, 0.0171, 0.0179, 0.0154, 0.0138, 0.0134, 0.0126, 0.0117], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 04:52:50,679 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9546, 4.7855, 5.3536, 4.8938, 4.2832, 5.0914, 4.9977, 5.5497], + device='cuda:0'), covar=tensor([0.0893, 0.0407, 0.0336, 0.0388, 0.0842, 0.0464, 0.0437, 0.0306], + device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0224, 0.0225, 0.0236, 0.0210, 0.0249, 0.0234, 0.0220], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 04:52:52,746 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59909.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:52:55,886 INFO [train.py:892] (0/4) Epoch 33, batch 550, loss[loss=0.1639, simple_loss=0.2475, pruned_loss=0.04014, over 19860.00 frames. ], tot_loss[loss=0.1613, simple_loss=0.2401, pruned_loss=0.04127, over 3702413.91 frames. ], batch size: 51, lr: 4.75e-03, grad_scale: 32.0 +2023-03-29 04:53:25,402 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.89 vs. limit=5.0 +2023-03-29 04:53:28,205 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.466e+02 3.637e+02 4.346e+02 5.121e+02 1.028e+03, threshold=8.693e+02, percent-clipped=1.0 +2023-03-29 04:54:25,914 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59950.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:54:50,047 INFO [train.py:892] (0/4) Epoch 33, batch 600, loss[loss=0.1625, simple_loss=0.2484, pruned_loss=0.03833, over 19809.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2399, pruned_loss=0.04095, over 3758761.39 frames. ], batch size: 65, lr: 4.75e-03, grad_scale: 32.0 +2023-03-29 04:56:19,253 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-60000.pt +2023-03-29 04:56:47,916 INFO [train.py:892] (0/4) Epoch 33, batch 650, loss[loss=0.1465, simple_loss=0.2253, pruned_loss=0.03382, over 19879.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2391, pruned_loss=0.04047, over 3801343.09 frames. 
], batch size: 95, lr: 4.75e-03, grad_scale: 32.0 +2023-03-29 04:57:20,322 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.343e+02 3.751e+02 4.539e+02 5.312e+02 8.817e+02, threshold=9.077e+02, percent-clipped=1.0 +2023-03-29 04:57:54,988 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60040.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:57:56,948 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0741, 3.7853, 3.9252, 4.0863, 3.8698, 4.0472, 4.1572, 4.3708], + device='cuda:0'), covar=tensor([0.0690, 0.0544, 0.0572, 0.0416, 0.0841, 0.0631, 0.0512, 0.0338], + device='cuda:0'), in_proj_covar=tensor([0.0154, 0.0177, 0.0201, 0.0176, 0.0176, 0.0161, 0.0151, 0.0198], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 04:58:12,353 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9905, 4.6891, 4.7783, 5.0241, 4.7883, 5.1527, 5.1234, 5.3195], + device='cuda:0'), covar=tensor([0.0613, 0.0421, 0.0473, 0.0327, 0.0619, 0.0429, 0.0398, 0.0303], + device='cuda:0'), in_proj_covar=tensor([0.0153, 0.0177, 0.0201, 0.0176, 0.0175, 0.0160, 0.0151, 0.0198], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 04:58:22,701 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60052.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:58:38,919 INFO [train.py:892] (0/4) Epoch 33, batch 700, loss[loss=0.1579, simple_loss=0.2434, pruned_loss=0.03626, over 19835.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2385, pruned_loss=0.04039, over 3834594.77 frames. ], batch size: 52, lr: 4.75e-03, grad_scale: 32.0 +2023-03-29 04:58:48,342 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60065.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:59:18,349 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60078.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 04:59:51,156 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4526, 4.3257, 4.8563, 4.4671, 4.0537, 4.7191, 4.4834, 5.0453], + device='cuda:0'), covar=tensor([0.0979, 0.0435, 0.0480, 0.0426, 0.0904, 0.0522, 0.0489, 0.0354], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0223, 0.0223, 0.0235, 0.0209, 0.0246, 0.0233, 0.0219], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 04:59:59,444 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2860, 4.8969, 4.9591, 5.2389, 4.8185, 5.4410, 5.3394, 5.5515], + device='cuda:0'), covar=tensor([0.0559, 0.0390, 0.0405, 0.0339, 0.0638, 0.0378, 0.0393, 0.0319], + device='cuda:0'), in_proj_covar=tensor([0.0154, 0.0178, 0.0202, 0.0177, 0.0176, 0.0160, 0.0151, 0.0198], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 05:00:08,328 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60101.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 05:00:28,994 INFO [train.py:892] (0/4) Epoch 33, batch 750, loss[loss=0.129, simple_loss=0.2067, pruned_loss=0.0257, over 19864.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2369, pruned_loss=0.03978, over 3862938.29 frames. 
], batch size: 99, lr: 4.75e-03, grad_scale: 32.0 +2023-03-29 05:00:33,777 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60113.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 05:00:59,949 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.577e+02 3.667e+02 4.352e+02 5.271e+02 8.551e+02, threshold=8.703e+02, percent-clipped=0.0 +2023-03-29 05:01:03,258 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60126.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:01:13,910 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-29 05:01:34,584 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60139.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:02:17,853 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60158.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:02:23,083 INFO [train.py:892] (0/4) Epoch 33, batch 800, loss[loss=0.1575, simple_loss=0.2449, pruned_loss=0.0351, over 19690.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2378, pruned_loss=0.04018, over 3883154.74 frames. ], batch size: 56, lr: 4.74e-03, grad_scale: 32.0 +2023-03-29 05:03:47,819 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60198.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:04:02,021 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60204.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:04:08,000 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60206.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:04:17,809 INFO [train.py:892] (0/4) Epoch 33, batch 850, loss[loss=0.1455, simple_loss=0.2274, pruned_loss=0.03173, over 19872.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2384, pruned_loss=0.04016, over 3897912.12 frames. ], batch size: 89, lr: 4.74e-03, grad_scale: 32.0 +2023-03-29 05:04:48,179 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.68 vs. limit=5.0 +2023-03-29 05:04:48,774 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.588e+02 3.634e+02 4.278e+02 5.134e+02 8.109e+02, threshold=8.556e+02, percent-clipped=0.0 +2023-03-29 05:05:36,998 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60245.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:06:02,347 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7660, 3.3517, 3.6071, 3.1461, 4.0363, 4.0835, 4.5221, 5.0488], + device='cuda:0'), covar=tensor([0.0510, 0.1605, 0.1469, 0.2365, 0.1547, 0.1176, 0.0622, 0.0547], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0244, 0.0272, 0.0256, 0.0301, 0.0261, 0.0236, 0.0261], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 05:06:08,059 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60259.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:06:11,098 INFO [train.py:892] (0/4) Epoch 33, batch 900, loss[loss=0.1507, simple_loss=0.2351, pruned_loss=0.03313, over 19764.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2387, pruned_loss=0.04019, over 3907886.51 frames. ], batch size: 119, lr: 4.74e-03, grad_scale: 32.0 +2023-03-29 05:08:06,818 INFO [train.py:892] (0/4) Epoch 33, batch 950, loss[loss=0.1939, simple_loss=0.2668, pruned_loss=0.06049, over 19849.00 frames. 
], tot_loss[loss=0.1597, simple_loss=0.2393, pruned_loss=0.04011, over 3916494.39 frames. ], batch size: 60, lr: 4.74e-03, grad_scale: 32.0 +2023-03-29 05:08:37,041 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.481e+02 3.823e+02 4.629e+02 5.710e+02 9.734e+02, threshold=9.258e+02, percent-clipped=1.0 +2023-03-29 05:08:48,578 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9102, 3.8006, 3.7686, 3.5412, 3.9253, 2.8441, 3.2249, 1.8517], + device='cuda:0'), covar=tensor([0.0225, 0.0234, 0.0159, 0.0207, 0.0158, 0.1038, 0.0624, 0.1718], + device='cuda:0'), in_proj_covar=tensor([0.0105, 0.0147, 0.0115, 0.0135, 0.0120, 0.0136, 0.0142, 0.0128], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 05:09:56,813 INFO [train.py:892] (0/4) Epoch 33, batch 1000, loss[loss=0.1425, simple_loss=0.2183, pruned_loss=0.03331, over 19888.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.2395, pruned_loss=0.04016, over 3922490.95 frames. ], batch size: 176, lr: 4.74e-03, grad_scale: 32.0 +2023-03-29 05:11:13,840 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60396.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 05:11:41,589 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60408.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 05:11:48,652 INFO [train.py:892] (0/4) Epoch 33, batch 1050, loss[loss=0.1649, simple_loss=0.2499, pruned_loss=0.03997, over 19756.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2385, pruned_loss=0.03986, over 3928379.00 frames. ], batch size: 253, lr: 4.74e-03, grad_scale: 32.0 +2023-03-29 05:12:11,042 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60421.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:12:21,532 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.510e+02 3.870e+02 4.484e+02 5.211e+02 8.559e+02, threshold=8.968e+02, percent-clipped=0.0 +2023-03-29 05:12:26,686 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2839, 5.5726, 5.7500, 5.5713, 5.4957, 5.3646, 5.4100, 5.2886], + device='cuda:0'), covar=tensor([0.1382, 0.1213, 0.0903, 0.1020, 0.0694, 0.0696, 0.1957, 0.1704], + device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0336, 0.0370, 0.0300, 0.0277, 0.0284, 0.0365, 0.0391], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 05:12:41,229 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60434.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:13:40,375 INFO [train.py:892] (0/4) Epoch 33, batch 1100, loss[loss=0.1421, simple_loss=0.2198, pruned_loss=0.03216, over 19875.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2377, pruned_loss=0.03957, over 3933509.95 frames. 
], batch size: 159, lr: 4.73e-03, grad_scale: 32.0 +2023-03-29 05:13:41,527 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1668, 3.8765, 4.0543, 4.2072, 3.9297, 4.1877, 4.2869, 4.4907], + device='cuda:0'), covar=tensor([0.0706, 0.0490, 0.0542, 0.0418, 0.0701, 0.0529, 0.0452, 0.0289], + device='cuda:0'), in_proj_covar=tensor([0.0152, 0.0177, 0.0201, 0.0176, 0.0174, 0.0159, 0.0151, 0.0197], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 05:15:21,723 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60504.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:15:35,154 INFO [train.py:892] (0/4) Epoch 33, batch 1150, loss[loss=0.1747, simple_loss=0.2549, pruned_loss=0.04722, over 19932.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2372, pruned_loss=0.03962, over 3938096.22 frames. ], batch size: 51, lr: 4.73e-03, grad_scale: 32.0 +2023-03-29 05:15:40,722 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.00 vs. limit=5.0 +2023-03-29 05:16:08,393 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.786e+02 3.806e+02 4.491e+02 5.149e+02 7.976e+02, threshold=8.981e+02, percent-clipped=0.0 +2023-03-29 05:16:13,382 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7602, 2.4195, 2.6847, 3.0125, 3.4353, 3.6043, 3.5557, 3.6109], + device='cuda:0'), covar=tensor([0.1028, 0.1512, 0.1267, 0.0738, 0.0501, 0.0326, 0.0458, 0.0405], + device='cuda:0'), in_proj_covar=tensor([0.0161, 0.0169, 0.0177, 0.0152, 0.0136, 0.0134, 0.0125, 0.0116], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 05:16:54,239 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60545.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:16:54,843 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.76 vs. limit=5.0 +2023-03-29 05:17:08,797 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60552.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:17:12,934 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60554.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:17:29,246 INFO [train.py:892] (0/4) Epoch 33, batch 1200, loss[loss=0.2562, simple_loss=0.3288, pruned_loss=0.09178, over 19371.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.238, pruned_loss=0.04036, over 3941654.51 frames. ], batch size: 431, lr: 4.73e-03, grad_scale: 32.0 +2023-03-29 05:18:27,319 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0410, 3.3624, 2.8813, 2.4546, 2.9472, 3.1536, 3.1877, 3.2184], + device='cuda:0'), covar=tensor([0.0297, 0.0291, 0.0303, 0.0524, 0.0347, 0.0300, 0.0229, 0.0258], + device='cuda:0'), in_proj_covar=tensor([0.0107, 0.0100, 0.0102, 0.0104, 0.0107, 0.0090, 0.0090, 0.0089], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 05:18:42,755 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60593.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:19:21,750 INFO [train.py:892] (0/4) Epoch 33, batch 1250, loss[loss=0.1548, simple_loss=0.2397, pruned_loss=0.03488, over 19612.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2391, pruned_loss=0.04086, over 3942990.04 frames. 
], batch size: 51, lr: 4.73e-03, grad_scale: 32.0 +2023-03-29 05:19:52,010 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.888e+02 3.769e+02 4.430e+02 5.364e+02 1.022e+03, threshold=8.861e+02, percent-clipped=3.0 +2023-03-29 05:21:14,264 INFO [train.py:892] (0/4) Epoch 33, batch 1300, loss[loss=0.1294, simple_loss=0.2084, pruned_loss=0.02514, over 19735.00 frames. ], tot_loss[loss=0.1612, simple_loss=0.2403, pruned_loss=0.04099, over 3942338.10 frames. ], batch size: 77, lr: 4.73e-03, grad_scale: 32.0 +2023-03-29 05:22:34,649 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60696.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:23:03,427 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60708.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:23:08,869 INFO [train.py:892] (0/4) Epoch 33, batch 1350, loss[loss=0.1569, simple_loss=0.2307, pruned_loss=0.04154, over 19781.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2402, pruned_loss=0.04069, over 3944392.10 frames. ], batch size: 155, lr: 4.72e-03, grad_scale: 16.0 +2023-03-29 05:23:20,206 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.6840, 1.5872, 1.7325, 1.7725, 1.6389, 1.7423, 1.5915, 1.7652], + device='cuda:0'), covar=tensor([0.0455, 0.0409, 0.0377, 0.0374, 0.0520, 0.0352, 0.0538, 0.0338], + device='cuda:0'), in_proj_covar=tensor([0.0089, 0.0083, 0.0086, 0.0080, 0.0093, 0.0086, 0.0103, 0.0075], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 05:23:26,311 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60718.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:23:33,851 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60721.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:23:36,076 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1380, 3.1847, 1.9472, 3.7762, 3.4730, 3.7138, 3.7918, 3.0150], + device='cuda:0'), covar=tensor([0.0613, 0.0655, 0.1623, 0.0599, 0.0581, 0.0452, 0.0580, 0.0756], + device='cuda:0'), in_proj_covar=tensor([0.0144, 0.0145, 0.0144, 0.0154, 0.0135, 0.0138, 0.0149, 0.0148], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 05:23:44,321 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.407e+02 3.398e+02 4.212e+02 5.272e+02 1.001e+03, threshold=8.423e+02, percent-clipped=0.0 +2023-03-29 05:23:51,433 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2652, 5.5725, 5.6195, 5.4836, 5.2647, 5.5575, 5.0094, 5.0474], + device='cuda:0'), covar=tensor([0.0418, 0.0427, 0.0439, 0.0425, 0.0558, 0.0517, 0.0689, 0.1007], + device='cuda:0'), in_proj_covar=tensor([0.0272, 0.0285, 0.0299, 0.0262, 0.0267, 0.0253, 0.0269, 0.0316], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 05:24:01,956 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60734.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:24:24,763 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60744.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:24:47,405 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5524, 2.8819, 2.9826, 3.4540, 2.3278, 2.9180, 2.4222, 2.2121], + device='cuda:0'), 
covar=tensor([0.0709, 0.2239, 0.1360, 0.0665, 0.2548, 0.1142, 0.1569, 0.2069], + device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0332, 0.0252, 0.0207, 0.0252, 0.0212, 0.0222, 0.0221], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 05:24:51,702 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60756.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:25:04,435 INFO [train.py:892] (0/4) Epoch 33, batch 1400, loss[loss=0.1763, simple_loss=0.2552, pruned_loss=0.04866, over 19788.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2399, pruned_loss=0.04055, over 3945930.50 frames. ], batch size: 168, lr: 4.72e-03, grad_scale: 16.0 +2023-03-29 05:25:09,671 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.5851, 1.4328, 1.6165, 1.6611, 1.5267, 1.6650, 1.4840, 1.6105], + device='cuda:0'), covar=tensor([0.0422, 0.0421, 0.0390, 0.0340, 0.0483, 0.0319, 0.0526, 0.0322], + device='cuda:0'), in_proj_covar=tensor([0.0090, 0.0084, 0.0087, 0.0081, 0.0094, 0.0087, 0.0104, 0.0076], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 05:25:21,852 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60769.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:25:43,041 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-03-29 05:25:44,815 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60779.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:25:50,845 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60782.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:26:54,138 INFO [train.py:892] (0/4) Epoch 33, batch 1450, loss[loss=0.1562, simple_loss=0.2334, pruned_loss=0.03949, over 19817.00 frames. ], tot_loss[loss=0.161, simple_loss=0.2404, pruned_loss=0.04077, over 3947950.19 frames. ], batch size: 148, lr: 4.72e-03, grad_scale: 16.0 +2023-03-29 05:27:25,502 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.764e+02 3.944e+02 4.679e+02 5.484e+02 1.088e+03, threshold=9.358e+02, percent-clipped=4.0 +2023-03-29 05:28:30,857 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60854.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:28:46,085 INFO [train.py:892] (0/4) Epoch 33, batch 1500, loss[loss=0.1348, simple_loss=0.214, pruned_loss=0.02776, over 19858.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2395, pruned_loss=0.04041, over 3948133.59 frames. 
], batch size: 112, lr: 4.72e-03, grad_scale: 16.0 +2023-03-29 05:29:31,716 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2128, 3.9187, 4.0416, 4.1964, 3.9966, 4.2395, 4.2559, 4.4860], + device='cuda:0'), covar=tensor([0.0650, 0.0391, 0.0481, 0.0381, 0.0654, 0.0491, 0.0458, 0.0274], + device='cuda:0'), in_proj_covar=tensor([0.0154, 0.0179, 0.0203, 0.0179, 0.0176, 0.0161, 0.0154, 0.0199], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 05:30:06,250 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5610, 3.7598, 4.0618, 3.6934, 3.6022, 4.0156, 3.8075, 4.1269], + device='cuda:0'), covar=tensor([0.1201, 0.0442, 0.0543, 0.0510, 0.1273, 0.0688, 0.0627, 0.0523], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0226, 0.0225, 0.0239, 0.0211, 0.0249, 0.0238, 0.0221], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 05:30:19,453 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60902.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:30:40,147 INFO [train.py:892] (0/4) Epoch 33, batch 1550, loss[loss=0.1487, simple_loss=0.2371, pruned_loss=0.03014, over 19798.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2392, pruned_loss=0.0401, over 3949394.83 frames. ], batch size: 86, lr: 4.72e-03, grad_scale: 16.0 +2023-03-29 05:30:45,604 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1610, 3.0507, 5.0316, 3.4239, 3.6314, 3.4815, 2.5674, 2.8129], + device='cuda:0'), covar=tensor([0.1094, 0.3436, 0.0409, 0.1212, 0.2279, 0.1687, 0.2738, 0.2757], + device='cuda:0'), in_proj_covar=tensor([0.0352, 0.0390, 0.0351, 0.0288, 0.0374, 0.0377, 0.0376, 0.0347], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 05:31:12,864 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.819e+02 3.799e+02 4.581e+02 5.362e+02 9.999e+02, threshold=9.162e+02, percent-clipped=1.0 +2023-03-29 05:32:34,459 INFO [train.py:892] (0/4) Epoch 33, batch 1600, loss[loss=0.1699, simple_loss=0.248, pruned_loss=0.04588, over 19773.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.24, pruned_loss=0.04034, over 3948025.37 frames. ], batch size: 280, lr: 4.71e-03, grad_scale: 16.0 +2023-03-29 05:32:55,595 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9243, 4.6392, 4.7077, 4.9787, 4.7011, 5.1453, 5.0777, 5.2561], + device='cuda:0'), covar=tensor([0.0655, 0.0415, 0.0470, 0.0362, 0.0682, 0.0412, 0.0422, 0.0313], + device='cuda:0'), in_proj_covar=tensor([0.0155, 0.0180, 0.0205, 0.0179, 0.0178, 0.0162, 0.0154, 0.0200], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 05:33:31,623 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. 
limit=2.0 +2023-03-29 05:34:10,695 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3552, 2.6168, 4.4499, 3.8295, 4.2157, 4.4275, 4.1963, 4.1907], + device='cuda:0'), covar=tensor([0.0528, 0.0966, 0.0100, 0.0658, 0.0136, 0.0202, 0.0168, 0.0155], + device='cuda:0'), in_proj_covar=tensor([0.0100, 0.0104, 0.0088, 0.0153, 0.0086, 0.0098, 0.0090, 0.0086], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 05:34:30,667 INFO [train.py:892] (0/4) Epoch 33, batch 1650, loss[loss=0.1712, simple_loss=0.2507, pruned_loss=0.0459, over 19642.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2401, pruned_loss=0.04023, over 3947566.50 frames. ], batch size: 299, lr: 4.71e-03, grad_scale: 16.0 +2023-03-29 05:34:33,933 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1784, 2.4466, 2.1850, 1.6736, 2.3060, 2.4608, 2.3217, 2.4051], + device='cuda:0'), covar=tensor([0.0413, 0.0340, 0.0355, 0.0606, 0.0402, 0.0310, 0.0330, 0.0309], + device='cuda:0'), in_proj_covar=tensor([0.0106, 0.0099, 0.0101, 0.0103, 0.0106, 0.0089, 0.0090, 0.0089], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 05:35:04,281 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.576e+02 3.785e+02 4.446e+02 5.538e+02 8.034e+02, threshold=8.891e+02, percent-clipped=0.0 +2023-03-29 05:36:28,677 INFO [train.py:892] (0/4) Epoch 33, batch 1700, loss[loss=0.1547, simple_loss=0.2319, pruned_loss=0.03869, over 19833.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.2392, pruned_loss=0.03959, over 3948197.93 frames. ], batch size: 75, lr: 4.71e-03, grad_scale: 16.0 +2023-03-29 05:36:29,880 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61061.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:37:06,947 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61074.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:38:23,938 INFO [train.py:892] (0/4) Epoch 33, batch 1750, loss[loss=0.1664, simple_loss=0.2359, pruned_loss=0.04845, over 19704.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2398, pruned_loss=0.04047, over 3947293.59 frames. ], batch size: 78, lr: 4.71e-03, grad_scale: 16.0 +2023-03-29 05:38:45,031 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61122.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:38:52,711 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.343e+02 3.701e+02 4.217e+02 4.986e+02 8.389e+02, threshold=8.433e+02, percent-clipped=0.0 +2023-03-29 05:39:59,800 INFO [train.py:892] (0/4) Epoch 33, batch 1800, loss[loss=0.1625, simple_loss=0.2494, pruned_loss=0.03781, over 19713.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2405, pruned_loss=0.04082, over 3947843.31 frames. ], batch size: 54, lr: 4.71e-03, grad_scale: 16.0 +2023-03-29 05:41:31,941 INFO [train.py:892] (0/4) Epoch 33, batch 1850, loss[loss=0.1498, simple_loss=0.2285, pruned_loss=0.03558, over 19679.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2415, pruned_loss=0.04035, over 3946579.12 frames. ], batch size: 55, lr: 4.70e-03, grad_scale: 16.0 +2023-03-29 05:41:39,691 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-33.pt +2023-03-29 05:42:34,506 INFO [train.py:892] (0/4) Epoch 34, batch 0, loss[loss=0.1612, simple_loss=0.2337, pruned_loss=0.04436, over 19835.00 frames. 
], tot_loss[loss=0.1612, simple_loss=0.2337, pruned_loss=0.04436, over 19835.00 frames. ], batch size: 161, lr: 4.63e-03, grad_scale: 16.0 +2023-03-29 05:42:34,507 INFO [train.py:917] (0/4) Computing validation loss +2023-03-29 05:42:55,641 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5798, 4.0458, 3.8831, 3.8800, 4.0894, 3.9295, 3.8618, 3.6635], + device='cuda:0'), covar=tensor([0.2137, 0.1354, 0.1625, 0.1437, 0.0852, 0.0975, 0.1983, 0.2252], + device='cuda:0'), in_proj_covar=tensor([0.0299, 0.0340, 0.0372, 0.0303, 0.0279, 0.0289, 0.0368, 0.0397], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 05:43:02,776 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7871, 3.0407, 3.3518, 3.6092, 2.7965, 3.1211, 2.5221, 2.6204], + device='cuda:0'), covar=tensor([0.0527, 0.1610, 0.0884, 0.0503, 0.1847, 0.0762, 0.1353, 0.1506], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0329, 0.0251, 0.0205, 0.0249, 0.0211, 0.0220, 0.0218], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 05:43:07,404 INFO [train.py:926] (0/4) Epoch 34, validation: loss=0.1816, simple_loss=0.2491, pruned_loss=0.05706, over 2883724.00 frames. +2023-03-29 05:43:07,405 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-29 05:43:30,406 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.548e+02 3.493e+02 4.214e+02 5.020e+02 1.069e+03, threshold=8.428e+02, percent-clipped=3.0 +2023-03-29 05:44:23,149 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2236, 3.9394, 4.0798, 4.2564, 3.9933, 4.3055, 4.3490, 4.5606], + device='cuda:0'), covar=tensor([0.0738, 0.0456, 0.0565, 0.0425, 0.0790, 0.0541, 0.0477, 0.0316], + device='cuda:0'), in_proj_covar=tensor([0.0154, 0.0180, 0.0204, 0.0178, 0.0177, 0.0161, 0.0153, 0.0199], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 05:45:06,400 INFO [train.py:892] (0/4) Epoch 34, batch 50, loss[loss=0.1453, simple_loss=0.2124, pruned_loss=0.03907, over 19843.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2349, pruned_loss=0.04038, over 891459.57 frames. ], batch size: 143, lr: 4.63e-03, grad_scale: 16.0 +2023-03-29 05:47:01,406 INFO [train.py:892] (0/4) Epoch 34, batch 100, loss[loss=0.1587, simple_loss=0.2356, pruned_loss=0.04088, over 19800.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2364, pruned_loss=0.04011, over 1570302.30 frames. ], batch size: 126, lr: 4.63e-03, grad_scale: 16.0 +2023-03-29 05:47:24,794 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.578e+02 3.716e+02 4.447e+02 5.513e+02 1.175e+03, threshold=8.893e+02, percent-clipped=3.0 +2023-03-29 05:48:57,019 INFO [train.py:892] (0/4) Epoch 34, batch 150, loss[loss=0.1885, simple_loss=0.2722, pruned_loss=0.05235, over 19735.00 frames. ], tot_loss[loss=0.159, simple_loss=0.2375, pruned_loss=0.04028, over 2097838.82 frames. 
], batch size: 99, lr: 4.63e-03, grad_scale: 16.0 +2023-03-29 05:49:18,079 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61374.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:50:14,477 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0534, 2.8486, 2.9070, 3.0692, 2.9948, 3.1030, 3.1166, 3.3131], + device='cuda:0'), covar=tensor([0.0796, 0.0561, 0.0634, 0.0489, 0.0753, 0.0576, 0.0556, 0.0394], + device='cuda:0'), in_proj_covar=tensor([0.0155, 0.0180, 0.0205, 0.0179, 0.0178, 0.0162, 0.0154, 0.0200], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 05:50:56,752 INFO [train.py:892] (0/4) Epoch 34, batch 200, loss[loss=0.1601, simple_loss=0.2472, pruned_loss=0.03654, over 19867.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2372, pruned_loss=0.03942, over 2508897.20 frames. ], batch size: 48, lr: 4.63e-03, grad_scale: 16.0 +2023-03-29 05:50:59,659 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61417.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:51:10,035 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=61422.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 05:51:10,677 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-03-29 05:51:17,519 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8358, 4.0850, 2.3601, 4.3262, 4.4760, 2.0046, 3.5572, 3.1419], + device='cuda:0'), covar=tensor([0.0839, 0.0854, 0.2810, 0.0709, 0.0546, 0.2935, 0.1164, 0.0974], + device='cuda:0'), in_proj_covar=tensor([0.0235, 0.0260, 0.0232, 0.0279, 0.0258, 0.0204, 0.0241, 0.0200], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 05:51:18,483 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.390e+02 3.871e+02 4.515e+02 5.267e+02 1.071e+03, threshold=9.030e+02, percent-clipped=3.0 +2023-03-29 05:52:53,114 INFO [train.py:892] (0/4) Epoch 34, batch 250, loss[loss=0.1469, simple_loss=0.2216, pruned_loss=0.03606, over 19828.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.238, pruned_loss=0.0401, over 2829146.08 frames. ], batch size: 93, lr: 4.62e-03, grad_scale: 16.0 +2023-03-29 05:54:06,405 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-29 05:54:47,109 INFO [train.py:892] (0/4) Epoch 34, batch 300, loss[loss=0.1501, simple_loss=0.2292, pruned_loss=0.03551, over 19616.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2384, pruned_loss=0.04009, over 3077245.86 frames. ], batch size: 65, lr: 4.62e-03, grad_scale: 16.0 +2023-03-29 05:55:09,937 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.400e+02 3.531e+02 4.342e+02 5.324e+02 1.066e+03, threshold=8.684e+02, percent-clipped=3.0 +2023-03-29 05:55:47,647 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. 
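The recurring optim.py lines are easy to decode once one notices that the logged threshold is always twice the logged median grad-norm (e.g. 2 x 4.214e+02 = 8.428e+02 above), i.e. Clipping_scale=2.0 applied to the 50% quartile of recent gradient norms, with percent-clipped the share of batches whose norm exceeded it. A hedged sketch of that statistic; the buffer size and exact percentile conventions are assumptions:

import torch

def clipping_stats(grad_norms: torch.Tensor, clipping_scale: float = 2.0):
    # grad_norms: recent per-batch gradient norms kept in a rolling buffer
    q = torch.quantile(grad_norms, torch.tensor([0.0, 0.25, 0.50, 0.75, 1.0]))
    threshold = clipping_scale * q[2]              # 2 x median, as in the log
    percent_clipped = 100.0 * (grad_norms > threshold).float().mean()
    return q, threshold, percent_clipped

quartiles, thr, pc = clipping_stats(torch.tensor([254.8, 349.3, 421.4, 502.0, 1069.0]))
print(quartiles.tolist(), thr.item(), pc.item())   # threshold 842.8, 20% clipped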
limit=2.0 +2023-03-29 05:56:23,689 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2463, 3.0052, 3.3571, 2.9861, 3.5079, 3.4663, 4.1259, 4.5252], + device='cuda:0'), covar=tensor([0.0585, 0.1635, 0.1570, 0.2130, 0.1799, 0.1422, 0.0607, 0.0546], + device='cuda:0'), in_proj_covar=tensor([0.0255, 0.0241, 0.0268, 0.0255, 0.0299, 0.0257, 0.0235, 0.0258], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 05:56:44,387 INFO [train.py:892] (0/4) Epoch 34, batch 350, loss[loss=0.1653, simple_loss=0.2511, pruned_loss=0.03979, over 19662.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2378, pruned_loss=0.03944, over 3271511.39 frames. ], batch size: 57, lr: 4.62e-03, grad_scale: 16.0 +2023-03-29 05:58:21,585 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5063, 3.6024, 2.2866, 4.2712, 3.8865, 4.2207, 4.2753, 3.4029], + device='cuda:0'), covar=tensor([0.0585, 0.0628, 0.1527, 0.0513, 0.0553, 0.0354, 0.0597, 0.0757], + device='cuda:0'), in_proj_covar=tensor([0.0143, 0.0145, 0.0144, 0.0154, 0.0135, 0.0137, 0.0150, 0.0147], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 05:58:27,742 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3953, 2.7746, 2.9266, 3.2696, 2.3013, 2.9680, 2.1723, 2.2555], + device='cuda:0'), covar=tensor([0.0642, 0.1333, 0.1029, 0.0544, 0.2118, 0.0781, 0.1374, 0.1573], + device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0328, 0.0251, 0.0206, 0.0249, 0.0212, 0.0221, 0.0218], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 05:58:37,548 INFO [train.py:892] (0/4) Epoch 34, batch 400, loss[loss=0.1502, simple_loss=0.2413, pruned_loss=0.0296, over 19803.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2388, pruned_loss=0.04013, over 3421113.73 frames. ], batch size: 50, lr: 4.62e-03, grad_scale: 16.0 +2023-03-29 05:58:58,213 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2937, 2.6471, 4.3481, 3.8118, 4.1657, 4.2477, 4.0871, 4.0222], + device='cuda:0'), covar=tensor([0.0664, 0.1076, 0.0144, 0.0761, 0.0197, 0.0279, 0.0217, 0.0219], + device='cuda:0'), in_proj_covar=tensor([0.0100, 0.0103, 0.0088, 0.0152, 0.0086, 0.0098, 0.0090, 0.0086], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 05:59:04,287 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.292e+02 3.497e+02 4.355e+02 5.264e+02 1.050e+03, threshold=8.709e+02, percent-clipped=2.0 +2023-03-29 06:00:32,502 INFO [train.py:892] (0/4) Epoch 34, batch 450, loss[loss=0.1532, simple_loss=0.2356, pruned_loss=0.03536, over 19810.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.2398, pruned_loss=0.04074, over 3539346.08 frames. 
], batch size: 96, lr: 4.62e-03, grad_scale: 16.0 +2023-03-29 06:00:48,254 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2744, 2.5377, 3.5551, 2.8274, 3.0028, 2.9098, 2.1668, 2.3096], + device='cuda:0'), covar=tensor([0.1286, 0.3006, 0.0745, 0.1237, 0.1893, 0.1481, 0.2809, 0.2791], + device='cuda:0'), in_proj_covar=tensor([0.0350, 0.0390, 0.0349, 0.0287, 0.0374, 0.0377, 0.0377, 0.0346], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 06:01:48,319 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8235, 2.4324, 2.7749, 3.1137, 3.5499, 3.8335, 3.7708, 3.6377], + device='cuda:0'), covar=tensor([0.1017, 0.1593, 0.1303, 0.0700, 0.0426, 0.0323, 0.0379, 0.0574], + device='cuda:0'), in_proj_covar=tensor([0.0162, 0.0171, 0.0180, 0.0152, 0.0137, 0.0134, 0.0125, 0.0118], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 06:02:27,840 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3710, 3.2720, 4.9028, 3.7300, 3.8853, 3.7236, 2.6190, 2.9043], + device='cuda:0'), covar=tensor([0.0852, 0.3077, 0.0453, 0.1000, 0.1819, 0.1454, 0.2676, 0.2517], + device='cuda:0'), in_proj_covar=tensor([0.0349, 0.0388, 0.0347, 0.0286, 0.0372, 0.0375, 0.0374, 0.0344], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 06:02:28,763 INFO [train.py:892] (0/4) Epoch 34, batch 500, loss[loss=0.1471, simple_loss=0.2404, pruned_loss=0.02687, over 19730.00 frames. ], tot_loss[loss=0.161, simple_loss=0.2405, pruned_loss=0.04069, over 3628053.38 frames. ], batch size: 52, lr: 4.61e-03, grad_scale: 16.0 +2023-03-29 06:02:32,196 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61717.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:02:51,077 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.296e+02 3.666e+02 4.435e+02 5.198e+02 9.761e+02, threshold=8.870e+02, percent-clipped=3.0 +2023-03-29 06:03:07,796 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61732.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:03:32,855 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61743.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:04:20,957 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=61765.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:04:22,376 INFO [train.py:892] (0/4) Epoch 34, batch 550, loss[loss=0.1467, simple_loss=0.226, pruned_loss=0.03365, over 19805.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2395, pruned_loss=0.0404, over 3700111.19 frames. ], batch size: 172, lr: 4.61e-03, grad_scale: 16.0 +2023-03-29 06:05:28,053 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61793.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:05:55,301 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61804.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:06:24,062 INFO [train.py:892] (0/4) Epoch 34, batch 600, loss[loss=0.1356, simple_loss=0.2158, pruned_loss=0.02768, over 19742.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.2399, pruned_loss=0.04073, over 3755627.03 frames. 
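The slow decay of lr across these entries (4.71e-03 in epoch 33 down to 4.61e-03 here) matches an Eden-style schedule built from base_lr=0.05, lr_batches=5000 and lr_epochs=3.5 in the hyper-parameters. A sketch follows; the "completed epochs" convention for the epoch factor is an assumption, but it reproduces the logged values to the printed precision:

def eden_lr(base_lr: float, batch: int, epoch: int,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    # lr = base_lr * ((b^2+B^2)/B^2)^-0.25 * ((e^2+E^2)/E^2)^-0.25
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

print(f"{eden_lr(0.05, 61061, 32):.2e}")  # 4.71e-03: epoch 33, batch_count=61061
print(f"{eden_lr(0.05, 61717, 33):.2e}")  # 4.61e-03: epoch 34, batch_count=61717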
], batch size: 106, lr: 4.61e-03, grad_scale: 16.0 +2023-03-29 06:06:47,556 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.635e+02 3.514e+02 4.079e+02 4.976e+02 9.879e+02, threshold=8.158e+02, percent-clipped=2.0 +2023-03-29 06:08:22,945 INFO [train.py:892] (0/4) Epoch 34, batch 650, loss[loss=0.1461, simple_loss=0.2295, pruned_loss=0.03138, over 19766.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2391, pruned_loss=0.04053, over 3797519.08 frames. ], batch size: 116, lr: 4.61e-03, grad_scale: 16.0 +2023-03-29 06:08:45,427 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2876, 3.5835, 2.0076, 4.1377, 3.6703, 4.0818, 4.1535, 3.1182], + device='cuda:0'), covar=tensor([0.0673, 0.0611, 0.1762, 0.0671, 0.0646, 0.0486, 0.0667, 0.0836], + device='cuda:0'), in_proj_covar=tensor([0.0145, 0.0148, 0.0146, 0.0157, 0.0137, 0.0139, 0.0152, 0.0150], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 06:10:09,690 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61914.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:10:14,458 INFO [train.py:892] (0/4) Epoch 34, batch 700, loss[loss=0.1463, simple_loss=0.2306, pruned_loss=0.03099, over 19882.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2389, pruned_loss=0.04033, over 3832655.06 frames. ], batch size: 52, lr: 4.61e-03, grad_scale: 16.0 +2023-03-29 06:10:38,126 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.620e+02 3.763e+02 4.447e+02 5.196e+02 1.125e+03, threshold=8.894e+02, percent-clipped=5.0 +2023-03-29 06:11:17,248 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7234, 4.7538, 5.0959, 4.9303, 5.0403, 4.6604, 4.8468, 4.6739], + device='cuda:0'), covar=tensor([0.1540, 0.1715, 0.0952, 0.1245, 0.0779, 0.0943, 0.1886, 0.1923], + device='cuda:0'), in_proj_covar=tensor([0.0298, 0.0339, 0.0373, 0.0302, 0.0278, 0.0286, 0.0365, 0.0394], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 06:12:03,170 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4891, 3.4056, 5.0304, 3.8764, 4.0054, 3.8830, 2.7285, 2.9880], + device='cuda:0'), covar=tensor([0.0807, 0.2986, 0.0401, 0.0941, 0.1797, 0.1328, 0.2679, 0.2439], + device='cuda:0'), in_proj_covar=tensor([0.0349, 0.0387, 0.0347, 0.0285, 0.0371, 0.0375, 0.0374, 0.0344], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 06:12:12,038 INFO [train.py:892] (0/4) Epoch 34, batch 750, loss[loss=0.1656, simple_loss=0.2503, pruned_loss=0.04045, over 19788.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.2387, pruned_loss=0.03988, over 3858819.90 frames. ], batch size: 193, lr: 4.61e-03, grad_scale: 16.0 +2023-03-29 06:12:34,530 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61975.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:13:29,123 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-62000.pt +2023-03-29 06:14:10,208 INFO [train.py:892] (0/4) Epoch 34, batch 800, loss[loss=0.1436, simple_loss=0.222, pruned_loss=0.03263, over 19837.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2383, pruned_loss=0.03959, over 3880042.55 frames. 
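checkpoint-62000.pt above (and checkpoint-64000.pt further down) follows the save_every_n=2000 cadence on the global batch counter, while epoch-33.pt/epoch-34.pt are written at epoch boundaries. A small illustration of that cadence; the function and its arguments are illustrative, not the checkpoint.py interface:

from pathlib import Path
from typing import List, Optional

def checkpoints_due(exp_dir: Path, batch_idx_train: int,
                    finished_epoch: Optional[int] = None,
                    save_every_n: int = 2000) -> List[Path]:
    paths = []
    if batch_idx_train > 0 and batch_idx_train % save_every_n == 0:
        paths.append(exp_dir / f"checkpoint-{batch_idx_train}.pt")
    if finished_epoch is not None:
        paths.append(exp_dir / f"epoch-{finished_epoch}.pt")
    return paths

exp_dir = Path("pruned_transducer_stateless7_bbpe/exp")
print(checkpoints_due(exp_dir, 62000))                      # [.../checkpoint-62000.pt]
print(checkpoints_due(exp_dir, 63085, finished_epoch=34))   # [.../epoch-34.pt]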
], batch size: 161, lr: 4.60e-03, grad_scale: 16.0 +2023-03-29 06:14:31,338 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.697e+02 3.764e+02 4.280e+02 5.112e+02 1.282e+03, threshold=8.560e+02, percent-clipped=3.0 +2023-03-29 06:14:43,416 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.4294, 5.8128, 5.9613, 5.7895, 5.6455, 5.6017, 5.6685, 5.5440], + device='cuda:0'), covar=tensor([0.1446, 0.1329, 0.0850, 0.1215, 0.0680, 0.0657, 0.1936, 0.1862], + device='cuda:0'), in_proj_covar=tensor([0.0296, 0.0338, 0.0371, 0.0302, 0.0278, 0.0286, 0.0363, 0.0392], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 06:15:03,118 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.85 vs. limit=5.0 +2023-03-29 06:15:50,103 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2761, 3.3077, 1.9759, 3.9332, 3.5620, 3.8692, 3.9609, 3.0856], + device='cuda:0'), covar=tensor([0.0639, 0.0646, 0.1719, 0.0504, 0.0610, 0.0435, 0.0474, 0.0825], + device='cuda:0'), in_proj_covar=tensor([0.0144, 0.0147, 0.0146, 0.0155, 0.0136, 0.0139, 0.0151, 0.0149], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 06:16:03,786 INFO [train.py:892] (0/4) Epoch 34, batch 850, loss[loss=0.1899, simple_loss=0.2663, pruned_loss=0.05674, over 19615.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2391, pruned_loss=0.03992, over 3894482.50 frames. ], batch size: 351, lr: 4.60e-03, grad_scale: 16.0 +2023-03-29 06:16:13,698 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1850, 2.2119, 1.4996, 2.2679, 2.2432, 2.1941, 2.2362, 1.9006], + device='cuda:0'), covar=tensor([0.0771, 0.0887, 0.1356, 0.0754, 0.0733, 0.0731, 0.0678, 0.1107], + device='cuda:0'), in_proj_covar=tensor([0.0145, 0.0147, 0.0146, 0.0156, 0.0136, 0.0139, 0.0151, 0.0149], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 06:16:53,209 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62088.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:17:17,374 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62099.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:17:54,512 INFO [train.py:892] (0/4) Epoch 34, batch 900, loss[loss=0.1452, simple_loss=0.2293, pruned_loss=0.03054, over 19892.00 frames. ], tot_loss[loss=0.159, simple_loss=0.2387, pruned_loss=0.03969, over 3903926.46 frames. ], batch size: 88, lr: 4.60e-03, grad_scale: 16.0 +2023-03-29 06:18:09,148 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62122.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:18:16,037 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.740e+02 3.509e+02 4.216e+02 5.041e+02 1.543e+03, threshold=8.432e+02, percent-clipped=2.0 +2023-03-29 06:19:48,578 INFO [train.py:892] (0/4) Epoch 34, batch 950, loss[loss=0.1454, simple_loss=0.2255, pruned_loss=0.03267, over 19899.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2395, pruned_loss=0.04001, over 3914157.75 frames. 
], batch size: 113, lr: 4.60e-03, grad_scale: 16.0 +2023-03-29 06:20:28,401 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62183.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:21:05,948 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.5245, 5.9155, 5.9811, 5.7998, 5.7195, 5.6559, 5.6581, 5.5461], + device='cuda:0'), covar=tensor([0.1359, 0.1535, 0.0887, 0.1142, 0.0625, 0.0713, 0.1976, 0.1853], + device='cuda:0'), in_proj_covar=tensor([0.0297, 0.0338, 0.0372, 0.0301, 0.0278, 0.0286, 0.0365, 0.0391], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 06:21:45,055 INFO [train.py:892] (0/4) Epoch 34, batch 1000, loss[loss=0.1391, simple_loss=0.2143, pruned_loss=0.03198, over 19884.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2391, pruned_loss=0.04004, over 3920341.21 frames. ], batch size: 92, lr: 4.60e-03, grad_scale: 16.0 +2023-03-29 06:22:08,034 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.587e+02 3.779e+02 4.502e+02 5.626e+02 1.320e+03, threshold=9.004e+02, percent-clipped=5.0 +2023-03-29 06:23:06,465 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8203, 4.5886, 5.1380, 4.6977, 4.2367, 4.9680, 4.8240, 5.3512], + device='cuda:0'), covar=tensor([0.0793, 0.0360, 0.0357, 0.0376, 0.0811, 0.0484, 0.0481, 0.0274], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0225, 0.0224, 0.0238, 0.0210, 0.0248, 0.0237, 0.0220], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 06:23:39,547 INFO [train.py:892] (0/4) Epoch 34, batch 1050, loss[loss=0.1671, simple_loss=0.2633, pruned_loss=0.03543, over 19734.00 frames. ], tot_loss[loss=0.159, simple_loss=0.2385, pruned_loss=0.0397, over 3928128.94 frames. ], batch size: 54, lr: 4.59e-03, grad_scale: 16.0 +2023-03-29 06:23:49,448 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62270.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:23:55,824 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62273.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:25:31,603 INFO [train.py:892] (0/4) Epoch 34, batch 1100, loss[loss=0.1876, simple_loss=0.2788, pruned_loss=0.04822, over 19726.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2389, pruned_loss=0.03987, over 3933211.24 frames. 
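The attn_weights_entropy dumps are a self-attention health check: one entropy per head (eight values per row, matching nhead=8), averaged over positions, where values near log(sequence length) mean nearly uniform attention and values near 0 mean a head has collapsed onto single frames. A sketch of the statistic; the exact reduction in zipformer.py may differ:

import torch

def attn_weights_entropy(attn: torch.Tensor, eps: float = 1e-20) -> torch.Tensor:
    # attn: (num_heads, query_len, key_len), rows softmax-normalised
    ent = -(attn * (attn + eps).log()).sum(dim=-1)  # entropy per query position
    return ent.mean(dim=-1)                         # one scalar per head

weights = torch.softmax(torch.randn(8, 100, 100), dim=-1)
print(attn_weights_entropy(weights))  # log(100) ~= 4.6 is the diffuse-attention ceiling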
], batch size: 54, lr: 4.59e-03, grad_scale: 16.0 +2023-03-29 06:25:50,518 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9331, 2.7237, 3.1586, 2.7467, 3.2981, 3.1273, 3.8040, 4.1974], + device='cuda:0'), covar=tensor([0.0626, 0.1869, 0.1611, 0.2219, 0.1622, 0.1651, 0.0655, 0.0585], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0245, 0.0272, 0.0257, 0.0302, 0.0261, 0.0238, 0.0262], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 06:25:55,918 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.864e+02 3.591e+02 4.112e+02 5.051e+02 8.825e+02, threshold=8.223e+02, percent-clipped=0.0 +2023-03-29 06:26:07,433 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7557, 4.4356, 4.4660, 4.1893, 4.6843, 3.0699, 3.8710, 2.3316], + device='cuda:0'), covar=tensor([0.0179, 0.0229, 0.0156, 0.0213, 0.0152, 0.1093, 0.0770, 0.1522], + device='cuda:0'), in_proj_covar=tensor([0.0107, 0.0149, 0.0115, 0.0136, 0.0121, 0.0136, 0.0143, 0.0130], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 06:26:07,903 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.71 vs. limit=5.0 +2023-03-29 06:26:15,539 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62334.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:26:34,497 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2991, 4.0916, 4.1054, 3.8564, 4.2733, 2.9926, 3.6372, 2.1454], + device='cuda:0'), covar=tensor([0.0215, 0.0261, 0.0165, 0.0210, 0.0163, 0.1093, 0.0655, 0.1567], + device='cuda:0'), in_proj_covar=tensor([0.0107, 0.0149, 0.0116, 0.0136, 0.0121, 0.0136, 0.0143, 0.0130], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 06:27:28,363 INFO [train.py:892] (0/4) Epoch 34, batch 1150, loss[loss=0.1555, simple_loss=0.2296, pruned_loss=0.04073, over 19872.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2388, pruned_loss=0.03986, over 3936549.23 frames. ], batch size: 108, lr: 4.59e-03, grad_scale: 16.0 +2023-03-29 06:28:19,815 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62388.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:28:26,300 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62391.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:28:46,316 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62399.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:29:25,702 INFO [train.py:892] (0/4) Epoch 34, batch 1200, loss[loss=0.187, simple_loss=0.2655, pruned_loss=0.05425, over 19650.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2397, pruned_loss=0.04031, over 3940286.01 frames. 
], batch size: 299, lr: 4.59e-03, grad_scale: 16.0 +2023-03-29 06:29:49,441 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.475e+02 3.662e+02 4.259e+02 5.186e+02 8.409e+02, threshold=8.517e+02, percent-clipped=1.0 +2023-03-29 06:30:12,707 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62436.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:30:39,291 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62447.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:30:51,223 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62452.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:31:20,400 INFO [train.py:892] (0/4) Epoch 34, batch 1250, loss[loss=0.1967, simple_loss=0.294, pruned_loss=0.04974, over 19671.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.2403, pruned_loss=0.04042, over 3940977.54 frames. ], batch size: 55, lr: 4.59e-03, grad_scale: 16.0 +2023-03-29 06:31:45,995 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62478.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:32:11,274 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5867, 2.6452, 2.7469, 2.6506, 2.6278, 2.6683, 2.6003, 2.8425], + device='cuda:0'), covar=tensor([0.0346, 0.0449, 0.0309, 0.0322, 0.0461, 0.0331, 0.0396, 0.0315], + device='cuda:0'), in_proj_covar=tensor([0.0090, 0.0084, 0.0086, 0.0080, 0.0094, 0.0086, 0.0103, 0.0076], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 06:32:15,654 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.48 vs. limit=5.0 +2023-03-29 06:32:39,085 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.3941, 2.4363, 1.5373, 2.6666, 2.4660, 2.5673, 2.6853, 2.1711], + device='cuda:0'), covar=tensor([0.0708, 0.0762, 0.1526, 0.0649, 0.0721, 0.0589, 0.0693, 0.0928], + device='cuda:0'), in_proj_covar=tensor([0.0142, 0.0144, 0.0144, 0.0153, 0.0134, 0.0137, 0.0149, 0.0146], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 06:32:56,943 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0091, 5.1978, 5.4384, 5.1764, 5.2328, 5.0361, 5.1427, 4.9093], + device='cuda:0'), covar=tensor([0.1551, 0.1482, 0.0862, 0.1341, 0.0748, 0.0793, 0.1939, 0.2091], + device='cuda:0'), in_proj_covar=tensor([0.0299, 0.0339, 0.0373, 0.0305, 0.0280, 0.0288, 0.0368, 0.0395], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 06:32:57,107 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2863, 2.5195, 4.4982, 3.7890, 4.2051, 4.4456, 4.2569, 4.1380], + device='cuda:0'), covar=tensor([0.0586, 0.1076, 0.0101, 0.0740, 0.0153, 0.0191, 0.0167, 0.0189], + device='cuda:0'), in_proj_covar=tensor([0.0101, 0.0104, 0.0089, 0.0153, 0.0087, 0.0099, 0.0090, 0.0087], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 06:33:12,370 INFO [train.py:892] (0/4) Epoch 34, batch 1300, loss[loss=0.1457, simple_loss=0.2263, pruned_loss=0.03257, over 19766.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2394, pruned_loss=0.03974, over 3943009.27 frames. 
], batch size: 88, lr: 4.58e-03, grad_scale: 16.0 +2023-03-29 06:33:19,937 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4647, 2.7717, 2.4497, 1.9975, 2.5911, 2.7157, 2.8065, 2.7821], + device='cuda:0'), covar=tensor([0.0379, 0.0349, 0.0359, 0.0627, 0.0362, 0.0325, 0.0293, 0.0259], + device='cuda:0'), in_proj_covar=tensor([0.0108, 0.0101, 0.0103, 0.0105, 0.0108, 0.0091, 0.0091, 0.0091], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 06:33:37,403 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.393e+02 3.741e+02 4.359e+02 5.306e+02 1.139e+03, threshold=8.717e+02, percent-clipped=4.0 +2023-03-29 06:34:21,053 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62545.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:35:01,913 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.52 vs. limit=5.0 +2023-03-29 06:35:08,983 INFO [train.py:892] (0/4) Epoch 34, batch 1350, loss[loss=0.1685, simple_loss=0.2572, pruned_loss=0.03986, over 19804.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2391, pruned_loss=0.03955, over 3945591.45 frames. ], batch size: 68, lr: 4.58e-03, grad_scale: 16.0 +2023-03-29 06:35:17,977 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62570.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:35:50,124 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62585.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:36:41,129 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62606.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:37:03,314 INFO [train.py:892] (0/4) Epoch 34, batch 1400, loss[loss=0.2671, simple_loss=0.3437, pruned_loss=0.09522, over 19250.00 frames. ], tot_loss[loss=0.1585, simple_loss=0.2383, pruned_loss=0.03939, over 3946445.18 frames. ], batch size: 483, lr: 4.58e-03, grad_scale: 16.0 +2023-03-29 06:37:08,439 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62618.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:37:26,791 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.572e+02 3.612e+02 4.260e+02 5.220e+02 9.254e+02, threshold=8.520e+02, percent-clipped=0.0 +2023-03-29 06:37:35,432 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62629.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:38:13,446 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62646.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:38:58,244 INFO [train.py:892] (0/4) Epoch 34, batch 1450, loss[loss=0.1824, simple_loss=0.255, pruned_loss=0.05492, over 19762.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2392, pruned_loss=0.03988, over 3947212.91 frames. 
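The scaling.py "Whitening" lines compare, per group of channels, the channel covariance against a multiple of the identity: the metric is 1.0 for perfectly whitened activations and grows as the eigenvalue spread widens, with a penalty applied once it exceeds the limit. A plausible reconstruction of such a metric, assuming contiguous channel grouping (scaling.py's exact estimator may differ):

import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    # x: (num_frames, num_channels); num_channels divisible by num_groups
    n, c = x.shape
    g = c // num_groups
    x = x.reshape(n, num_groups, g).transpose(0, 1)        # (groups, n, g)
    cov = x.transpose(1, 2) @ x / n                        # (groups, g, g)
    num = (cov ** 2).sum(dim=(1, 2)) * g                   # g * sum of eigvals^2
    den = cov.diagonal(dim1=1, dim2=2).sum(dim=-1) ** 2    # (trace)^2
    return (num / den).mean()                              # 1.0 iff cov is a multiple of I

x = torch.randn(1000, 96)
print(whitening_metric(x, num_groups=8))  # close to 1.0 for white noise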
], batch size: 233, lr: 4.58e-03, grad_scale: 16.0 +2023-03-29 06:39:00,649 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62666.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:40:16,633 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2982, 4.3347, 4.6832, 4.4626, 4.6107, 4.1874, 4.4357, 4.2115], + device='cuda:0'), covar=tensor([0.1604, 0.1705, 0.0927, 0.1305, 0.0892, 0.1039, 0.1830, 0.2086], + device='cuda:0'), in_proj_covar=tensor([0.0301, 0.0340, 0.0374, 0.0306, 0.0280, 0.0290, 0.0369, 0.0396], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 06:40:20,914 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-29 06:40:56,699 INFO [train.py:892] (0/4) Epoch 34, batch 1500, loss[loss=0.1408, simple_loss=0.2235, pruned_loss=0.02902, over 19747.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.2389, pruned_loss=0.03976, over 3947959.43 frames. ], batch size: 106, lr: 4.58e-03, grad_scale: 32.0 +2023-03-29 06:41:19,189 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.451e+02 3.877e+02 4.374e+02 5.206e+02 9.071e+02, threshold=8.749e+02, percent-clipped=3.0 +2023-03-29 06:41:22,442 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62727.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:41:34,373 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-29 06:42:11,169 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62747.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:42:53,782 INFO [train.py:892] (0/4) Epoch 34, batch 1550, loss[loss=0.1859, simple_loss=0.2674, pruned_loss=0.05218, over 19853.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2387, pruned_loss=0.0396, over 3948857.20 frames. ], batch size: 81, lr: 4.58e-03, grad_scale: 32.0 +2023-03-29 06:43:11,266 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62773.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:43:14,145 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-29 06:43:22,560 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62778.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:43:27,101 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-29 06:44:11,750 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7048, 5.0047, 5.0629, 4.9691, 4.6038, 5.0539, 4.5721, 4.5675], + device='cuda:0'), covar=tensor([0.0472, 0.0462, 0.0458, 0.0397, 0.0660, 0.0459, 0.0654, 0.0936], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0292, 0.0307, 0.0269, 0.0274, 0.0258, 0.0273, 0.0320], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 06:44:53,503 INFO [train.py:892] (0/4) Epoch 34, batch 1600, loss[loss=0.1576, simple_loss=0.2339, pruned_loss=0.04063, over 19770.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2393, pruned_loss=0.0397, over 3947518.79 frames. 
], batch size: 241, lr: 4.57e-03, grad_scale: 32.0 +2023-03-29 06:45:16,011 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.873e+02 3.915e+02 4.396e+02 5.347e+02 9.230e+02, threshold=8.791e+02, percent-clipped=1.0 +2023-03-29 06:45:16,916 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62826.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:45:34,177 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62834.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:45:58,972 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-03-29 06:46:05,070 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1658, 5.4042, 5.4886, 5.3948, 5.1357, 5.4498, 4.9656, 4.9393], + device='cuda:0'), covar=tensor([0.0408, 0.0445, 0.0456, 0.0393, 0.0567, 0.0483, 0.0621, 0.0916], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0295, 0.0309, 0.0271, 0.0276, 0.0261, 0.0276, 0.0323], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 06:46:37,377 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6939, 2.8867, 4.1173, 3.2127, 3.4045, 3.2703, 2.3998, 2.5442], + device='cuda:0'), covar=tensor([0.1176, 0.2824, 0.0564, 0.1075, 0.1758, 0.1508, 0.2714, 0.2703], + device='cuda:0'), in_proj_covar=tensor([0.0351, 0.0390, 0.0349, 0.0287, 0.0373, 0.0381, 0.0377, 0.0347], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 06:46:47,352 INFO [train.py:892] (0/4) Epoch 34, batch 1650, loss[loss=0.1476, simple_loss=0.23, pruned_loss=0.03261, over 19883.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2387, pruned_loss=0.03973, over 3948462.61 frames. ], batch size: 52, lr: 4.57e-03, grad_scale: 32.0 +2023-03-29 06:48:07,217 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9677, 4.6184, 4.6811, 4.4194, 4.9446, 3.3101, 4.0157, 2.3746], + device='cuda:0'), covar=tensor([0.0163, 0.0207, 0.0143, 0.0183, 0.0126, 0.0817, 0.0748, 0.1509], + device='cuda:0'), in_proj_covar=tensor([0.0107, 0.0149, 0.0116, 0.0137, 0.0121, 0.0136, 0.0143, 0.0129], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 06:48:09,082 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62901.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:48:42,513 INFO [train.py:892] (0/4) Epoch 34, batch 1700, loss[loss=0.1708, simple_loss=0.2447, pruned_loss=0.04841, over 19759.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.239, pruned_loss=0.03987, over 3948667.85 frames. ], batch size: 198, lr: 4.57e-03, grad_scale: 32.0 +2023-03-29 06:49:05,118 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.243e+02 3.824e+02 4.369e+02 5.372e+02 9.431e+02, threshold=8.739e+02, percent-clipped=1.0 +2023-03-29 06:49:12,495 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. 
limit=2.0 +2023-03-29 06:49:14,336 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62929.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:49:41,150 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62941.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:50:16,194 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0789, 3.0375, 1.9545, 3.6546, 3.2835, 3.5347, 3.6196, 2.9410], + device='cuda:0'), covar=tensor([0.0642, 0.0718, 0.1715, 0.0641, 0.0720, 0.0510, 0.0632, 0.0807], + device='cuda:0'), in_proj_covar=tensor([0.0145, 0.0146, 0.0146, 0.0155, 0.0137, 0.0139, 0.0150, 0.0149], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 06:50:34,870 INFO [train.py:892] (0/4) Epoch 34, batch 1750, loss[loss=0.1508, simple_loss=0.2172, pruned_loss=0.04225, over 19849.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2388, pruned_loss=0.03991, over 3948300.48 frames. ], batch size: 145, lr: 4.57e-03, grad_scale: 32.0 +2023-03-29 06:50:56,671 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62977.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:51:43,620 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1434, 4.8356, 4.8743, 5.1719, 4.8333, 5.3534, 5.2562, 5.4563], + device='cuda:0'), covar=tensor([0.0706, 0.0409, 0.0501, 0.0404, 0.0672, 0.0459, 0.0439, 0.0344], + device='cuda:0'), in_proj_covar=tensor([0.0156, 0.0181, 0.0205, 0.0181, 0.0180, 0.0162, 0.0155, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 06:52:02,733 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-03-29 06:52:08,786 INFO [train.py:892] (0/4) Epoch 34, batch 1800, loss[loss=0.1356, simple_loss=0.2176, pruned_loss=0.02677, over 19661.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.238, pruned_loss=0.03959, over 3948363.42 frames. ], batch size: 67, lr: 4.57e-03, grad_scale: 32.0 +2023-03-29 06:52:20,232 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63022.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:52:26,906 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.273e+02 3.774e+02 4.619e+02 5.631e+02 1.047e+03, threshold=9.238e+02, percent-clipped=1.0 +2023-03-29 06:52:31,239 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8226, 3.5586, 3.6824, 3.8513, 3.6337, 3.8252, 3.9173, 4.1315], + device='cuda:0'), covar=tensor([0.0719, 0.0485, 0.0585, 0.0433, 0.0741, 0.0606, 0.0504, 0.0325], + device='cuda:0'), in_proj_covar=tensor([0.0156, 0.0181, 0.0205, 0.0181, 0.0180, 0.0162, 0.0156, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 06:53:07,160 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63047.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:53:40,510 INFO [train.py:892] (0/4) Epoch 34, batch 1850, loss[loss=0.1583, simple_loss=0.2536, pruned_loss=0.03144, over 19821.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2387, pruned_loss=0.03924, over 3948955.97 frames. 
], batch size: 57, lr: 4.56e-03, grad_scale: 32.0 +2023-03-29 06:53:48,066 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-34.pt +2023-03-29 06:54:44,125 INFO [train.py:892] (0/4) Epoch 35, batch 0, loss[loss=0.1646, simple_loss=0.2404, pruned_loss=0.04442, over 19699.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2404, pruned_loss=0.04442, over 19699.00 frames. ], batch size: 46, lr: 4.50e-03, grad_scale: 32.0 +2023-03-29 06:54:44,126 INFO [train.py:917] (0/4) Computing validation loss +2023-03-29 06:55:00,094 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0206, 2.7345, 3.1762, 3.2666, 3.6577, 4.0715, 3.8411, 3.9261], + device='cuda:0'), covar=tensor([0.0964, 0.1479, 0.1203, 0.0718, 0.0501, 0.0253, 0.0412, 0.0385], + device='cuda:0'), in_proj_covar=tensor([0.0162, 0.0169, 0.0179, 0.0152, 0.0138, 0.0134, 0.0125, 0.0118], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 06:55:18,886 INFO [train.py:926] (0/4) Epoch 35, validation: loss=0.1837, simple_loss=0.2499, pruned_loss=0.05876, over 2883724.00 frames. +2023-03-29 06:55:18,887 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-29 06:56:18,621 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63095.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:56:21,703 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-03-29 06:57:16,282 INFO [train.py:892] (0/4) Epoch 35, batch 50, loss[loss=0.1556, simple_loss=0.2396, pruned_loss=0.03579, over 19718.00 frames. ], tot_loss[loss=0.1527, simple_loss=0.2312, pruned_loss=0.0371, over 891479.04 frames. ], batch size: 62, lr: 4.50e-03, grad_scale: 32.0 +2023-03-29 06:57:28,114 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.370e+02 3.277e+02 3.899e+02 4.647e+02 1.054e+03, threshold=7.797e+02, percent-clipped=1.0 +2023-03-29 06:57:36,686 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63129.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 06:58:46,037 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5580, 3.1979, 3.5588, 2.9877, 3.7231, 3.7070, 4.3907, 4.8005], + device='cuda:0'), covar=tensor([0.0505, 0.1614, 0.1405, 0.2265, 0.1690, 0.1328, 0.0545, 0.0480], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0241, 0.0269, 0.0254, 0.0299, 0.0257, 0.0234, 0.0260], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 06:58:51,613 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7004, 2.3191, 3.6886, 3.1797, 3.6169, 3.6922, 3.4679, 3.4805], + device='cuda:0'), covar=tensor([0.0709, 0.1019, 0.0130, 0.0466, 0.0161, 0.0243, 0.0202, 0.0206], + device='cuda:0'), in_proj_covar=tensor([0.0100, 0.0104, 0.0089, 0.0152, 0.0086, 0.0099, 0.0090, 0.0087], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 06:59:14,200 INFO [train.py:892] (0/4) Epoch 35, batch 100, loss[loss=0.1584, simple_loss=0.2484, pruned_loss=0.03423, over 19901.00 frames. ], tot_loss[loss=0.1532, simple_loss=0.2321, pruned_loss=0.03712, over 1569809.83 frames. ], batch size: 50, lr: 4.49e-03, grad_scale: 32.0 +2023-03-29 07:00:21,335 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. 
limit=2.0 +2023-03-29 07:00:25,487 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63201.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:00:31,873 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7966, 2.3200, 3.7564, 3.2425, 3.7384, 3.7762, 3.5335, 3.5316], + device='cuda:0'), covar=tensor([0.0669, 0.1053, 0.0129, 0.0519, 0.0150, 0.0240, 0.0209, 0.0214], + device='cuda:0'), in_proj_covar=tensor([0.0101, 0.0105, 0.0090, 0.0153, 0.0087, 0.0099, 0.0091, 0.0087], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 07:00:41,560 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9480, 3.2389, 3.4633, 3.8540, 2.7926, 3.2008, 2.6079, 2.6002], + device='cuda:0'), covar=tensor([0.0535, 0.1921, 0.0982, 0.0461, 0.1930, 0.0923, 0.1427, 0.1628], + device='cuda:0'), in_proj_covar=tensor([0.0249, 0.0334, 0.0252, 0.0209, 0.0252, 0.0214, 0.0224, 0.0221], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 07:01:09,272 INFO [train.py:892] (0/4) Epoch 35, batch 150, loss[loss=0.155, simple_loss=0.2256, pruned_loss=0.04219, over 19785.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2331, pruned_loss=0.03725, over 2096793.78 frames. ], batch size: 152, lr: 4.49e-03, grad_scale: 16.0 +2023-03-29 07:01:10,364 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63221.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:01:22,850 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.863e+02 3.708e+02 4.219e+02 5.359e+02 8.315e+02, threshold=8.439e+02, percent-clipped=1.0 +2023-03-29 07:01:57,149 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63241.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:02:14,839 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63249.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:03:02,043 INFO [train.py:892] (0/4) Epoch 35, batch 200, loss[loss=0.1468, simple_loss=0.2163, pruned_loss=0.03864, over 19807.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2366, pruned_loss=0.03854, over 2508012.75 frames. 
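The zipformer.py:625 lines track stochastic layer skipping: each encoder stack has a warmup window in batches (warmup_begin/warmup_end), whole layers are bypassed aggressively inside it, and by batch_count ~63k, as here, num_to_drop is almost always 0 with at most a small residual rate. The ramp below is an illustrative guess at that behaviour; the probabilities are not zipformer.py's actual constants:

import random

def layers_to_drop(num_layers: int, batch_count: float,
                   warmup_begin: float, warmup_end: float) -> set:
    if batch_count < warmup_begin:
        p = 0.5                      # heavy dropping before the window opens
    elif batch_count < warmup_end:   # linear ramp down across the window
        t = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
        p = 0.5 * (1.0 - t)
    else:
        p = 0.025                    # small residual layer-dropout afterwards
    return {i for i in range(num_layers) if random.random() < p}

print(layers_to_drop(4, batch_count=5.0, warmup_begin=3333.3, warmup_end=4000.0))
print(layers_to_drop(4, batch_count=63201.0, warmup_begin=2000.0, warmup_end=2666.7))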
], batch size: 132, lr: 4.49e-03, grad_scale: 16.0 +2023-03-29 07:03:28,838 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63282.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:03:45,914 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63289.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:04:33,900 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2681, 3.2474, 4.9373, 3.5636, 3.8974, 3.7044, 2.5536, 2.8043], + device='cuda:0'), covar=tensor([0.0932, 0.3300, 0.0415, 0.1105, 0.1709, 0.1492, 0.2741, 0.2707], + device='cuda:0'), in_proj_covar=tensor([0.0353, 0.0391, 0.0351, 0.0288, 0.0375, 0.0382, 0.0379, 0.0349], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 07:04:53,502 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5039, 3.8564, 4.0569, 4.6321, 3.0263, 3.4644, 3.1065, 3.0718], + device='cuda:0'), covar=tensor([0.0507, 0.1983, 0.0814, 0.0352, 0.2054, 0.1050, 0.1150, 0.1391], + device='cuda:0'), in_proj_covar=tensor([0.0248, 0.0331, 0.0251, 0.0207, 0.0250, 0.0213, 0.0223, 0.0219], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 07:04:56,636 INFO [train.py:892] (0/4) Epoch 35, batch 250, loss[loss=0.1506, simple_loss=0.2316, pruned_loss=0.03476, over 19857.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2353, pruned_loss=0.03793, over 2828175.55 frames. ], batch size: 122, lr: 4.49e-03, grad_scale: 16.0 +2023-03-29 07:04:59,430 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63322.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:05:09,784 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.149e+02 3.669e+02 4.457e+02 5.249e+02 9.820e+02, threshold=8.914e+02, percent-clipped=1.0 +2023-03-29 07:06:47,969 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63370.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:06:49,438 INFO [train.py:892] (0/4) Epoch 35, batch 300, loss[loss=0.1592, simple_loss=0.2424, pruned_loss=0.03799, over 19724.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2358, pruned_loss=0.03821, over 3077459.07 frames. ], batch size: 80, lr: 4.49e-03, grad_scale: 16.0 +2023-03-29 07:08:45,481 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63420.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:08:46,588 INFO [train.py:892] (0/4) Epoch 35, batch 350, loss[loss=0.1438, simple_loss=0.2262, pruned_loss=0.03065, over 19759.00 frames. ], tot_loss[loss=0.156, simple_loss=0.2359, pruned_loss=0.03803, over 3271368.86 frames. ], batch size: 88, lr: 4.49e-03, grad_scale: 16.0 +2023-03-29 07:08:49,481 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63422.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:09:00,234 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.282e+02 3.589e+02 4.143e+02 4.777e+02 8.790e+02, threshold=8.287e+02, percent-clipped=0.0 +2023-03-29 07:09:08,259 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63429.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:10:11,747 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. 
limit=2.0 +2023-03-29 07:10:44,412 INFO [train.py:892] (0/4) Epoch 35, batch 400, loss[loss=0.139, simple_loss=0.2216, pruned_loss=0.02823, over 19745.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2359, pruned_loss=0.03823, over 3422483.89 frames. ], batch size: 77, lr: 4.48e-03, grad_scale: 16.0 +2023-03-29 07:10:58,924 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63477.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:11:07,746 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63481.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:11:11,953 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63483.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:11:26,807 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9935, 4.9010, 5.4159, 4.9668, 4.4004, 5.1574, 5.0602, 5.6202], + device='cuda:0'), covar=tensor([0.0859, 0.0353, 0.0324, 0.0331, 0.0733, 0.0449, 0.0438, 0.0308], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0225, 0.0225, 0.0238, 0.0209, 0.0250, 0.0239, 0.0221], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 07:12:36,905 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6848, 4.4435, 4.4092, 4.1031, 4.6892, 3.0098, 3.6373, 2.1571], + device='cuda:0'), covar=tensor([0.0292, 0.0257, 0.0236, 0.0264, 0.0262, 0.1201, 0.1110, 0.2041], + device='cuda:0'), in_proj_covar=tensor([0.0107, 0.0148, 0.0115, 0.0136, 0.0121, 0.0136, 0.0143, 0.0129], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 07:12:42,323 INFO [train.py:892] (0/4) Epoch 35, batch 450, loss[loss=0.1708, simple_loss=0.2588, pruned_loss=0.04145, over 19944.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2365, pruned_loss=0.03841, over 3540553.18 frames. ], batch size: 52, lr: 4.48e-03, grad_scale: 16.0 +2023-03-29 07:12:56,127 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.456e+02 3.739e+02 4.362e+02 5.360e+02 8.901e+02, threshold=8.724e+02, percent-clipped=1.0 +2023-03-29 07:14:31,899 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63569.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:14:35,585 INFO [train.py:892] (0/4) Epoch 35, batch 500, loss[loss=0.1469, simple_loss=0.23, pruned_loss=0.03188, over 19805.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2376, pruned_loss=0.0392, over 3631142.92 frames. ], batch size: 126, lr: 4.48e-03, grad_scale: 16.0 +2023-03-29 07:14:52,553 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63577.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:15:05,438 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-29 07:15:10,989 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63586.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:16:32,815 INFO [train.py:892] (0/4) Epoch 35, batch 550, loss[loss=0.1568, simple_loss=0.2381, pruned_loss=0.0378, over 19612.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2401, pruned_loss=0.04025, over 3697778.93 frames. 
], batch size: 51, lr: 4.48e-03, grad_scale: 16.0 +2023-03-29 07:16:47,191 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.269e+02 3.713e+02 4.298e+02 5.040e+02 8.851e+02, threshold=8.596e+02, percent-clipped=1.0 +2023-03-29 07:16:54,423 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63630.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:17:34,927 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63647.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:18:28,410 INFO [train.py:892] (0/4) Epoch 35, batch 600, loss[loss=0.1602, simple_loss=0.2368, pruned_loss=0.04185, over 19797.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2398, pruned_loss=0.03964, over 3752333.38 frames. ], batch size: 224, lr: 4.48e-03, grad_scale: 16.0 +2023-03-29 07:20:26,999 INFO [train.py:892] (0/4) Epoch 35, batch 650, loss[loss=0.1566, simple_loss=0.2379, pruned_loss=0.0377, over 19776.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.239, pruned_loss=0.03934, over 3796183.50 frames. ], batch size: 113, lr: 4.48e-03, grad_scale: 16.0 +2023-03-29 07:20:40,711 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.573e+02 3.756e+02 4.198e+02 4.970e+02 9.612e+02, threshold=8.396e+02, percent-clipped=1.0 +2023-03-29 07:21:04,108 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0376, 3.2870, 2.8126, 2.3595, 2.9008, 3.0549, 3.1328, 3.1351], + device='cuda:0'), covar=tensor([0.0272, 0.0245, 0.0280, 0.0516, 0.0361, 0.0336, 0.0246, 0.0253], + device='cuda:0'), in_proj_covar=tensor([0.0108, 0.0102, 0.0104, 0.0105, 0.0108, 0.0091, 0.0092, 0.0091], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 07:21:05,994 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2908, 3.5897, 3.6969, 4.3231, 2.9189, 3.4451, 2.6148, 2.6754], + device='cuda:0'), covar=tensor([0.0521, 0.1823, 0.0979, 0.0400, 0.2048, 0.0936, 0.1409, 0.1674], + device='cuda:0'), in_proj_covar=tensor([0.0249, 0.0332, 0.0250, 0.0207, 0.0249, 0.0212, 0.0223, 0.0219], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 07:22:16,877 INFO [train.py:892] (0/4) Epoch 35, batch 700, loss[loss=0.1343, simple_loss=0.2194, pruned_loss=0.02456, over 19760.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2391, pruned_loss=0.03932, over 3830281.39 frames. 
], batch size: 113, lr: 4.47e-03, grad_scale: 16.0 +2023-03-29 07:22:29,868 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63776.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:22:33,900 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63778.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:23:50,844 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63810.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:23:53,136 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4851, 4.2516, 4.2466, 3.9983, 4.4762, 3.1318, 3.7480, 2.4291], + device='cuda:0'), covar=tensor([0.0175, 0.0217, 0.0157, 0.0202, 0.0141, 0.0977, 0.0637, 0.1343], + device='cuda:0'), in_proj_covar=tensor([0.0107, 0.0150, 0.0116, 0.0138, 0.0122, 0.0137, 0.0144, 0.0130], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 07:24:14,492 INFO [train.py:892] (0/4) Epoch 35, batch 750, loss[loss=0.1571, simple_loss=0.2433, pruned_loss=0.03544, over 19911.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2378, pruned_loss=0.03892, over 3858339.26 frames. ], batch size: 53, lr: 4.47e-03, grad_scale: 16.0 +2023-03-29 07:24:28,541 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.314e+02 3.905e+02 4.508e+02 5.292e+02 1.021e+03, threshold=9.015e+02, percent-clipped=3.0 +2023-03-29 07:25:13,063 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63845.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:26:13,516 INFO [train.py:892] (0/4) Epoch 35, batch 800, loss[loss=0.1796, simple_loss=0.2639, pruned_loss=0.04768, over 19643.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2385, pruned_loss=0.03902, over 3878598.79 frames. ], batch size: 343, lr: 4.47e-03, grad_scale: 16.0 +2023-03-29 07:26:14,575 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63871.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:26:28,814 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63877.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:27:35,428 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-03-29 07:27:37,113 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63906.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:28:01,234 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63916.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:28:11,333 INFO [train.py:892] (0/4) Epoch 35, batch 850, loss[loss=0.1401, simple_loss=0.2212, pruned_loss=0.02951, over 19707.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2394, pruned_loss=0.03912, over 3892036.62 frames. 
], batch size: 101, lr: 4.47e-03, grad_scale: 16.0 +2023-03-29 07:28:21,645 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63925.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:28:21,665 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63925.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:28:25,041 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.864e+02 3.650e+02 4.447e+02 5.355e+02 8.426e+02, threshold=8.894e+02, percent-clipped=0.0 +2023-03-29 07:29:02,891 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63942.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:29:50,692 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0324, 2.5882, 2.9884, 3.1547, 3.6739, 4.1348, 3.9479, 4.0070], + device='cuda:0'), covar=tensor([0.0907, 0.1553, 0.1311, 0.0700, 0.0433, 0.0226, 0.0328, 0.0356], + device='cuda:0'), in_proj_covar=tensor([0.0162, 0.0170, 0.0180, 0.0152, 0.0138, 0.0134, 0.0126, 0.0118], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 07:30:06,807 INFO [train.py:892] (0/4) Epoch 35, batch 900, loss[loss=0.1586, simple_loss=0.2435, pruned_loss=0.03679, over 19783.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2395, pruned_loss=0.03919, over 3904879.63 frames. ], batch size: 241, lr: 4.47e-03, grad_scale: 16.0 +2023-03-29 07:30:14,021 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6178, 4.6810, 4.9834, 4.7798, 4.9385, 4.5774, 4.7160, 4.5144], + device='cuda:0'), covar=tensor([0.1663, 0.1450, 0.0905, 0.1257, 0.0898, 0.0961, 0.2003, 0.2075], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0341, 0.0379, 0.0308, 0.0282, 0.0292, 0.0370, 0.0400], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 07:30:20,230 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63977.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:30:37,752 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5004, 4.7400, 4.8082, 4.6987, 4.4489, 4.7587, 4.3572, 4.3398], + device='cuda:0'), covar=tensor([0.0488, 0.0487, 0.0449, 0.0417, 0.0646, 0.0501, 0.0639, 0.0939], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0291, 0.0304, 0.0267, 0.0273, 0.0257, 0.0273, 0.0319], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 07:31:13,439 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-64000.pt +2023-03-29 07:32:03,691 INFO [train.py:892] (0/4) Epoch 35, batch 950, loss[loss=0.161, simple_loss=0.2479, pruned_loss=0.03701, over 19642.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.2388, pruned_loss=0.03875, over 3915302.37 frames. ], batch size: 47, lr: 4.46e-03, grad_scale: 16.0 +2023-03-29 07:32:08,522 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64023.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:32:16,291 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.345e+02 3.650e+02 4.248e+02 4.892e+02 8.257e+02, threshold=8.496e+02, percent-clipped=0.0 +2023-03-29 07:33:58,707 INFO [train.py:892] (0/4) Epoch 35, batch 1000, loss[loss=0.1227, simple_loss=0.1916, pruned_loss=0.02688, over 19735.00 frames. 
], tot_loss[loss=0.1583, simple_loss=0.2385, pruned_loss=0.03902, over 3921826.19 frames. ], batch size: 99, lr: 4.46e-03, grad_scale: 16.0 +2023-03-29 07:34:09,905 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64076.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:34:14,039 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64078.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:34:27,761 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64084.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:35:51,145 INFO [train.py:892] (0/4) Epoch 35, batch 1050, loss[loss=0.1376, simple_loss=0.2148, pruned_loss=0.03021, over 19657.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2373, pruned_loss=0.03874, over 3927544.45 frames. ], batch size: 47, lr: 4.46e-03, grad_scale: 16.0 +2023-03-29 07:35:58,661 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64124.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:36:04,309 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64126.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:36:05,628 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.257e+02 3.875e+02 4.468e+02 5.208e+02 1.393e+03, threshold=8.936e+02, percent-clipped=2.0 +2023-03-29 07:36:08,988 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3007, 4.9208, 4.9427, 5.2563, 4.8842, 5.4949, 5.3583, 5.5605], + device='cuda:0'), covar=tensor([0.0613, 0.0521, 0.0539, 0.0406, 0.0649, 0.0481, 0.0412, 0.0357], + device='cuda:0'), in_proj_covar=tensor([0.0157, 0.0183, 0.0206, 0.0182, 0.0181, 0.0164, 0.0156, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 07:36:16,874 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0830, 3.1960, 1.9365, 3.7075, 3.4202, 3.6571, 3.6952, 2.9281], + device='cuda:0'), covar=tensor([0.0691, 0.0687, 0.1720, 0.0720, 0.0646, 0.0498, 0.0692, 0.0904], + device='cuda:0'), in_proj_covar=tensor([0.0145, 0.0147, 0.0146, 0.0155, 0.0137, 0.0139, 0.0151, 0.0149], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 07:37:37,639 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64166.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:37:45,580 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7565, 3.1243, 3.2287, 3.6710, 2.4665, 3.1761, 2.3560, 2.3511], + device='cuda:0'), covar=tensor([0.0538, 0.1467, 0.1014, 0.0484, 0.2107, 0.0811, 0.1412, 0.1621], + device='cuda:0'), in_proj_covar=tensor([0.0249, 0.0332, 0.0252, 0.0208, 0.0250, 0.0212, 0.0223, 0.0218], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 07:37:51,243 INFO [train.py:892] (0/4) Epoch 35, batch 1100, loss[loss=0.1446, simple_loss=0.2184, pruned_loss=0.03541, over 19802.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.2377, pruned_loss=0.03904, over 3933380.42 frames. ], batch size: 148, lr: 4.46e-03, grad_scale: 16.0 +2023-03-29 07:38:09,502 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. 
limit=2.0 +2023-03-29 07:39:00,203 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64201.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:39:32,870 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=2.01 vs. limit=2.0 +2023-03-29 07:39:48,435 INFO [train.py:892] (0/4) Epoch 35, batch 1150, loss[loss=0.1762, simple_loss=0.2538, pruned_loss=0.04928, over 19708.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.2377, pruned_loss=0.0391, over 3936554.14 frames. ], batch size: 305, lr: 4.46e-03, grad_scale: 16.0 +2023-03-29 07:39:57,618 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64225.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:39:58,185 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0 +2023-03-29 07:40:00,951 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.058e+02 3.821e+02 4.310e+02 5.075e+02 9.198e+02, threshold=8.620e+02, percent-clipped=1.0 +2023-03-29 07:40:13,055 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64232.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:40:37,762 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64242.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:41:41,969 INFO [train.py:892] (0/4) Epoch 35, batch 1200, loss[loss=0.1432, simple_loss=0.2273, pruned_loss=0.0295, over 19838.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2386, pruned_loss=0.0393, over 3936739.86 frames. ], batch size: 59, lr: 4.46e-03, grad_scale: 16.0 +2023-03-29 07:41:46,072 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64272.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:41:48,096 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64273.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:42:26,845 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64290.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:42:33,596 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9250, 2.4742, 4.0686, 3.6330, 4.0069, 4.0300, 3.8420, 3.7805], + device='cuda:0'), covar=tensor([0.0613, 0.0985, 0.0119, 0.0589, 0.0149, 0.0222, 0.0177, 0.0186], + device='cuda:0'), in_proj_covar=tensor([0.0100, 0.0104, 0.0089, 0.0151, 0.0086, 0.0098, 0.0090, 0.0087], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 07:42:33,661 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64293.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:43:36,108 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2097, 2.5790, 3.4048, 3.3969, 3.9018, 4.3417, 4.1480, 4.3336], + device='cuda:0'), covar=tensor([0.0907, 0.1746, 0.1154, 0.0677, 0.0384, 0.0212, 0.0336, 0.0348], + device='cuda:0'), in_proj_covar=tensor([0.0161, 0.0169, 0.0178, 0.0151, 0.0137, 0.0133, 0.0126, 0.0117], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 07:43:37,214 INFO [train.py:892] (0/4) Epoch 35, batch 1250, loss[loss=0.155, simple_loss=0.2441, pruned_loss=0.03295, over 19581.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2376, pruned_loss=0.03878, over 3940749.44 frames. 
], batch size: 49, lr: 4.45e-03, grad_scale: 16.0 +2023-03-29 07:43:50,736 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.911e+02 3.890e+02 4.539e+02 5.516e+02 1.564e+03, threshold=9.078e+02, percent-clipped=1.0 +2023-03-29 07:44:36,183 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64345.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:45:36,292 INFO [train.py:892] (0/4) Epoch 35, batch 1300, loss[loss=0.1516, simple_loss=0.2297, pruned_loss=0.0367, over 19790.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2373, pruned_loss=0.03863, over 3942892.61 frames. ], batch size: 87, lr: 4.45e-03, grad_scale: 16.0 +2023-03-29 07:45:55,073 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64379.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:46:59,255 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64406.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:47:31,801 INFO [train.py:892] (0/4) Epoch 35, batch 1350, loss[loss=0.1484, simple_loss=0.2285, pruned_loss=0.03409, over 19840.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2383, pruned_loss=0.03908, over 3942497.76 frames. ], batch size: 160, lr: 4.45e-03, grad_scale: 16.0 +2023-03-29 07:47:46,398 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.065e+02 3.591e+02 4.239e+02 5.222e+02 9.468e+02, threshold=8.478e+02, percent-clipped=2.0 +2023-03-29 07:48:21,369 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-03-29 07:49:14,288 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64465.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:49:14,662 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-03-29 07:49:16,480 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64466.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:49:25,914 INFO [train.py:892] (0/4) Epoch 35, batch 1400, loss[loss=0.1721, simple_loss=0.2475, pruned_loss=0.04836, over 19710.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.238, pruned_loss=0.0389, over 3943987.85 frames. ], batch size: 78, lr: 4.45e-03, grad_scale: 16.0 +2023-03-29 07:50:37,428 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64501.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:51:09,380 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64514.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:51:23,328 INFO [train.py:892] (0/4) Epoch 35, batch 1450, loss[loss=0.1533, simple_loss=0.2261, pruned_loss=0.04024, over 19771.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2385, pruned_loss=0.03896, over 3943501.44 frames. 
], batch size: 152, lr: 4.45e-03, grad_scale: 16.0 +2023-03-29 07:51:35,142 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64526.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:51:36,072 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.625e+02 3.695e+02 4.182e+02 5.095e+02 9.377e+02, threshold=8.363e+02, percent-clipped=1.0 +2023-03-29 07:52:31,537 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64549.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:52:56,948 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5288, 3.2811, 3.4792, 2.8842, 3.7262, 3.1487, 3.3609, 3.6813], + device='cuda:0'), covar=tensor([0.0519, 0.0402, 0.0580, 0.0713, 0.0413, 0.0458, 0.0432, 0.0294], + device='cuda:0'), in_proj_covar=tensor([0.0081, 0.0089, 0.0085, 0.0112, 0.0082, 0.0085, 0.0083, 0.0076], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 07:53:21,046 INFO [train.py:892] (0/4) Epoch 35, batch 1500, loss[loss=0.1597, simple_loss=0.2296, pruned_loss=0.04495, over 19866.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.239, pruned_loss=0.03932, over 3944668.81 frames. ], batch size: 129, lr: 4.45e-03, grad_scale: 16.0 +2023-03-29 07:53:24,457 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64572.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:54:00,507 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64588.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:54:02,557 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64589.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:55:13,288 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64620.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:55:15,312 INFO [train.py:892] (0/4) Epoch 35, batch 1550, loss[loss=0.1462, simple_loss=0.2366, pruned_loss=0.02789, over 19739.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2383, pruned_loss=0.03885, over 3946618.05 frames. ], batch size: 99, lr: 4.44e-03, grad_scale: 16.0 +2023-03-29 07:55:27,774 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.542e+02 3.591e+02 4.055e+02 5.054e+02 8.662e+02, threshold=8.110e+02, percent-clipped=1.0 +2023-03-29 07:56:18,086 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64648.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:56:22,701 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64650.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:57:10,755 INFO [train.py:892] (0/4) Epoch 35, batch 1600, loss[loss=0.1349, simple_loss=0.2122, pruned_loss=0.02875, over 19736.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2387, pruned_loss=0.03896, over 3946801.93 frames. ], batch size: 95, lr: 4.44e-03, grad_scale: 16.0 +2023-03-29 07:57:31,314 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64679.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:57:35,507 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-03-29 07:57:39,442 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
limit=2.0 +2023-03-29 07:58:22,487 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64701.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:58:26,661 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64703.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:58:40,138 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64709.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:59:08,794 INFO [train.py:892] (0/4) Epoch 35, batch 1650, loss[loss=0.1673, simple_loss=0.236, pruned_loss=0.0493, over 19759.00 frames. ], tot_loss[loss=0.158, simple_loss=0.238, pruned_loss=0.03907, over 3948228.95 frames. ], batch size: 188, lr: 4.44e-03, grad_scale: 16.0 +2023-03-29 07:59:21,887 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.745e+02 3.702e+02 4.321e+02 4.955e+02 1.270e+03, threshold=8.641e+02, percent-clipped=4.0 +2023-03-29 07:59:22,806 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64727.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 07:59:43,725 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0633, 4.1419, 4.4569, 4.2359, 4.4071, 4.0106, 4.1728, 3.9863], + device='cuda:0'), covar=tensor([0.1553, 0.1753, 0.0954, 0.1375, 0.0996, 0.1020, 0.1944, 0.2066], + device='cuda:0'), in_proj_covar=tensor([0.0300, 0.0340, 0.0376, 0.0304, 0.0278, 0.0288, 0.0366, 0.0394], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 08:00:03,222 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64745.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:00:09,326 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5303, 4.8233, 4.8530, 4.7386, 4.5746, 4.8149, 4.3744, 4.3746], + device='cuda:0'), covar=tensor([0.0483, 0.0484, 0.0475, 0.0441, 0.0606, 0.0509, 0.0696, 0.0969], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0294, 0.0306, 0.0266, 0.0275, 0.0259, 0.0273, 0.0321], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 08:00:20,008 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64752.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:00:46,817 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64764.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:01:02,070 INFO [train.py:892] (0/4) Epoch 35, batch 1700, loss[loss=0.182, simple_loss=0.2564, pruned_loss=0.05381, over 19781.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2383, pruned_loss=0.03943, over 3949239.22 frames. ], batch size: 131, lr: 4.44e-03, grad_scale: 16.0 +2023-03-29 08:02:25,979 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64806.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:02:39,266 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64813.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:02:55,100 INFO [train.py:892] (0/4) Epoch 35, batch 1750, loss[loss=0.164, simple_loss=0.2484, pruned_loss=0.03978, over 19781.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2397, pruned_loss=0.03975, over 3947145.03 frames. 
], batch size: 53, lr: 4.44e-03, grad_scale: 16.0 +2023-03-29 08:02:55,665 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64821.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:03:07,044 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.308e+02 3.852e+02 4.447e+02 5.753e+02 1.014e+03, threshold=8.894e+02, percent-clipped=4.0 +2023-03-29 08:03:21,910 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9996, 3.3376, 3.4798, 4.0221, 2.8557, 3.2599, 2.4766, 2.5532], + device='cuda:0'), covar=tensor([0.0559, 0.1889, 0.0984, 0.0420, 0.1906, 0.0930, 0.1405, 0.1585], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0331, 0.0253, 0.0208, 0.0251, 0.0213, 0.0223, 0.0219], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 08:04:10,033 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.0892, 2.0619, 2.1413, 2.1499, 2.1056, 2.1608, 2.0986, 2.1333], + device='cuda:0'), covar=tensor([0.0415, 0.0364, 0.0380, 0.0360, 0.0479, 0.0351, 0.0478, 0.0347], + device='cuda:0'), in_proj_covar=tensor([0.0090, 0.0085, 0.0087, 0.0081, 0.0094, 0.0086, 0.0103, 0.0076], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 08:04:35,072 INFO [train.py:892] (0/4) Epoch 35, batch 1800, loss[loss=0.1497, simple_loss=0.2326, pruned_loss=0.03342, over 19773.00 frames. ], tot_loss[loss=0.159, simple_loss=0.2386, pruned_loss=0.03966, over 3948792.14 frames. ], batch size: 113, lr: 4.44e-03, grad_scale: 16.0 +2023-03-29 08:05:06,142 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64888.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:06:07,584 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1193, 4.8014, 4.8286, 5.1622, 4.7413, 5.3554, 5.2712, 5.4975], + device='cuda:0'), covar=tensor([0.0679, 0.0343, 0.0436, 0.0300, 0.0571, 0.0318, 0.0343, 0.0239], + device='cuda:0'), in_proj_covar=tensor([0.0158, 0.0182, 0.0206, 0.0183, 0.0180, 0.0164, 0.0156, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 08:06:09,180 INFO [train.py:892] (0/4) Epoch 35, batch 1850, loss[loss=0.1613, simple_loss=0.2511, pruned_loss=0.03578, over 19579.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.2402, pruned_loss=0.03984, over 3947569.97 frames. ], batch size: 53, lr: 4.43e-03, grad_scale: 16.0 +2023-03-29 08:06:17,067 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-35.pt +2023-03-29 08:07:11,833 INFO [train.py:892] (0/4) Epoch 36, batch 0, loss[loss=0.1392, simple_loss=0.2216, pruned_loss=0.02836, over 19471.00 frames. ], tot_loss[loss=0.1392, simple_loss=0.2216, pruned_loss=0.02836, over 19471.00 frames. ], batch size: 43, lr: 4.37e-03, grad_scale: 16.0 +2023-03-29 08:07:11,835 INFO [train.py:917] (0/4) Computing validation loss +2023-03-29 08:07:46,030 INFO [train.py:926] (0/4) Epoch 36, validation: loss=0.183, simple_loss=0.249, pruned_loss=0.05846, over 2883724.00 frames. 
+2023-03-29 08:07:46,031 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-29 08:07:48,051 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.083e+02 3.485e+02 4.267e+02 5.108e+02 8.561e+02, threshold=8.534e+02, percent-clipped=0.0 +2023-03-29 08:08:10,038 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64936.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:08:19,506 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7655, 2.4350, 2.6095, 3.0081, 3.5168, 3.7531, 3.6992, 3.6983], + device='cuda:0'), covar=tensor([0.1124, 0.1584, 0.1516, 0.0811, 0.0507, 0.0321, 0.0391, 0.0458], + device='cuda:0'), in_proj_covar=tensor([0.0161, 0.0168, 0.0180, 0.0152, 0.0137, 0.0133, 0.0126, 0.0117], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 08:08:30,533 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0414, 2.6567, 3.0066, 3.1778, 3.7639, 4.2161, 4.0007, 4.0950], + device='cuda:0'), covar=tensor([0.0948, 0.1516, 0.1320, 0.0740, 0.0418, 0.0251, 0.0417, 0.0370], + device='cuda:0'), in_proj_covar=tensor([0.0161, 0.0168, 0.0179, 0.0152, 0.0137, 0.0133, 0.0126, 0.0117], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 08:08:32,636 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64945.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:09:41,961 INFO [train.py:892] (0/4) Epoch 36, batch 50, loss[loss=0.1579, simple_loss=0.2338, pruned_loss=0.041, over 19736.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.2343, pruned_loss=0.03718, over 890223.80 frames. ], batch size: 77, lr: 4.37e-03, grad_scale: 16.0 +2023-03-29 08:09:42,893 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64976.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:10:42,200 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65001.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:10:49,848 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65004.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:11:33,501 INFO [train.py:892] (0/4) Epoch 36, batch 100, loss[loss=0.1436, simple_loss=0.2265, pruned_loss=0.03032, over 19772.00 frames. ], tot_loss[loss=0.154, simple_loss=0.2335, pruned_loss=0.03731, over 1566926.30 frames. ], batch size: 116, lr: 4.37e-03, grad_scale: 16.0 +2023-03-29 08:11:35,855 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.481e+02 3.835e+02 4.535e+02 5.519e+02 9.407e+02, threshold=9.071e+02, percent-clipped=1.0 +2023-03-29 08:11:59,242 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65037.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 08:12:26,388 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65049.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:12:42,352 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65055.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:12:50,348 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65059.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:13:25,762 INFO [train.py:892] (0/4) Epoch 36, batch 150, loss[loss=0.202, simple_loss=0.2933, pruned_loss=0.05536, over 19630.00 frames. 
], tot_loss[loss=0.1544, simple_loss=0.2341, pruned_loss=0.03741, over 2095485.78 frames. ], batch size: 351, lr: 4.37e-03, grad_scale: 16.0 +2023-03-29 08:13:44,132 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0538, 4.1496, 2.5485, 4.4395, 4.6342, 2.0508, 3.8075, 3.4832], + device='cuda:0'), covar=tensor([0.0784, 0.0948, 0.2808, 0.0768, 0.0600, 0.2958, 0.1116, 0.0914], + device='cuda:0'), in_proj_covar=tensor([0.0239, 0.0261, 0.0233, 0.0281, 0.0261, 0.0206, 0.0242, 0.0201], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 08:14:19,289 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5666, 2.6594, 2.8631, 2.4703, 3.0363, 2.9873, 3.4593, 3.7761], + device='cuda:0'), covar=tensor([0.0723, 0.1761, 0.1710, 0.2369, 0.1518, 0.1440, 0.0736, 0.0678], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0244, 0.0272, 0.0257, 0.0304, 0.0262, 0.0237, 0.0263], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 08:14:25,044 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65101.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:14:39,267 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65108.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:14:58,799 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65116.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:15:10,912 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65121.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:15:22,313 INFO [train.py:892] (0/4) Epoch 36, batch 200, loss[loss=0.1567, simple_loss=0.2337, pruned_loss=0.03979, over 19826.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2345, pruned_loss=0.03683, over 2504525.66 frames. ], batch size: 166, lr: 4.36e-03, grad_scale: 16.0 +2023-03-29 08:15:24,693 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.519e+02 3.588e+02 4.285e+02 5.132e+02 1.208e+03, threshold=8.571e+02, percent-clipped=3.0 +2023-03-29 08:15:30,296 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-03-29 08:17:01,273 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65169.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:17:15,462 INFO [train.py:892] (0/4) Epoch 36, batch 250, loss[loss=0.1517, simple_loss=0.2296, pruned_loss=0.03693, over 19831.00 frames. ], tot_loss[loss=0.155, simple_loss=0.2352, pruned_loss=0.03737, over 2825090.10 frames. ], batch size: 177, lr: 4.36e-03, grad_scale: 16.0 +2023-03-29 08:19:08,896 INFO [train.py:892] (0/4) Epoch 36, batch 300, loss[loss=0.1605, simple_loss=0.252, pruned_loss=0.03448, over 19875.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2354, pruned_loss=0.03757, over 3074977.62 frames. ], batch size: 53, lr: 4.36e-03, grad_scale: 32.0 +2023-03-29 08:19:12,291 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.658e+02 3.728e+02 4.313e+02 5.514e+02 9.328e+02, threshold=8.626e+02, percent-clipped=1.0 +2023-03-29 08:19:53,874 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65245.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:21:04,026 INFO [train.py:892] (0/4) Epoch 36, batch 350, loss[loss=0.1608, simple_loss=0.2333, pruned_loss=0.04415, over 19780.00 frames. 
], tot_loss[loss=0.1563, simple_loss=0.2371, pruned_loss=0.03778, over 3266545.87 frames. ], batch size: 154, lr: 4.36e-03, grad_scale: 32.0 +2023-03-29 08:21:23,775 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65285.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:21:42,216 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65293.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:21:58,240 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1386, 3.3519, 3.3789, 3.3857, 3.0235, 3.3787, 3.0731, 3.3035], + device='cuda:0'), covar=tensor([0.0300, 0.0275, 0.0269, 0.0224, 0.0472, 0.0274, 0.0425, 0.0468], + device='cuda:0'), in_proj_covar=tensor([0.0091, 0.0085, 0.0088, 0.0082, 0.0095, 0.0087, 0.0104, 0.0077], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 08:22:08,707 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65304.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:22:56,434 INFO [train.py:892] (0/4) Epoch 36, batch 400, loss[loss=0.1729, simple_loss=0.2495, pruned_loss=0.04813, over 19803.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2361, pruned_loss=0.03725, over 3417475.36 frames. ], batch size: 67, lr: 4.36e-03, grad_scale: 32.0 +2023-03-29 08:22:58,395 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.673e+02 3.890e+02 4.309e+02 5.103e+02 7.724e+02, threshold=8.618e+02, percent-clipped=0.0 +2023-03-29 08:23:09,713 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65332.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 08:23:17,998 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1531, 3.3654, 3.2948, 3.3906, 3.1234, 3.3341, 2.9860, 3.4091], + device='cuda:0'), covar=tensor([0.0313, 0.0270, 0.0311, 0.0219, 0.0422, 0.0329, 0.0392, 0.0368], + device='cuda:0'), in_proj_covar=tensor([0.0091, 0.0085, 0.0088, 0.0082, 0.0094, 0.0087, 0.0104, 0.0077], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 08:23:29,310 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.5089, 5.7792, 5.7921, 5.7039, 5.4835, 5.8081, 5.1792, 5.2214], + device='cuda:0'), covar=tensor([0.0427, 0.0427, 0.0461, 0.0424, 0.0576, 0.0440, 0.0672, 0.1013], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0294, 0.0306, 0.0268, 0.0276, 0.0260, 0.0275, 0.0322], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 08:23:45,218 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65346.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:23:57,944 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65352.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:24:14,569 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65359.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:24:54,096 INFO [train.py:892] (0/4) Epoch 36, batch 450, loss[loss=0.1673, simple_loss=0.2452, pruned_loss=0.04467, over 19764.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2374, pruned_loss=0.03803, over 3533992.22 frames. 
], batch size: 244, lr: 4.36e-03, grad_scale: 16.0 +2023-03-29 08:25:49,795 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65401.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:26:03,584 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65407.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:26:05,552 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65408.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:26:14,897 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65411.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:26:46,787 INFO [train.py:892] (0/4) Epoch 36, batch 500, loss[loss=0.183, simple_loss=0.258, pruned_loss=0.05401, over 19750.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2362, pruned_loss=0.03807, over 3626783.14 frames. ], batch size: 250, lr: 4.35e-03, grad_scale: 16.0 +2023-03-29 08:26:52,437 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.389e+02 3.662e+02 4.205e+02 4.670e+02 6.884e+02, threshold=8.409e+02, percent-clipped=0.0 +2023-03-29 08:27:37,907 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65449.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:27:55,751 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65456.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:28:04,771 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9398, 4.0561, 2.4295, 4.2808, 4.4180, 2.0554, 3.6814, 3.4042], + device='cuda:0'), covar=tensor([0.0681, 0.0804, 0.2654, 0.0639, 0.0527, 0.2541, 0.0926, 0.0799], + device='cuda:0'), in_proj_covar=tensor([0.0239, 0.0261, 0.0234, 0.0282, 0.0261, 0.0206, 0.0242, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 08:28:34,712 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3896, 4.2112, 4.6650, 4.2625, 3.8964, 4.4429, 4.3075, 4.7229], + device='cuda:0'), covar=tensor([0.0713, 0.0365, 0.0336, 0.0380, 0.0992, 0.0534, 0.0476, 0.0324], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0225, 0.0225, 0.0237, 0.0208, 0.0248, 0.0238, 0.0222], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 08:28:38,036 INFO [train.py:892] (0/4) Epoch 36, batch 550, loss[loss=0.261, simple_loss=0.3359, pruned_loss=0.0931, over 19424.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2369, pruned_loss=0.03881, over 3697993.87 frames. 
], batch size: 431, lr: 4.35e-03, grad_scale: 16.0 +2023-03-29 08:29:04,287 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1153, 2.9624, 3.2124, 2.5909, 3.2946, 2.7053, 3.1061, 3.0591], + device='cuda:0'), covar=tensor([0.0610, 0.0514, 0.0480, 0.0803, 0.0369, 0.0536, 0.0445, 0.0380], + device='cuda:0'), in_proj_covar=tensor([0.0083, 0.0091, 0.0088, 0.0114, 0.0083, 0.0087, 0.0084, 0.0078], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 08:29:49,218 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6537, 3.1913, 3.5764, 3.1557, 3.8311, 3.9214, 4.5090, 5.0213], + device='cuda:0'), covar=tensor([0.0473, 0.1563, 0.1317, 0.2075, 0.1634, 0.1200, 0.0517, 0.0443], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0244, 0.0272, 0.0258, 0.0304, 0.0262, 0.0237, 0.0263], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 08:29:51,109 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7812, 2.8022, 2.9008, 2.3888, 2.9691, 2.4864, 2.8138, 2.8172], + device='cuda:0'), covar=tensor([0.0646, 0.0480, 0.0626, 0.0849, 0.0446, 0.0553, 0.0550, 0.0476], + device='cuda:0'), in_proj_covar=tensor([0.0083, 0.0091, 0.0087, 0.0114, 0.0083, 0.0087, 0.0084, 0.0078], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 08:30:30,660 INFO [train.py:892] (0/4) Epoch 36, batch 600, loss[loss=0.1454, simple_loss=0.2201, pruned_loss=0.03535, over 19803.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.237, pruned_loss=0.03892, over 3753842.97 frames. ], batch size: 148, lr: 4.35e-03, grad_scale: 16.0 +2023-03-29 08:30:34,422 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.355e+02 3.593e+02 4.441e+02 5.313e+02 1.329e+03, threshold=8.882e+02, percent-clipped=4.0 +2023-03-29 08:30:42,658 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65532.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 08:32:02,958 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8612, 4.6992, 5.2107, 4.7451, 4.2169, 4.9227, 4.7924, 5.3396], + device='cuda:0'), covar=tensor([0.0764, 0.0401, 0.0371, 0.0381, 0.0845, 0.0552, 0.0511, 0.0320], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0226, 0.0226, 0.0238, 0.0208, 0.0249, 0.0239, 0.0223], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 08:32:03,022 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7739, 2.9119, 4.8168, 4.1185, 4.4583, 4.7257, 4.5210, 4.4545], + device='cuda:0'), covar=tensor([0.0468, 0.0901, 0.0092, 0.0826, 0.0142, 0.0186, 0.0170, 0.0149], + device='cuda:0'), in_proj_covar=tensor([0.0102, 0.0105, 0.0090, 0.0153, 0.0087, 0.0100, 0.0091, 0.0088], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 08:32:21,945 INFO [train.py:892] (0/4) Epoch 36, batch 650, loss[loss=0.1727, simple_loss=0.2499, pruned_loss=0.04779, over 19811.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2368, pruned_loss=0.03855, over 3797588.15 frames. 
], batch size: 132, lr: 4.35e-03, grad_scale: 16.0 +2023-03-29 08:33:01,090 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65593.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 08:34:14,090 INFO [train.py:892] (0/4) Epoch 36, batch 700, loss[loss=0.1497, simple_loss=0.2212, pruned_loss=0.0391, over 19874.00 frames. ], tot_loss[loss=0.1563, simple_loss=0.2362, pruned_loss=0.03815, over 3831748.48 frames. ], batch size: 134, lr: 4.35e-03, grad_scale: 16.0 +2023-03-29 08:34:18,131 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.459e+02 3.698e+02 4.189e+02 4.947e+02 1.521e+03, threshold=8.379e+02, percent-clipped=3.0 +2023-03-29 08:34:30,970 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65632.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:34:44,706 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.90 vs. limit=5.0 +2023-03-29 08:34:52,630 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65641.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:36:12,654 INFO [train.py:892] (0/4) Epoch 36, batch 750, loss[loss=0.1476, simple_loss=0.2209, pruned_loss=0.03713, over 19800.00 frames. ], tot_loss[loss=0.156, simple_loss=0.2359, pruned_loss=0.03802, over 3859070.19 frames. ], batch size: 200, lr: 4.35e-03, grad_scale: 16.0 +2023-03-29 08:36:22,144 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65680.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:36:22,728 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-03-29 08:37:34,577 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65711.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:38:07,162 INFO [train.py:892] (0/4) Epoch 36, batch 800, loss[loss=0.2174, simple_loss=0.2994, pruned_loss=0.06775, over 19624.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.2378, pruned_loss=0.03902, over 3876557.69 frames. 
], batch size: 387, lr: 4.34e-03, grad_scale: 16.0 +2023-03-29 08:38:11,739 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.627e+02 3.709e+02 4.446e+02 5.789e+02 1.138e+03, threshold=8.893e+02, percent-clipped=2.0 +2023-03-29 08:39:24,859 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65759.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:39:31,726 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0967, 2.6009, 3.1084, 3.2530, 3.7060, 4.2359, 4.0736, 4.0809], + device='cuda:0'), covar=tensor([0.0939, 0.1613, 0.1217, 0.0689, 0.0488, 0.0211, 0.0331, 0.0407], + device='cuda:0'), in_proj_covar=tensor([0.0164, 0.0171, 0.0181, 0.0155, 0.0140, 0.0135, 0.0127, 0.0120], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 08:39:35,984 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7689, 2.8006, 2.8051, 2.3205, 2.8620, 2.5033, 2.8187, 2.7823], + device='cuda:0'), covar=tensor([0.0568, 0.0487, 0.0589, 0.0858, 0.0413, 0.0490, 0.0484, 0.0476], + device='cuda:0'), in_proj_covar=tensor([0.0082, 0.0090, 0.0087, 0.0113, 0.0082, 0.0086, 0.0083, 0.0077], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 08:40:02,203 INFO [train.py:892] (0/4) Epoch 36, batch 850, loss[loss=0.1527, simple_loss=0.2292, pruned_loss=0.03808, over 19740.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2374, pruned_loss=0.03913, over 3892927.12 frames. ], batch size: 92, lr: 4.34e-03, grad_scale: 16.0 +2023-03-29 08:41:36,307 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.54 vs. limit=2.0 +2023-03-29 08:41:53,813 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.9735, 1.9222, 2.0148, 2.0245, 1.9697, 2.0365, 1.9043, 2.0362], + device='cuda:0'), covar=tensor([0.0401, 0.0381, 0.0365, 0.0366, 0.0473, 0.0349, 0.0551, 0.0373], + device='cuda:0'), in_proj_covar=tensor([0.0091, 0.0086, 0.0088, 0.0083, 0.0095, 0.0088, 0.0105, 0.0077], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 08:41:57,339 INFO [train.py:892] (0/4) Epoch 36, batch 900, loss[loss=0.1988, simple_loss=0.2821, pruned_loss=0.05772, over 19644.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2384, pruned_loss=0.03944, over 3904526.57 frames. ], batch size: 343, lr: 4.34e-03, grad_scale: 16.0 +2023-03-29 08:42:01,152 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.659e+02 3.793e+02 4.465e+02 5.730e+02 1.109e+03, threshold=8.930e+02, percent-clipped=1.0 +2023-03-29 08:42:24,340 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65838.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 08:43:50,246 INFO [train.py:892] (0/4) Epoch 36, batch 950, loss[loss=0.1613, simple_loss=0.2349, pruned_loss=0.04381, over 19754.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2384, pruned_loss=0.03924, over 3914494.21 frames. 
], batch size: 182, lr: 4.34e-03, grad_scale: 16.0 +2023-03-29 08:44:17,906 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65888.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 08:44:45,489 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65899.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 08:45:36,052 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8072, 2.3327, 3.8906, 3.3770, 3.8463, 3.9397, 3.6801, 3.7245], + device='cuda:0'), covar=tensor([0.0698, 0.1069, 0.0130, 0.0556, 0.0168, 0.0229, 0.0207, 0.0203], + device='cuda:0'), in_proj_covar=tensor([0.0102, 0.0105, 0.0091, 0.0153, 0.0088, 0.0101, 0.0092, 0.0088], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 08:45:40,270 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3000, 2.4994, 3.5865, 2.9208, 2.9864, 2.9088, 2.1256, 2.3313], + device='cuda:0'), covar=tensor([0.1252, 0.3238, 0.0732, 0.1163, 0.1967, 0.1641, 0.2771, 0.2734], + device='cuda:0'), in_proj_covar=tensor([0.0352, 0.0392, 0.0349, 0.0289, 0.0375, 0.0382, 0.0379, 0.0350], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 08:45:43,699 INFO [train.py:892] (0/4) Epoch 36, batch 1000, loss[loss=0.1236, simple_loss=0.2002, pruned_loss=0.02353, over 19774.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2377, pruned_loss=0.03873, over 3922470.81 frames. ], batch size: 116, lr: 4.34e-03, grad_scale: 16.0 +2023-03-29 08:45:47,945 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.017e+02 3.590e+02 4.101e+02 4.810e+02 1.037e+03, threshold=8.201e+02, percent-clipped=3.0 +2023-03-29 08:46:20,458 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65941.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:46:39,074 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3675, 5.6383, 5.6875, 5.5974, 5.3416, 5.6378, 5.1294, 5.1569], + device='cuda:0'), covar=tensor([0.0415, 0.0428, 0.0455, 0.0414, 0.0564, 0.0511, 0.0736, 0.0961], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0299, 0.0309, 0.0271, 0.0279, 0.0262, 0.0278, 0.0325], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 08:47:38,751 INFO [train.py:892] (0/4) Epoch 36, batch 1050, loss[loss=0.1602, simple_loss=0.2515, pruned_loss=0.03444, over 19566.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2368, pruned_loss=0.03801, over 3927839.47 frames. 
], batch size: 53, lr: 4.34e-03, grad_scale: 16.0 +2023-03-29 08:48:10,609 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65989.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:48:34,836 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-66000.pt +2023-03-29 08:48:56,369 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5447, 3.4746, 3.6437, 2.7716, 3.8711, 3.2858, 3.4735, 3.8701], + device='cuda:0'), covar=tensor([0.0826, 0.0387, 0.0809, 0.0793, 0.0369, 0.0394, 0.0506, 0.0284], + device='cuda:0'), in_proj_covar=tensor([0.0082, 0.0091, 0.0087, 0.0114, 0.0083, 0.0086, 0.0084, 0.0078], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 08:49:06,135 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6622, 4.3864, 4.4655, 4.2159, 4.6835, 3.1522, 3.8949, 2.2289], + device='cuda:0'), covar=tensor([0.0190, 0.0210, 0.0147, 0.0198, 0.0145, 0.0982, 0.0739, 0.1561], + device='cuda:0'), in_proj_covar=tensor([0.0107, 0.0149, 0.0115, 0.0137, 0.0121, 0.0137, 0.0145, 0.0129], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 08:49:35,977 INFO [train.py:892] (0/4) Epoch 36, batch 1100, loss[loss=0.1558, simple_loss=0.2369, pruned_loss=0.03734, over 19828.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2368, pruned_loss=0.03815, over 3932045.83 frames. ], batch size: 288, lr: 4.33e-03, grad_scale: 16.0 +2023-03-29 08:49:39,606 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.898e+02 3.611e+02 4.375e+02 5.188e+02 7.346e+02, threshold=8.750e+02, percent-clipped=0.0 +2023-03-29 08:51:23,875 INFO [train.py:892] (0/4) Epoch 36, batch 1150, loss[loss=0.1426, simple_loss=0.2174, pruned_loss=0.03392, over 19837.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2381, pruned_loss=0.03897, over 3935959.42 frames. ], batch size: 146, lr: 4.33e-03, grad_scale: 16.0 +2023-03-29 08:51:32,871 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.37 vs. 
limit=5.0 +2023-03-29 08:52:33,035 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5533, 2.7456, 4.0323, 3.1311, 3.3024, 3.1239, 2.3651, 2.6121], + device='cuda:0'), covar=tensor([0.1207, 0.3173, 0.0621, 0.1137, 0.1893, 0.1643, 0.2505, 0.2635], + device='cuda:0'), in_proj_covar=tensor([0.0353, 0.0392, 0.0350, 0.0290, 0.0376, 0.0384, 0.0380, 0.0351], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 08:52:44,450 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3520, 2.6448, 4.3784, 3.7460, 4.1100, 4.3332, 4.0694, 4.1313], + device='cuda:0'), covar=tensor([0.0557, 0.0990, 0.0108, 0.0641, 0.0167, 0.0203, 0.0201, 0.0181], + device='cuda:0'), in_proj_covar=tensor([0.0101, 0.0104, 0.0090, 0.0152, 0.0087, 0.0100, 0.0091, 0.0087], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 08:53:11,000 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5511, 1.9979, 2.2702, 2.7367, 3.1109, 3.1971, 3.1570, 3.1647], + device='cuda:0'), covar=tensor([0.1105, 0.1802, 0.1442, 0.0782, 0.0572, 0.0424, 0.0460, 0.0514], + device='cuda:0'), in_proj_covar=tensor([0.0164, 0.0170, 0.0180, 0.0154, 0.0139, 0.0135, 0.0127, 0.0121], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 08:53:11,131 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2887, 2.4269, 3.7576, 2.8107, 2.9319, 2.8094, 2.1037, 2.2857], + device='cuda:0'), covar=tensor([0.1395, 0.3580, 0.0649, 0.1340, 0.2589, 0.1856, 0.3013, 0.3089], + device='cuda:0'), in_proj_covar=tensor([0.0354, 0.0394, 0.0351, 0.0291, 0.0378, 0.0385, 0.0382, 0.0352], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 08:53:15,604 INFO [train.py:892] (0/4) Epoch 36, batch 1200, loss[loss=0.1674, simple_loss=0.2485, pruned_loss=0.04318, over 19771.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2383, pruned_loss=0.03905, over 3939895.72 frames. ], batch size: 69, lr: 4.33e-03, grad_scale: 16.0 +2023-03-29 08:53:19,997 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.230e+02 3.760e+02 4.454e+02 5.058e+02 1.051e+03, threshold=8.908e+02, percent-clipped=2.0 +2023-03-29 08:53:58,118 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66144.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:54:14,767 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5232, 3.6190, 2.2524, 3.7409, 3.8373, 1.8280, 3.1746, 2.9475], + device='cuda:0'), covar=tensor([0.0811, 0.0852, 0.2813, 0.0841, 0.0649, 0.2870, 0.1240, 0.0982], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0265, 0.0237, 0.0285, 0.0264, 0.0210, 0.0246, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 08:54:33,301 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66160.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 08:55:09,246 INFO [train.py:892] (0/4) Epoch 36, batch 1250, loss[loss=0.1569, simple_loss=0.2317, pruned_loss=0.04108, over 19764.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2379, pruned_loss=0.03878, over 3940617.43 frames. 
], batch size: 122, lr: 4.33e-03, grad_scale: 16.0 +2023-03-29 08:55:38,386 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66188.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 08:55:51,128 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66194.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 08:56:16,181 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66205.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 08:56:52,634 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66221.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 08:57:03,442 INFO [train.py:892] (0/4) Epoch 36, batch 1300, loss[loss=0.1454, simple_loss=0.2246, pruned_loss=0.03311, over 19788.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2373, pruned_loss=0.03858, over 3943064.75 frames. ], batch size: 168, lr: 4.33e-03, grad_scale: 16.0 +2023-03-29 08:57:07,571 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.794e+02 3.837e+02 4.506e+02 5.422e+02 1.103e+03, threshold=9.011e+02, percent-clipped=2.0 +2023-03-29 08:57:25,561 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66236.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 08:58:16,428 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66258.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 08:58:57,028 INFO [train.py:892] (0/4) Epoch 36, batch 1350, loss[loss=0.1572, simple_loss=0.2306, pruned_loss=0.04191, over 19756.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.237, pruned_loss=0.03816, over 3943990.65 frames. ], batch size: 205, lr: 4.33e-03, grad_scale: 16.0 +2023-03-29 08:59:24,667 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5954, 3.6739, 2.2171, 4.2848, 3.8374, 4.2602, 4.3028, 3.2610], + device='cuda:0'), covar=tensor([0.0570, 0.0566, 0.1478, 0.0562, 0.0601, 0.0381, 0.0556, 0.0824], + device='cuda:0'), in_proj_covar=tensor([0.0146, 0.0147, 0.0145, 0.0156, 0.0136, 0.0139, 0.0151, 0.0148], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 09:00:35,588 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66319.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 09:00:49,669 INFO [train.py:892] (0/4) Epoch 36, batch 1400, loss[loss=0.1469, simple_loss=0.2186, pruned_loss=0.03758, over 19866.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.2371, pruned_loss=0.03838, over 3943601.53 frames. ], batch size: 129, lr: 4.32e-03, grad_scale: 16.0 +2023-03-29 09:00:54,075 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.446e+02 3.517e+02 4.188e+02 4.908e+02 8.356e+02, threshold=8.377e+02, percent-clipped=0.0 +2023-03-29 09:01:19,611 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6258, 2.6691, 1.7840, 2.9766, 2.7827, 2.9029, 3.0077, 2.4292], + device='cuda:0'), covar=tensor([0.0763, 0.0788, 0.1484, 0.0738, 0.0636, 0.0592, 0.0637, 0.0953], + device='cuda:0'), in_proj_covar=tensor([0.0147, 0.0147, 0.0145, 0.0157, 0.0137, 0.0140, 0.0151, 0.0149], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 09:02:43,608 INFO [train.py:892] (0/4) Epoch 36, batch 1450, loss[loss=0.1541, simple_loss=0.227, pruned_loss=0.04058, over 19869.00 frames. 
], tot_loss[loss=0.1571, simple_loss=0.2374, pruned_loss=0.03838, over 3945673.26 frames. ], batch size: 122, lr: 4.32e-03, grad_scale: 16.0
+2023-03-29 09:04:34,692 INFO [train.py:892] (0/4) Epoch 36, batch 1500, loss[loss=0.1786, simple_loss=0.256, pruned_loss=0.05062, over 19818.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2359, pruned_loss=0.03796, over 3947879.34 frames. ], batch size: 57, lr: 4.32e-03, grad_scale: 16.0
+2023-03-29 09:04:40,365 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.359e+02 3.684e+02 4.435e+02 5.309e+02 1.081e+03, threshold=8.870e+02, percent-clipped=2.0
+2023-03-29 09:06:06,986 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6739, 2.6490, 2.9906, 2.5949, 3.1060, 3.0244, 3.5736, 3.9018],
+ device='cuda:0'), covar=tensor([0.0648, 0.1737, 0.1577, 0.2256, 0.1575, 0.1525, 0.0702, 0.0608],
+ device='cuda:0'), in_proj_covar=tensor([0.0259, 0.0246, 0.0273, 0.0258, 0.0305, 0.0263, 0.0238, 0.0264],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-29 09:06:32,053 INFO [train.py:892] (0/4) Epoch 36, batch 1550, loss[loss=0.1478, simple_loss=0.2163, pruned_loss=0.03967, over 19810.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2368, pruned_loss=0.03858, over 3947789.23 frames. ], batch size: 149, lr: 4.32e-03, grad_scale: 16.0
+2023-03-29 09:07:11,627 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66494.0, num_to_drop=1, layers_to_drop={2}
+2023-03-29 09:07:25,648 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66500.0, num_to_drop=1, layers_to_drop={1}
+2023-03-29 09:08:01,220 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66516.0, num_to_drop=1, layers_to_drop={3}
+2023-03-29 09:08:01,485 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2150, 3.5541, 3.0851, 2.7066, 3.0617, 3.5546, 3.4279, 3.4162],
+ device='cuda:0'), covar=tensor([0.0276, 0.0279, 0.0287, 0.0436, 0.0327, 0.0230, 0.0253, 0.0242],
+ device='cuda:0'), in_proj_covar=tensor([0.0110, 0.0102, 0.0104, 0.0105, 0.0109, 0.0092, 0.0093, 0.0092],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-29 09:08:01,548 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4964, 4.7069, 2.6641, 4.9061, 5.0874, 2.2221, 4.3722, 3.7175],
+ device='cuda:0'), covar=tensor([0.0604, 0.0589, 0.2666, 0.0577, 0.0450, 0.2657, 0.0836, 0.0867],
+ device='cuda:0'), in_proj_covar=tensor([0.0239, 0.0262, 0.0235, 0.0281, 0.0261, 0.0207, 0.0244, 0.0203],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-29 09:08:11,848 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66521.0, num_to_drop=1, layers_to_drop={1}
+2023-03-29 09:08:20,341 INFO [train.py:892] (0/4) Epoch 36, batch 1600, loss[loss=0.133, simple_loss=0.2068, pruned_loss=0.02958, over 19747.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2362, pruned_loss=0.03858, over 3948977.44 frames. ], batch size: 89, lr: 4.32e-03, grad_scale: 16.0
+2023-03-29 09:08:24,087 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.296e+02 3.591e+02 4.311e+02 5.202e+02 1.053e+03, threshold=8.622e+02, percent-clipped=1.0
+2023-03-29 09:08:57,601 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66542.0, num_to_drop=1, layers_to_drop={1}
+2023-03-29 09:09:02,310 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0
+2023-03-29 09:09:42,620 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0477, 3.4221, 2.9782, 2.5015, 2.9462, 3.3627, 3.2981, 3.2831],
+ device='cuda:0'), covar=tensor([0.0299, 0.0251, 0.0265, 0.0507, 0.0326, 0.0224, 0.0210, 0.0206],
+ device='cuda:0'), in_proj_covar=tensor([0.0110, 0.0103, 0.0105, 0.0106, 0.0110, 0.0093, 0.0094, 0.0093],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-29 09:10:14,426 INFO [train.py:892] (0/4) Epoch 36, batch 1650, loss[loss=0.1488, simple_loss=0.2449, pruned_loss=0.02636, over 19851.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2369, pruned_loss=0.03871, over 3950096.90 frames. ], batch size: 56, lr: 4.32e-03, grad_scale: 16.0
+2023-03-29 09:10:15,417 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2966, 3.0870, 3.3908, 2.6683, 3.5495, 2.9349, 3.2613, 3.3320],
+ device='cuda:0'), covar=tensor([0.0576, 0.0474, 0.0555, 0.0813, 0.0332, 0.0447, 0.0482, 0.0385],
+ device='cuda:0'), in_proj_covar=tensor([0.0082, 0.0091, 0.0087, 0.0113, 0.0083, 0.0086, 0.0084, 0.0078],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-29 09:10:29,196 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66582.0, num_to_drop=1, layers_to_drop={3}
+2023-03-29 09:11:14,259 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3736, 3.3075, 5.1566, 3.7102, 4.0260, 3.8004, 2.7895, 2.9555],
+ device='cuda:0'), covar=tensor([0.0877, 0.3032, 0.0358, 0.0991, 0.1757, 0.1420, 0.2520, 0.2550],
+ device='cuda:0'), in_proj_covar=tensor([0.0354, 0.0395, 0.0351, 0.0292, 0.0378, 0.0386, 0.0383, 0.0352],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-29 09:11:42,725 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66614.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 09:12:09,543 INFO [train.py:892] (0/4) Epoch 36, batch 1700, loss[loss=0.1664, simple_loss=0.2445, pruned_loss=0.04417, over 19543.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2373, pruned_loss=0.03858, over 3950392.88 frames. ], batch size: 41, lr: 4.31e-03, grad_scale: 16.0
+2023-03-29 09:12:13,637 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.577e+02 3.609e+02 4.438e+02 5.504e+02 8.717e+02, threshold=8.877e+02, percent-clipped=1.0
+2023-03-29 09:13:00,329 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9388, 3.4589, 3.9497, 2.7073, 4.1457, 3.3742, 3.6425, 4.1258],
+ device='cuda:0'), covar=tensor([0.0747, 0.0482, 0.0573, 0.1031, 0.0354, 0.0399, 0.0427, 0.0322],
+ device='cuda:0'), in_proj_covar=tensor([0.0082, 0.0091, 0.0087, 0.0113, 0.0083, 0.0086, 0.0084, 0.0078],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-29 09:13:57,340 INFO [train.py:892] (0/4) Epoch 36, batch 1750, loss[loss=0.1506, simple_loss=0.2415, pruned_loss=0.02986, over 19604.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2373, pruned_loss=0.03844, over 3948838.81 frames. ], batch size: 48, lr: 4.31e-03, grad_scale: 16.0
+2023-03-29 09:15:32,748 INFO [train.py:892] (0/4) Epoch 36, batch 1800, loss[loss=0.1618, simple_loss=0.2532, pruned_loss=0.0352, over 19675.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2379, pruned_loss=0.03882, over 3949503.70 frames. ], batch size: 49, lr: 4.31e-03, grad_scale: 16.0
+2023-03-29 09:15:36,377 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.260e+02 3.634e+02 4.363e+02 5.062e+02 8.323e+02, threshold=8.726e+02, percent-clipped=0.0
+2023-03-29 09:15:42,723 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66731.0, num_to_drop=1, layers_to_drop={1}
+2023-03-29 09:15:48,275 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6170, 3.2330, 3.5159, 3.0188, 3.7541, 3.8175, 4.4487, 4.9848],
+ device='cuda:0'), covar=tensor([0.0464, 0.1587, 0.1472, 0.2310, 0.1724, 0.1242, 0.0528, 0.0355],
+ device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0247, 0.0276, 0.0261, 0.0307, 0.0266, 0.0240, 0.0267],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-29 09:17:03,408 INFO [train.py:892] (0/4) Epoch 36, batch 1850, loss[loss=0.1602, simple_loss=0.2449, pruned_loss=0.0377, over 19587.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2394, pruned_loss=0.03859, over 3948157.39 frames. ], batch size: 53, lr: 4.31e-03, grad_scale: 16.0
+2023-03-29 09:17:11,260 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-36.pt
+2023-03-29 09:18:08,594 INFO [train.py:892] (0/4) Epoch 37, batch 0, loss[loss=0.1338, simple_loss=0.2176, pruned_loss=0.025, over 19732.00 frames. ], tot_loss[loss=0.1338, simple_loss=0.2176, pruned_loss=0.025, over 19732.00 frames. ], batch size: 92, lr: 4.25e-03, grad_scale: 16.0
+2023-03-29 09:18:08,595 INFO [train.py:917] (0/4) Computing validation loss
+2023-03-29 09:18:41,824 INFO [train.py:926] (0/4) Epoch 37, validation: loss=0.1834, simple_loss=0.2492, pruned_loss=0.05881, over 2883724.00 frames.
+2023-03-29 09:18:41,825 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB
+2023-03-29 09:18:58,441 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0969, 2.9078, 3.1336, 2.5718, 3.2281, 2.7172, 3.0738, 3.1106],
+ device='cuda:0'), covar=tensor([0.0615, 0.0593, 0.0602, 0.0774, 0.0399, 0.0489, 0.0513, 0.0374],
+ device='cuda:0'), in_proj_covar=tensor([0.0082, 0.0091, 0.0087, 0.0114, 0.0083, 0.0086, 0.0084, 0.0078],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-29 09:19:11,146 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66792.0, num_to_drop=1, layers_to_drop={3}
+2023-03-29 09:19:27,982 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66800.0, num_to_drop=1, layers_to_drop={0}
+2023-03-29 09:20:03,785 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66816.0, num_to_drop=1, layers_to_drop={2}
+2023-03-29 09:20:31,169 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.683e+02 3.599e+02 4.155e+02 4.773e+02 9.045e+02, threshold=8.309e+02, percent-clipped=1.0
+2023-03-29 09:20:39,152 INFO [train.py:892] (0/4) Epoch 37, batch 50, loss[loss=0.1317, simple_loss=0.2114, pruned_loss=0.02601, over 19858.00 frames. ], tot_loss[loss=0.1531, simple_loss=0.2338, pruned_loss=0.0362, over 890817.38 frames. ], batch size: 99, lr: 4.25e-03, grad_scale: 16.0
+2023-03-29 09:21:15,269 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66848.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 09:21:51,434 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66864.0, num_to_drop=1, layers_to_drop={1}
+2023-03-29 09:22:01,924 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0
+2023-03-29 09:22:19,726 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66877.0, num_to_drop=1, layers_to_drop={3}
+2023-03-29 09:22:30,251 INFO [train.py:892] (0/4) Epoch 37, batch 100, loss[loss=0.1428, simple_loss=0.2313, pruned_loss=0.02713, over 19828.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2345, pruned_loss=0.03641, over 1570138.57 frames. ], batch size: 76, lr: 4.25e-03, grad_scale: 16.0
+2023-03-29 09:23:45,959 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66914.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 09:23:56,587 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0
+2023-03-29 09:24:15,782 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.134e+02 3.769e+02 4.346e+02 5.384e+02 1.182e+03, threshold=8.692e+02, percent-clipped=4.0
+2023-03-29 09:24:22,088 INFO [train.py:892] (0/4) Epoch 37, batch 150, loss[loss=0.1443, simple_loss=0.2223, pruned_loss=0.03311, over 19788.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.2339, pruned_loss=0.0365, over 2097028.67 frames. ], batch size: 168, lr: 4.25e-03, grad_scale: 16.0
+2023-03-29 09:25:24,914 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66957.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 09:25:35,442 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66962.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 09:26:19,106 INFO [train.py:892] (0/4) Epoch 37, batch 200, loss[loss=0.1366, simple_loss=0.2097, pruned_loss=0.03177, over 19776.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.2349, pruned_loss=0.03703, over 2507974.54 frames. ], batch size: 155, lr: 4.24e-03, grad_scale: 16.0
+2023-03-29 09:27:45,917 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67018.0, num_to_drop=1, layers_to_drop={2}
+2023-03-29 09:27:50,004 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6509, 2.7613, 4.0936, 3.2103, 3.3902, 3.1306, 2.3531, 2.5014],
+ device='cuda:0'), covar=tensor([0.1232, 0.3560, 0.0631, 0.1173, 0.1826, 0.1726, 0.2850, 0.2853],
+ device='cuda:0'), in_proj_covar=tensor([0.0355, 0.0394, 0.0352, 0.0292, 0.0378, 0.0387, 0.0383, 0.0352],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-29 09:28:07,315 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.541e+02 3.516e+02 4.008e+02 4.634e+02 9.207e+02, threshold=8.015e+02, percent-clipped=1.0
+2023-03-29 09:28:13,841 INFO [train.py:892] (0/4) Epoch 37, batch 250, loss[loss=0.1717, simple_loss=0.2485, pruned_loss=0.04739, over 19715.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2339, pruned_loss=0.03729, over 2827699.83 frames. ], batch size: 62, lr: 4.24e-03, grad_scale: 16.0
+2023-03-29 09:28:57,129 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0130, 3.7747, 3.8721, 4.0703, 3.8719, 4.0492, 4.1110, 4.2810],
+ device='cuda:0'), covar=tensor([0.0692, 0.0482, 0.0524, 0.0403, 0.0659, 0.0541, 0.0442, 0.0308],
+ device='cuda:0'), in_proj_covar=tensor([0.0159, 0.0184, 0.0207, 0.0182, 0.0181, 0.0165, 0.0157, 0.0204],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003],
+ device='cuda:0')
+2023-03-29 09:30:07,561 INFO [train.py:892] (0/4) Epoch 37, batch 300, loss[loss=0.1535, simple_loss=0.2263, pruned_loss=0.04041, over 19800.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2355, pruned_loss=0.0379, over 3076830.67 frames. ], batch size: 150, lr: 4.24e-03, grad_scale: 16.0
+2023-03-29 09:30:08,592 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0981, 3.0976, 2.9700, 2.6252, 3.0020, 2.4692, 2.2689, 1.5241],
+ device='cuda:0'), covar=tensor([0.0288, 0.0275, 0.0200, 0.0262, 0.0202, 0.0885, 0.0695, 0.1899],
+ device='cuda:0'), in_proj_covar=tensor([0.0107, 0.0150, 0.0116, 0.0137, 0.0121, 0.0137, 0.0145, 0.0130],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-29 09:30:14,785 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67083.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 09:30:22,970 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67087.0, num_to_drop=1, layers_to_drop={1}
+2023-03-29 09:31:58,038 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.283e+02 3.982e+02 4.417e+02 5.136e+02 1.288e+03, threshold=8.835e+02, percent-clipped=3.0
+2023-03-29 09:32:05,855 INFO [train.py:892] (0/4) Epoch 37, batch 350, loss[loss=0.1518, simple_loss=0.2273, pruned_loss=0.03811, over 19872.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2366, pruned_loss=0.03764, over 3269340.81 frames. ], batch size: 138, lr: 4.24e-03, grad_scale: 16.0
+2023-03-29 09:32:34,335 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67144.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 09:33:49,482 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67177.0, num_to_drop=1, layers_to_drop={1}
+2023-03-29 09:33:57,480 INFO [train.py:892] (0/4) Epoch 37, batch 400, loss[loss=0.1452, simple_loss=0.2293, pruned_loss=0.03052, over 19803.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2352, pruned_loss=0.03729, over 3421023.44 frames. ], batch size: 86, lr: 4.24e-03, grad_scale: 16.0
+2023-03-29 09:34:37,681 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3215, 4.1404, 4.1581, 3.8931, 4.3241, 2.9877, 3.6035, 2.0731],
+ device='cuda:0'), covar=tensor([0.0193, 0.0209, 0.0153, 0.0190, 0.0139, 0.1042, 0.0695, 0.1649],
+ device='cuda:0'), in_proj_covar=tensor([0.0107, 0.0149, 0.0116, 0.0137, 0.0121, 0.0136, 0.0144, 0.0130],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-29 09:35:22,649 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.49 vs. limit=5.0
+2023-03-29 09:35:34,618 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67225.0, num_to_drop=1, layers_to_drop={0}
+2023-03-29 09:35:41,238 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.424e+02 3.741e+02 4.233e+02 5.097e+02 1.151e+03, threshold=8.466e+02, percent-clipped=1.0
+2023-03-29 09:35:49,527 INFO [train.py:892] (0/4) Epoch 37, batch 450, loss[loss=0.1403, simple_loss=0.2241, pruned_loss=0.02822, over 19559.00 frames. ], tot_loss[loss=0.1557, simple_loss=0.2359, pruned_loss=0.03776, over 3538922.27 frames. ], batch size: 47, lr: 4.24e-03, grad_scale: 16.0
+2023-03-29 09:37:36,833 INFO [train.py:892] (0/4) Epoch 37, batch 500, loss[loss=0.1477, simple_loss=0.2197, pruned_loss=0.03784, over 19773.00 frames. ], tot_loss[loss=0.155, simple_loss=0.2348, pruned_loss=0.03761, over 3630896.42 frames. ], batch size: 205, lr: 4.23e-03, grad_scale: 16.0
+2023-03-29 09:38:14,008 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5555, 3.6875, 3.6862, 3.5429, 3.5328, 3.6360, 3.2054, 3.2247],
+ device='cuda:0'), covar=tensor([0.0598, 0.0601, 0.0596, 0.0585, 0.0736, 0.0590, 0.0727, 0.1133],
+ device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0298, 0.0307, 0.0268, 0.0280, 0.0260, 0.0276, 0.0322],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-29 09:38:24,322 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0
+2023-03-29 09:38:51,675 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67313.0, num_to_drop=1, layers_to_drop={0}
+2023-03-29 09:39:24,437 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.430e+02 3.820e+02 4.510e+02 5.565e+02 1.076e+03, threshold=9.020e+02, percent-clipped=2.0
+2023-03-29 09:39:30,776 INFO [train.py:892] (0/4) Epoch 37, batch 550, loss[loss=0.1639, simple_loss=0.2428, pruned_loss=0.04244, over 19741.00 frames. ], tot_loss[loss=0.155, simple_loss=0.2349, pruned_loss=0.03758, over 3701064.06 frames. ], batch size: 291, lr: 4.23e-03, grad_scale: 16.0
+2023-03-29 09:39:39,878 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6164, 3.9173, 4.1770, 4.6753, 3.0744, 3.4952, 2.8777, 2.9104],
+ device='cuda:0'), covar=tensor([0.0508, 0.1866, 0.0822, 0.0384, 0.2082, 0.1106, 0.1346, 0.1659],
+ device='cuda:0'), in_proj_covar=tensor([0.0248, 0.0328, 0.0251, 0.0208, 0.0250, 0.0213, 0.0223, 0.0218],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-29 09:41:25,515 INFO [train.py:892] (0/4) Epoch 37, batch 600, loss[loss=0.1444, simple_loss=0.2264, pruned_loss=0.03125, over 19837.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2359, pruned_loss=0.0379, over 3756382.11 frames. ], batch size: 239, lr: 4.23e-03, grad_scale: 32.0
+2023-03-29 09:41:36,859 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5766, 3.2004, 3.5522, 3.0328, 3.8380, 3.7604, 4.3985, 4.8607],
+ device='cuda:0'), covar=tensor([0.0494, 0.1593, 0.1433, 0.2240, 0.1567, 0.1351, 0.0582, 0.0471],
+ device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0245, 0.0273, 0.0259, 0.0305, 0.0264, 0.0239, 0.0265],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-29 09:41:42,807 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67387.0, num_to_drop=1, layers_to_drop={1}
+2023-03-29 09:42:14,609 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5125, 3.4362, 3.4150, 3.1386, 3.5078, 2.5593, 2.8087, 1.6260],
+ device='cuda:0'), covar=tensor([0.0239, 0.0265, 0.0195, 0.0255, 0.0187, 0.1330, 0.0697, 0.1896],
+ device='cuda:0'), in_proj_covar=tensor([0.0106, 0.0148, 0.0115, 0.0135, 0.0120, 0.0135, 0.0143, 0.0128],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-29 09:42:26,364 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9234, 2.4999, 2.8049, 3.2182, 3.6794, 3.9835, 3.8521, 3.8544],
+ device='cuda:0'), covar=tensor([0.1071, 0.1777, 0.1431, 0.0677, 0.0444, 0.0296, 0.0440, 0.0534],
+ device='cuda:0'), in_proj_covar=tensor([0.0164, 0.0172, 0.0182, 0.0156, 0.0141, 0.0137, 0.0130, 0.0122],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-29 09:42:59,573 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0
+2023-03-29 09:43:17,840 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.189e+02 3.534e+02 4.252e+02 5.287e+02 1.120e+03, threshold=8.504e+02, percent-clipped=1.0
+2023-03-29 09:43:23,994 INFO [train.py:892] (0/4) Epoch 37, batch 650, loss[loss=0.1414, simple_loss=0.211, pruned_loss=0.03591, over 19812.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2363, pruned_loss=0.03866, over 3799451.46 frames. ], batch size: 132, lr: 4.23e-03, grad_scale: 32.0
+2023-03-29 09:43:33,653 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67435.0, num_to_drop=1, layers_to_drop={0}
+2023-03-29 09:43:42,591 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67439.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 09:43:51,834 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.57 vs. limit=2.0
+2023-03-29 09:44:21,141 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4359, 3.8158, 3.9734, 4.4351, 2.8985, 3.3931, 2.7635, 2.7640],
+ device='cuda:0'), covar=tensor([0.0487, 0.1693, 0.0800, 0.0395, 0.2001, 0.1030, 0.1292, 0.1547],
+ device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0325, 0.0249, 0.0206, 0.0248, 0.0210, 0.0221, 0.0216],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-29 09:45:16,686 INFO [train.py:892] (0/4) Epoch 37, batch 700, loss[loss=0.1747, simple_loss=0.2525, pruned_loss=0.04844, over 19781.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2372, pruned_loss=0.03908, over 3833309.19 frames. ], batch size: 280, lr: 4.23e-03, grad_scale: 32.0
+2023-03-29 09:46:31,356 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67513.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 09:47:05,441 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.319e+02 3.696e+02 4.491e+02 5.364e+02 8.859e+02, threshold=8.982e+02, percent-clipped=1.0
+2023-03-29 09:47:12,735 INFO [train.py:892] (0/4) Epoch 37, batch 750, loss[loss=0.1561, simple_loss=0.2334, pruned_loss=0.03939, over 19853.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2364, pruned_loss=0.0384, over 3859692.24 frames. ], batch size: 137, lr: 4.23e-03, grad_scale: 32.0
+2023-03-29 09:48:51,595 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67574.0, num_to_drop=1, layers_to_drop={2}
+2023-03-29 09:49:07,698 INFO [train.py:892] (0/4) Epoch 37, batch 800, loss[loss=0.1287, simple_loss=0.2126, pruned_loss=0.02241, over 19856.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2364, pruned_loss=0.03825, over 3880326.03 frames. ], batch size: 106, lr: 4.22e-03, grad_scale: 32.0
+2023-03-29 09:50:18,525 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67613.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 09:50:51,539 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.125e+02 3.592e+02 4.129e+02 4.799e+02 7.012e+02, threshold=8.258e+02, percent-clipped=0.0
+2023-03-29 09:50:58,194 INFO [train.py:892] (0/4) Epoch 37, batch 850, loss[loss=0.192, simple_loss=0.2762, pruned_loss=0.05391, over 19675.00 frames. ], tot_loss[loss=0.1557, simple_loss=0.236, pruned_loss=0.03772, over 3895759.09 frames. ], batch size: 64, lr: 4.22e-03, grad_scale: 32.0
+2023-03-29 09:52:10,321 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67661.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 09:52:53,806 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67680.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 09:52:54,898 INFO [train.py:892] (0/4) Epoch 37, batch 900, loss[loss=0.1387, simple_loss=0.2194, pruned_loss=0.02905, over 19703.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2364, pruned_loss=0.03802, over 3907139.76 frames. ], batch size: 101, lr: 4.22e-03, grad_scale: 16.0
+2023-03-29 09:53:21,524 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67692.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 09:54:45,462 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.385e+02 3.765e+02 4.292e+02 5.023e+02 7.542e+02, threshold=8.583e+02, percent-clipped=0.0
+2023-03-29 09:54:49,555 INFO [train.py:892] (0/4) Epoch 37, batch 950, loss[loss=0.1654, simple_loss=0.2483, pruned_loss=0.04123, over 19809.00 frames. ], tot_loss[loss=0.1563, simple_loss=0.2367, pruned_loss=0.03797, over 3917013.19 frames. ], batch size: 68, lr: 4.22e-03, grad_scale: 16.0
+2023-03-29 09:55:09,631 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67739.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 09:55:13,997 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67741.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 09:55:41,422 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67753.0, num_to_drop=1, layers_to_drop={2}
+2023-03-29 09:56:23,383 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.8538, 6.1243, 6.1778, 6.0156, 5.9175, 6.1273, 5.4539, 5.4797],
+ device='cuda:0'), covar=tensor([0.0390, 0.0427, 0.0417, 0.0397, 0.0570, 0.0456, 0.0613, 0.1051],
+ device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0295, 0.0303, 0.0266, 0.0276, 0.0257, 0.0271, 0.0318],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-29 09:56:41,360 INFO [train.py:892] (0/4) Epoch 37, batch 1000, loss[loss=0.1816, simple_loss=0.242, pruned_loss=0.06058, over 19807.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2361, pruned_loss=0.03791, over 3924056.17 frames. ], batch size: 132, lr: 4.22e-03, grad_scale: 16.0
+2023-03-29 09:56:54,986 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67787.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 09:58:22,239 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0
+2023-03-29 09:58:28,696 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.309e+02 3.606e+02 4.244e+02 4.990e+02 9.256e+02, threshold=8.487e+02, percent-clipped=3.0
+2023-03-29 09:58:34,536 INFO [train.py:892] (0/4) Epoch 37, batch 1050, loss[loss=0.1396, simple_loss=0.2283, pruned_loss=0.0254, over 19839.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2379, pruned_loss=0.03866, over 3928818.23 frames. ], batch size: 59, lr: 4.22e-03, grad_scale: 16.0
+2023-03-29 09:58:35,481 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7395, 2.8527, 2.9282, 2.8719, 2.7030, 2.8398, 2.7306, 2.8608],
+ device='cuda:0'), covar=tensor([0.0328, 0.0301, 0.0339, 0.0289, 0.0471, 0.0324, 0.0361, 0.0335],
+ device='cuda:0'), in_proj_covar=tensor([0.0093, 0.0087, 0.0089, 0.0083, 0.0097, 0.0089, 0.0105, 0.0078],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-29 09:59:13,776 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.8961, 6.2379, 6.2977, 6.1101, 5.9639, 6.2052, 5.5466, 5.6102],
+ device='cuda:0'), covar=tensor([0.0424, 0.0416, 0.0387, 0.0406, 0.0494, 0.0482, 0.0584, 0.0909],
+ device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0297, 0.0303, 0.0267, 0.0278, 0.0258, 0.0272, 0.0319],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-29 10:00:00,864 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67869.0, num_to_drop=1, layers_to_drop={0}
+2023-03-29 10:00:25,292 INFO [train.py:892] (0/4) Epoch 37, batch 1100, loss[loss=0.1332, simple_loss=0.2145, pruned_loss=0.02594, over 19750.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2373, pruned_loss=0.03841, over 3933873.56 frames. ], batch size: 118, lr: 4.22e-03, grad_scale: 16.0
+2023-03-29 10:02:13,702 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.252e+02 3.748e+02 4.417e+02 5.113e+02 1.132e+03, threshold=8.834e+02, percent-clipped=3.0
+2023-03-29 10:02:17,996 INFO [train.py:892] (0/4) Epoch 37, batch 1150, loss[loss=0.1679, simple_loss=0.2443, pruned_loss=0.0458, over 19750.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2359, pruned_loss=0.03799, over 3939455.79 frames. ], batch size: 250, lr: 4.21e-03, grad_scale: 16.0
+2023-03-29 10:03:26,819 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3475, 4.4758, 2.5436, 4.7090, 4.9258, 2.1123, 4.1148, 3.4921],
+ device='cuda:0'), covar=tensor([0.0681, 0.0677, 0.2613, 0.0610, 0.0422, 0.2595, 0.0883, 0.0933],
+ device='cuda:0'), in_proj_covar=tensor([0.0241, 0.0264, 0.0235, 0.0283, 0.0263, 0.0207, 0.0244, 0.0205],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-29 10:04:11,697 INFO [train.py:892] (0/4) Epoch 37, batch 1200, loss[loss=0.1491, simple_loss=0.2244, pruned_loss=0.03693, over 19817.00 frames. ], tot_loss[loss=0.1563, simple_loss=0.2364, pruned_loss=0.03815, over 3940639.38 frames. ], batch size: 133, lr: 4.21e-03, grad_scale: 16.0
+2023-03-29 10:04:12,687 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67981.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:04:54,525 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-68000.pt
+2023-03-29 10:05:19,880 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.66 vs. limit=5.0
+2023-03-29 10:06:05,466 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.592e+02 3.677e+02 4.252e+02 5.217e+02 1.129e+03, threshold=8.505e+02, percent-clipped=2.0
+2023-03-29 10:06:09,659 INFO [train.py:892] (0/4) Epoch 37, batch 1250, loss[loss=0.2009, simple_loss=0.2807, pruned_loss=0.06058, over 19707.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2361, pruned_loss=0.03813, over 3942707.36 frames. ], batch size: 325, lr: 4.21e-03, grad_scale: 16.0
+2023-03-29 10:06:20,216 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68036.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:06:34,055 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68042.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:06:50,956 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68048.0, num_to_drop=1, layers_to_drop={1}
+2023-03-29 10:08:04,325 INFO [train.py:892] (0/4) Epoch 37, batch 1300, loss[loss=0.1333, simple_loss=0.2143, pruned_loss=0.02617, over 19812.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2361, pruned_loss=0.03781, over 3943658.67 frames. ], batch size: 103, lr: 4.21e-03, grad_scale: 16.0
+2023-03-29 10:08:18,878 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68087.0, num_to_drop=1, layers_to_drop={0}
+2023-03-29 10:08:39,416 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2914, 3.5296, 3.1813, 2.7385, 3.1735, 3.4664, 3.4791, 3.5080],
+ device='cuda:0'), covar=tensor([0.0324, 0.0382, 0.0283, 0.0487, 0.0318, 0.0289, 0.0215, 0.0238],
+ device='cuda:0'), in_proj_covar=tensor([0.0109, 0.0102, 0.0104, 0.0104, 0.0108, 0.0092, 0.0092, 0.0092],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-29 10:09:12,792 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2171, 3.6348, 3.6950, 4.2141, 3.0671, 3.4748, 2.6625, 2.6875],
+ device='cuda:0'), covar=tensor([0.0529, 0.1759, 0.0884, 0.0381, 0.1785, 0.0855, 0.1423, 0.1638],
+ device='cuda:0'), in_proj_covar=tensor([0.0248, 0.0326, 0.0249, 0.0206, 0.0248, 0.0212, 0.0221, 0.0218],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-29 10:09:54,452 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.204e+02 3.685e+02 4.439e+02 5.105e+02 9.466e+02, threshold=8.879e+02, percent-clipped=1.0
+2023-03-29 10:09:58,204 INFO [train.py:892] (0/4) Epoch 37, batch 1350, loss[loss=0.1881, simple_loss=0.2754, pruned_loss=0.05041, over 19628.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.236, pruned_loss=0.03757, over 3945198.13 frames. ], batch size: 359, lr: 4.21e-03, grad_scale: 16.0
+2023-03-29 10:10:35,144 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68148.0, num_to_drop=1, layers_to_drop={0}
+2023-03-29 10:10:43,466 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0
+2023-03-29 10:11:23,991 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68169.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:11:36,230 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1574, 3.4682, 2.9199, 2.5831, 3.0040, 3.3810, 3.3775, 3.3759],
+ device='cuda:0'), covar=tensor([0.0314, 0.0253, 0.0313, 0.0523, 0.0340, 0.0279, 0.0212, 0.0217],
+ device='cuda:0'), in_proj_covar=tensor([0.0110, 0.0103, 0.0105, 0.0105, 0.0109, 0.0093, 0.0093, 0.0093],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-29 10:11:38,226 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8941, 4.5531, 4.6400, 4.3405, 4.8435, 3.1371, 3.9198, 2.4423],
+ device='cuda:0'), covar=tensor([0.0158, 0.0188, 0.0128, 0.0177, 0.0128, 0.0958, 0.0824, 0.1470],
+ device='cuda:0'), in_proj_covar=tensor([0.0106, 0.0149, 0.0115, 0.0136, 0.0121, 0.0136, 0.0143, 0.0129],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-29 10:11:48,279 INFO [train.py:892] (0/4) Epoch 37, batch 1400, loss[loss=0.1501, simple_loss=0.2325, pruned_loss=0.03379, over 19818.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.236, pruned_loss=0.03749, over 3946695.37 frames. ], batch size: 229, lr: 4.21e-03, grad_scale: 16.0
+2023-03-29 10:13:09,624 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68217.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:13:35,443 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.616e+02 3.628e+02 4.259e+02 5.104e+02 1.150e+03, threshold=8.519e+02, percent-clipped=2.0
+2023-03-29 10:13:41,119 INFO [train.py:892] (0/4) Epoch 37, batch 1450, loss[loss=0.1367, simple_loss=0.2182, pruned_loss=0.0276, over 19898.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2351, pruned_loss=0.03709, over 3948689.93 frames. ], batch size: 91, lr: 4.20e-03, grad_scale: 16.0
+2023-03-29 10:14:33,281 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9533, 3.6318, 3.7967, 3.9446, 3.7693, 3.8949, 4.0252, 4.2294],
+ device='cuda:0'), covar=tensor([0.0662, 0.0480, 0.0579, 0.0422, 0.0707, 0.0613, 0.0466, 0.0322],
+ device='cuda:0'), in_proj_covar=tensor([0.0157, 0.0182, 0.0205, 0.0180, 0.0179, 0.0164, 0.0155, 0.0202],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003],
+ device='cuda:0')
+2023-03-29 10:15:37,199 INFO [train.py:892] (0/4) Epoch 37, batch 1500, loss[loss=0.2189, simple_loss=0.2985, pruned_loss=0.06962, over 19626.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.2348, pruned_loss=0.03689, over 3951393.98 frames. ], batch size: 359, lr: 4.20e-03, grad_scale: 16.0
+2023-03-29 10:17:23,946 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.350e+02 3.594e+02 4.207e+02 5.059e+02 8.265e+02, threshold=8.414e+02, percent-clipped=0.0
+2023-03-29 10:17:29,431 INFO [train.py:892] (0/4) Epoch 37, batch 1550, loss[loss=0.163, simple_loss=0.2537, pruned_loss=0.03612, over 19716.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2341, pruned_loss=0.03666, over 3952120.24 frames. ], batch size: 54, lr: 4.20e-03, grad_scale: 16.0
+2023-03-29 10:17:40,412 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68336.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:17:42,245 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68337.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:17:44,542 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2045, 3.3407, 2.1051, 3.4058, 3.4811, 1.7560, 2.9343, 2.7427],
+ device='cuda:0'), covar=tensor([0.0900, 0.0840, 0.2723, 0.0809, 0.0640, 0.2421, 0.1155, 0.0978],
+ device='cuda:0'), in_proj_covar=tensor([0.0241, 0.0264, 0.0235, 0.0283, 0.0262, 0.0207, 0.0244, 0.0204],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-29 10:17:44,732 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.36 vs. limit=5.0
+2023-03-29 10:18:06,646 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68348.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:19:17,873 INFO [train.py:892] (0/4) Epoch 37, batch 1600, loss[loss=0.1456, simple_loss=0.232, pruned_loss=0.02964, over 19794.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2351, pruned_loss=0.03719, over 3952718.80 frames. ], batch size: 68, lr: 4.20e-03, grad_scale: 16.0
+2023-03-29 10:19:26,508 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68384.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:19:54,639 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68396.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:20:01,312 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68399.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:21:05,153 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0524, 3.2983, 2.7905, 2.4943, 2.9986, 3.1559, 3.2016, 3.1762],
+ device='cuda:0'), covar=tensor([0.0324, 0.0268, 0.0331, 0.0519, 0.0325, 0.0298, 0.0275, 0.0227],
+ device='cuda:0'), in_proj_covar=tensor([0.0111, 0.0103, 0.0105, 0.0106, 0.0109, 0.0093, 0.0093, 0.0093],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-29 10:21:10,225 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.336e+02 3.362e+02 4.148e+02 5.414e+02 8.370e+02, threshold=8.296e+02, percent-clipped=0.0
+2023-03-29 10:21:15,836 INFO [train.py:892] (0/4) Epoch 37, batch 1650, loss[loss=0.1221, simple_loss=0.2006, pruned_loss=0.02179, over 19854.00 frames. ], tot_loss[loss=0.155, simple_loss=0.2357, pruned_loss=0.03716, over 3950478.76 frames. ], batch size: 104, lr: 4.20e-03, grad_scale: 16.0
+2023-03-29 10:21:23,136 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4611, 3.5522, 2.2244, 3.6884, 3.7891, 1.7823, 3.1228, 2.9900],
+ device='cuda:0'), covar=tensor([0.0817, 0.0822, 0.2758, 0.0859, 0.0639, 0.2725, 0.1232, 0.1000],
+ device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0265, 0.0236, 0.0284, 0.0263, 0.0208, 0.0244, 0.0205],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-29 10:21:44,739 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68443.0, num_to_drop=1, layers_to_drop={3}
+2023-03-29 10:22:24,168 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68460.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:23:09,505 INFO [train.py:892] (0/4) Epoch 37, batch 1700, loss[loss=0.1435, simple_loss=0.2299, pruned_loss=0.0285, over 19848.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2351, pruned_loss=0.03683, over 3951907.53 frames. ], batch size: 59, lr: 4.20e-03, grad_scale: 16.0
+2023-03-29 10:24:53,978 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.100e+02 3.371e+02 4.024e+02 4.531e+02 9.295e+02, threshold=8.049e+02, percent-clipped=2.0
+2023-03-29 10:24:57,888 INFO [train.py:892] (0/4) Epoch 37, batch 1750, loss[loss=0.1472, simple_loss=0.2312, pruned_loss=0.0316, over 19848.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2348, pruned_loss=0.03717, over 3952676.77 frames. ], batch size: 58, lr: 4.20e-03, grad_scale: 16.0
+2023-03-29 10:25:02,579 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5965, 2.9397, 3.1213, 3.4830, 2.4636, 3.1036, 2.3266, 2.3713],
+ device='cuda:0'), covar=tensor([0.0564, 0.1371, 0.0964, 0.0463, 0.2002, 0.0771, 0.1388, 0.1570],
+ device='cuda:0'), in_proj_covar=tensor([0.0249, 0.0328, 0.0251, 0.0208, 0.0249, 0.0213, 0.0223, 0.0219],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-29 10:25:37,658 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8289, 3.1494, 2.8071, 2.3782, 2.8679, 3.0975, 3.0420, 3.0660],
+ device='cuda:0'), covar=tensor([0.0349, 0.0259, 0.0302, 0.0522, 0.0335, 0.0291, 0.0240, 0.0235],
+ device='cuda:0'), in_proj_covar=tensor([0.0110, 0.0103, 0.0105, 0.0106, 0.0109, 0.0093, 0.0093, 0.0093],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-29 10:26:31,122 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68578.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:26:35,974 INFO [train.py:892] (0/4) Epoch 37, batch 1800, loss[loss=0.1568, simple_loss=0.2334, pruned_loss=0.04004, over 19770.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2359, pruned_loss=0.03794, over 3951291.22 frames. ], batch size: 253, lr: 4.19e-03, grad_scale: 16.0
+2023-03-29 10:28:03,168 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.060e+02 3.457e+02 4.221e+02 5.089e+02 1.107e+03, threshold=8.442e+02, percent-clipped=2.0
+2023-03-29 10:28:06,934 INFO [train.py:892] (0/4) Epoch 37, batch 1850, loss[loss=0.1575, simple_loss=0.2419, pruned_loss=0.03657, over 19821.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2369, pruned_loss=0.03794, over 3950010.77 frames. ], batch size: 57, lr: 4.19e-03, grad_scale: 16.0
+2023-03-29 10:28:14,467 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-37.pt
+2023-03-29 10:29:10,997 INFO [train.py:892] (0/4) Epoch 38, batch 0, loss[loss=0.1653, simple_loss=0.2498, pruned_loss=0.04037, over 19739.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2498, pruned_loss=0.04037, over 19739.00 frames. ], batch size: 221, lr: 4.14e-03, grad_scale: 16.0
+2023-03-29 10:29:10,998 INFO [train.py:917] (0/4) Computing validation loss
+2023-03-29 10:29:46,258 INFO [train.py:926] (0/4) Epoch 38, validation: loss=0.1847, simple_loss=0.2497, pruned_loss=0.05979, over 2883724.00 frames.
+2023-03-29 10:29:46,259 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB
+2023-03-29 10:29:49,687 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68637.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:29:54,489 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68639.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:31:45,407 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68685.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:31:45,526 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68685.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:31:46,756 INFO [train.py:892] (0/4) Epoch 38, batch 50, loss[loss=0.1629, simple_loss=0.2387, pruned_loss=0.04355, over 19746.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2339, pruned_loss=0.03636, over 888887.93 frames. ], batch size: 276, lr: 4.13e-03, grad_scale: 16.0
+2023-03-29 10:33:14,535 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6498, 3.6044, 3.5698, 3.3187, 3.6844, 2.7355, 3.0215, 1.6989],
+ device='cuda:0'), covar=tensor([0.0259, 0.0260, 0.0181, 0.0236, 0.0188, 0.1282, 0.0700, 0.1821],
+ device='cuda:0'), in_proj_covar=tensor([0.0107, 0.0150, 0.0116, 0.0138, 0.0122, 0.0138, 0.0145, 0.0131],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-29 10:33:21,851 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.214e+02 3.660e+02 4.318e+02 5.204e+02 7.882e+02, threshold=8.636e+02, percent-clipped=0.0
+2023-03-29 10:33:36,551 INFO [train.py:892] (0/4) Epoch 38, batch 100, loss[loss=0.1325, simple_loss=0.2113, pruned_loss=0.02684, over 19773.00 frames. ], tot_loss[loss=0.151, simple_loss=0.2317, pruned_loss=0.03514, over 1568341.62 frames. ], batch size: 116, lr: 4.13e-03, grad_scale: 16.0
+2023-03-29 10:33:54,240 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68743.0, num_to_drop=1, layers_to_drop={1}
+2023-03-29 10:34:01,118 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68746.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:34:22,491 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68755.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:35:24,032 INFO [train.py:892] (0/4) Epoch 38, batch 150, loss[loss=0.1398, simple_loss=0.221, pruned_loss=0.02926, over 19535.00 frames. ], tot_loss[loss=0.1526, simple_loss=0.2329, pruned_loss=0.03616, over 2097179.30 frames. ], batch size: 46, lr: 4.13e-03, grad_scale: 16.0
+2023-03-29 10:35:35,750 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5785, 2.6164, 2.6354, 2.2037, 2.7429, 2.3345, 2.6900, 2.6412],
+ device='cuda:0'), covar=tensor([0.0510, 0.0569, 0.0638, 0.0904, 0.0502, 0.0555, 0.0513, 0.0397],
+ device='cuda:0'), in_proj_covar=tensor([0.0083, 0.0092, 0.0088, 0.0115, 0.0084, 0.0087, 0.0085, 0.0079],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-29 10:35:37,965 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68791.0, num_to_drop=1, layers_to_drop={1}
+2023-03-29 10:37:03,421 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.249e+02 3.607e+02 4.338e+02 5.110e+02 1.083e+03, threshold=8.676e+02, percent-clipped=2.0
+2023-03-29 10:37:19,984 INFO [train.py:892] (0/4) Epoch 38, batch 200, loss[loss=0.1422, simple_loss=0.2224, pruned_loss=0.03103, over 19782.00 frames. ], tot_loss[loss=0.1521, simple_loss=0.2326, pruned_loss=0.03581, over 2508870.84 frames. ], batch size: 52, lr: 4.13e-03, grad_scale: 16.0
+2023-03-29 10:37:20,851 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7324, 4.8713, 5.1546, 4.8959, 5.0240, 4.6627, 4.8931, 4.6307],
+ device='cuda:0'), covar=tensor([0.1615, 0.1419, 0.0833, 0.1288, 0.0863, 0.0903, 0.1893, 0.2172],
+ device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0341, 0.0377, 0.0309, 0.0284, 0.0288, 0.0367, 0.0397],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004],
+ device='cuda:0')
+2023-03-29 10:37:55,504 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9151, 4.0315, 4.3109, 4.0506, 4.2583, 3.8511, 4.0277, 3.7763],
+ device='cuda:0'), covar=tensor([0.1629, 0.1661, 0.0981, 0.1377, 0.1033, 0.1108, 0.1952, 0.2330],
+ device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0341, 0.0377, 0.0309, 0.0284, 0.0289, 0.0368, 0.0398],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004],
+ device='cuda:0')
+2023-03-29 10:39:12,964 INFO [train.py:892] (0/4) Epoch 38, batch 250, loss[loss=0.1608, simple_loss=0.2342, pruned_loss=0.04366, over 19836.00 frames. ], tot_loss[loss=0.1527, simple_loss=0.2332, pruned_loss=0.03606, over 2829076.36 frames. ], batch size: 197, lr: 4.13e-03, grad_scale: 16.0
+2023-03-29 10:40:13,397 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0
+2023-03-29 10:40:53,757 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.221e+02 3.576e+02 4.212e+02 4.903e+02 9.274e+02, threshold=8.425e+02, percent-clipped=1.0
+2023-03-29 10:41:06,793 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68934.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:41:10,043 INFO [train.py:892] (0/4) Epoch 38, batch 300, loss[loss=0.1422, simple_loss=0.2225, pruned_loss=0.03095, over 19902.00 frames. ], tot_loss[loss=0.152, simple_loss=0.2323, pruned_loss=0.0358, over 3078469.58 frames. ], batch size: 80, lr: 4.13e-03, grad_scale: 16.0
+2023-03-29 10:42:28,301 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.64 vs. limit=5.0
+2023-03-29 10:43:04,367 INFO [train.py:892] (0/4) Epoch 38, batch 350, loss[loss=0.1438, simple_loss=0.2134, pruned_loss=0.03706, over 19852.00 frames. ], tot_loss[loss=0.1532, simple_loss=0.2335, pruned_loss=0.03643, over 3271363.06 frames. ], batch size: 144, lr: 4.13e-03, grad_scale: 16.0
+2023-03-29 10:43:19,759 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8376, 4.9149, 5.2359, 4.9891, 5.0898, 4.7846, 4.9561, 4.7436],
+ device='cuda:0'), covar=tensor([0.1530, 0.1603, 0.0832, 0.1329, 0.0783, 0.0871, 0.1906, 0.2145],
+ device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0345, 0.0380, 0.0312, 0.0286, 0.0291, 0.0370, 0.0401],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004],
+ device='cuda:0')
+2023-03-29 10:44:39,916 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.577e+02 3.628e+02 4.401e+02 5.124e+02 1.005e+03, threshold=8.801e+02, percent-clipped=1.0
+2023-03-29 10:44:56,378 INFO [train.py:892] (0/4) Epoch 38, batch 400, loss[loss=0.1709, simple_loss=0.25, pruned_loss=0.04587, over 19781.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.2345, pruned_loss=0.037, over 3419606.79 frames. ], batch size: 213, lr: 4.12e-03, grad_scale: 16.0
+2023-03-29 10:45:08,669 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69041.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:45:13,642 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69043.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:45:27,790 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9025, 3.7809, 4.1520, 3.7979, 3.5679, 4.0297, 3.8496, 4.2067],
+ device='cuda:0'), covar=tensor([0.0758, 0.0383, 0.0348, 0.0429, 0.1130, 0.0543, 0.0538, 0.0352],
+ device='cuda:0'), in_proj_covar=tensor([0.0288, 0.0229, 0.0228, 0.0240, 0.0209, 0.0251, 0.0242, 0.0226],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-29 10:45:34,749 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.63 vs. limit=5.0
+2023-03-29 10:45:41,691 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69055.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:46:50,040 INFO [train.py:892] (0/4) Epoch 38, batch 450, loss[loss=0.1604, simple_loss=0.2516, pruned_loss=0.03466, over 19699.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2367, pruned_loss=0.03775, over 3534768.58 frames. ], batch size: 56, lr: 4.12e-03, grad_scale: 16.0
+2023-03-29 10:47:28,744 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69103.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:47:31,065 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69104.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:48:23,826 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.342e+02 3.756e+02 4.232e+02 5.064e+02 8.468e+02, threshold=8.465e+02, percent-clipped=0.0
+2023-03-29 10:48:24,734 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6552, 4.4617, 5.0390, 4.5340, 4.2028, 4.8686, 4.6725, 5.2309],
+ device='cuda:0'), covar=tensor([0.0955, 0.0478, 0.0418, 0.0421, 0.0897, 0.0531, 0.0560, 0.0386],
+ device='cuda:0'), in_proj_covar=tensor([0.0288, 0.0230, 0.0228, 0.0240, 0.0210, 0.0252, 0.0242, 0.0226],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-29 10:48:30,090 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8436, 3.9497, 2.4248, 4.1183, 4.2923, 1.8950, 3.5409, 3.2770],
+ device='cuda:0'), covar=tensor([0.0753, 0.0822, 0.2661, 0.0844, 0.0570, 0.2690, 0.1043, 0.0895],
+ device='cuda:0'), in_proj_covar=tensor([0.0239, 0.0263, 0.0234, 0.0283, 0.0262, 0.0206, 0.0243, 0.0204],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-29 10:48:39,785 INFO [train.py:892] (0/4) Epoch 38, batch 500, loss[loss=0.1508, simple_loss=0.2251, pruned_loss=0.03823, over 19788.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2354, pruned_loss=0.03757, over 3626941.89 frames. ], batch size: 191, lr: 4.12e-03, grad_scale: 16.0
+2023-03-29 10:49:38,238 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69162.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:49:52,930 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69168.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:50:22,712 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.9206, 6.0080, 6.0255, 6.0337, 5.8033, 5.9802, 5.3451, 5.1066],
+ device='cuda:0'), covar=tensor([0.0838, 0.0897, 0.0838, 0.0624, 0.0925, 0.0894, 0.1299, 0.2397],
+ device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0302, 0.0311, 0.0273, 0.0283, 0.0264, 0.0279, 0.0328],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-29 10:50:32,786 INFO [train.py:892] (0/4) Epoch 38, batch 550, loss[loss=0.1315, simple_loss=0.208, pruned_loss=0.02746, over 19760.00 frames. ], tot_loss[loss=0.1557, simple_loss=0.2358, pruned_loss=0.03782, over 3699098.56 frames. ], batch size: 188, lr: 4.12e-03, grad_scale: 16.0
+2023-03-29 10:50:42,636 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.68 vs. limit=5.0
+2023-03-29 10:50:54,151 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1613, 4.3795, 4.4229, 4.2966, 4.1592, 4.3734, 3.9725, 3.9732],
+ device='cuda:0'), covar=tensor([0.0612, 0.0556, 0.0511, 0.0489, 0.0671, 0.0553, 0.0708, 0.1061],
+ device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0303, 0.0311, 0.0273, 0.0283, 0.0264, 0.0279, 0.0328],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-29 10:51:40,796 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.7751, 1.6184, 1.8384, 1.7907, 1.7042, 1.7733, 1.5966, 1.8021],
+ device='cuda:0'), covar=tensor([0.0423, 0.0402, 0.0339, 0.0344, 0.0509, 0.0356, 0.0571, 0.0345],
+ device='cuda:0'), in_proj_covar=tensor([0.0092, 0.0087, 0.0090, 0.0084, 0.0096, 0.0089, 0.0106, 0.0079],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-29 10:51:57,145 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69223.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:52:02,594 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69225.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:52:11,492 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.978e+02 3.709e+02 4.324e+02 5.706e+02 8.766e+02, threshold=8.649e+02, percent-clipped=1.0
+2023-03-29 10:52:12,593 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69229.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:52:13,113 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.46 vs. limit=5.0
+2023-03-29 10:52:23,804 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69234.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:52:27,267 INFO [train.py:892] (0/4) Epoch 38, batch 600, loss[loss=0.1403, simple_loss=0.2178, pruned_loss=0.03144, over 19768.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.237, pruned_loss=0.03822, over 3753204.27 frames. ], batch size: 163, lr: 4.12e-03, grad_scale: 16.0
+2023-03-29 10:52:49,556 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69245.0, num_to_drop=1, layers_to_drop={1}
+2023-03-29 10:53:53,926 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69273.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:53:58,399 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3125, 3.5405, 2.0812, 3.6452, 3.7327, 1.7683, 2.8641, 2.6866],
+ device='cuda:0'), covar=tensor([0.1000, 0.0845, 0.3151, 0.0842, 0.0639, 0.2876, 0.1525, 0.1189],
+ device='cuda:0'), in_proj_covar=tensor([0.0240, 0.0264, 0.0236, 0.0284, 0.0263, 0.0207, 0.0244, 0.0205],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:0')
+2023-03-29 10:54:14,113 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69282.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:54:14,180 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.4721, 5.8034, 5.9589, 5.6602, 5.6920, 5.6785, 5.5724, 5.4329],
+ device='cuda:0'), covar=tensor([0.1360, 0.1247, 0.0764, 0.1166, 0.0659, 0.0702, 0.1987, 0.1909],
+ device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0344, 0.0379, 0.0311, 0.0287, 0.0290, 0.0370, 0.0399],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004],
+ device='cuda:0')
+2023-03-29 10:54:22,215 INFO [train.py:892] (0/4) Epoch 38, batch 650, loss[loss=0.1593, simple_loss=0.2441, pruned_loss=0.03723, over 19839.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.236, pruned_loss=0.03807, over 3797137.53 frames. ], batch size: 128, lr: 4.12e-03, grad_scale: 16.0
+2023-03-29 10:54:23,368 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69286.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:55:09,544 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3967, 4.2812, 4.7012, 4.2990, 3.9771, 4.5173, 4.3314, 4.7417],
+ device='cuda:0'), covar=tensor([0.0735, 0.0366, 0.0324, 0.0369, 0.0939, 0.0522, 0.0469, 0.0359],
+ device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0231, 0.0228, 0.0241, 0.0211, 0.0252, 0.0242, 0.0226],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-29 10:55:11,532 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69306.0, num_to_drop=1, layers_to_drop={2}
+2023-03-29 10:55:42,110 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69320.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:55:59,561 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.644e+02 3.609e+02 4.195e+02 5.280e+02 9.691e+02, threshold=8.389e+02, percent-clipped=2.0
+2023-03-29 10:56:15,728 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69334.0, num_to_drop=1, layers_to_drop={3}
+2023-03-29 10:56:18,450 INFO [train.py:892] (0/4) Epoch 38, batch 700, loss[loss=0.1631, simple_loss=0.2389, pruned_loss=0.04365, over 19750.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2367, pruned_loss=0.03833, over 3831163.39 frames. ], batch size: 250, lr: 4.12e-03, grad_scale: 16.0
+2023-03-29 10:56:30,216 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69341.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:57:30,690 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69368.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:57:34,996 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0
+2023-03-29 10:58:00,750 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69381.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:58:10,277 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0271, 3.7334, 3.8435, 4.0238, 3.8054, 4.0435, 4.1018, 4.2771],
+ device='cuda:0'), covar=tensor([0.0688, 0.0488, 0.0618, 0.0398, 0.0781, 0.0578, 0.0472, 0.0334],
+ device='cuda:0'), in_proj_covar=tensor([0.0160, 0.0184, 0.0207, 0.0182, 0.0181, 0.0165, 0.0158, 0.0205],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003],
+ device='cuda:0')
+2023-03-29 10:58:11,791 INFO [train.py:892] (0/4) Epoch 38, batch 750, loss[loss=0.1476, simple_loss=0.2251, pruned_loss=0.035, over 19759.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2356, pruned_loss=0.03764, over 3856591.78 frames. ], batch size: 188, lr: 4.11e-03, grad_scale: 16.0
+2023-03-29 10:58:19,035 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69389.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:58:29,451 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69394.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:58:40,558 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69399.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 10:59:09,026 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0
+2023-03-29 10:59:32,960 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0
+2023-03-29 10:59:43,620 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.5897, 5.9870, 6.1116, 5.8620, 5.7969, 5.7380, 5.7140, 5.6351],
+ device='cuda:0'), covar=tensor([0.1357, 0.1139, 0.0861, 0.1285, 0.0593, 0.0677, 0.2038, 0.2009],
+ device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0345, 0.0379, 0.0312, 0.0288, 0.0291, 0.0371, 0.0399],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004],
+ device='cuda:0')
+2023-03-29 10:59:51,960 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.082e+02 3.634e+02 4.546e+02 5.374e+02 9.782e+02, threshold=9.091e+02, percent-clipped=2.0
+2023-03-29 10:59:53,059 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69429.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 11:00:07,015 INFO [train.py:892] (0/4) Epoch 38, batch 800, loss[loss=0.1523, simple_loss=0.2297, pruned_loss=0.03744, over 19749.00 frames. ], tot_loss[loss=0.1557, simple_loss=0.2358, pruned_loss=0.03777, over 3877729.92 frames. ], batch size: 44, lr: 4.11e-03, grad_scale: 16.0
+2023-03-29 11:00:53,705 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69455.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 11:02:01,824 INFO [train.py:892] (0/4) Epoch 38, batch 850, loss[loss=0.1949, simple_loss=0.3086, pruned_loss=0.04056, over 18801.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2365, pruned_loss=0.03783, over 3893337.62 frames. ], batch size: 564, lr: 4.11e-03, grad_scale: 16.0
+2023-03-29 11:03:14,590 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69518.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 11:03:19,077 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0
+2023-03-29 11:03:28,472 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69524.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 11:03:39,262 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.319e+02 3.651e+02 4.238e+02 5.380e+02 9.885e+02, threshold=8.476e+02, percent-clipped=3.0
+2023-03-29 11:03:54,824 INFO [train.py:892] (0/4) Epoch 38, batch 900, loss[loss=0.154, simple_loss=0.233, pruned_loss=0.03749, over 19897.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2362, pruned_loss=0.03766, over 3904424.61 frames. ], batch size: 91, lr: 4.11e-03, grad_scale: 16.0
+2023-03-29 11:03:58,013 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.27 vs. limit=5.0
+2023-03-29 11:04:37,576 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1969, 2.7253, 3.3800, 3.3186, 3.9107, 4.5025, 4.1768, 4.3276],
+ device='cuda:0'), covar=tensor([0.0918, 0.1663, 0.1188, 0.0730, 0.0402, 0.0204, 0.0384, 0.0441],
+ device='cuda:0'), in_proj_covar=tensor([0.0163, 0.0169, 0.0179, 0.0154, 0.0140, 0.0135, 0.0130, 0.0120],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-03-29 11:05:35,620 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69581.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 11:05:45,043 INFO [train.py:892] (0/4) Epoch 38, batch 950, loss[loss=0.1652, simple_loss=0.2399, pruned_loss=0.04525, over 19737.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.237, pruned_loss=0.03733, over 3912595.13 frames. ], batch size: 134, lr: 4.11e-03, grad_scale: 16.0
+2023-03-29 11:06:21,693 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69601.0, num_to_drop=1, layers_to_drop={1}
+2023-03-29 11:07:25,048 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.229e+02 3.525e+02 4.148e+02 4.911e+02 8.887e+02, threshold=8.297e+02, percent-clipped=1.0
+2023-03-29 11:07:25,828 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69629.0, num_to_drop=1, layers_to_drop={0}
+2023-03-29 11:07:42,216 INFO [train.py:892] (0/4) Epoch 38, batch 1000, loss[loss=0.1637, simple_loss=0.2434, pruned_loss=0.04203, over 19780.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2372, pruned_loss=0.03755, over 3920836.41 frames. ], batch size: 215, lr: 4.11e-03, grad_scale: 16.0
+2023-03-29 11:08:08,016 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0
+2023-03-29 11:09:09,896 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69674.0, num_to_drop=1, layers_to_drop={1}
+2023-03-29 11:09:14,131 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69676.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 11:09:41,227 INFO [train.py:892] (0/4) Epoch 38, batch 1050, loss[loss=0.146, simple_loss=0.2211, pruned_loss=0.03547, over 19870.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2361, pruned_loss=0.03749, over 3927494.66 frames. ], batch size: 92, lr: 4.10e-03, grad_scale: 32.0
+2023-03-29 11:09:52,959 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0
+2023-03-29 11:10:12,202 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69699.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 11:11:13,835 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69724.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 11:11:23,835 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.661e+02 3.738e+02 4.279e+02 5.183e+02 8.031e+02, threshold=8.559e+02, percent-clipped=0.0
+2023-03-29 11:11:41,177 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69735.0, num_to_drop=1, layers_to_drop={1}
+2023-03-29 11:11:42,284 INFO [train.py:892] (0/4) Epoch 38, batch 1100, loss[loss=0.1584, simple_loss=0.2524, pruned_loss=0.03225, over 19520.00 frames. ], tot_loss[loss=0.156, simple_loss=0.2364, pruned_loss=0.03776, over 3932545.20 frames. ], batch size: 54, lr: 4.10e-03, grad_scale: 32.0
+2023-03-29 11:12:07,673 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69747.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 11:12:15,075 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69750.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 11:13:36,911 INFO [train.py:892] (0/4) Epoch 38, batch 1150, loss[loss=0.1294, simple_loss=0.2173, pruned_loss=0.0207, over 19854.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.235, pruned_loss=0.03699, over 3937336.57 frames. ], batch size: 59, lr: 4.10e-03, grad_scale: 32.0
+2023-03-29 11:14:51,200 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69818.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 11:14:53,374 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9616, 2.9614, 1.8319, 3.4199, 3.2213, 3.3629, 3.4716, 2.8206],
+ device='cuda:0'), covar=tensor([0.0672, 0.0745, 0.1792, 0.0676, 0.0634, 0.0529, 0.0602, 0.0842],
+ device='cuda:0'), in_proj_covar=tensor([0.0147, 0.0147, 0.0146, 0.0157, 0.0137, 0.0141, 0.0153, 0.0150],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-03-29 11:15:03,328 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69824.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 11:15:13,289 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.448e+02 3.531e+02 4.116e+02 4.879e+02 7.988e+02, threshold=8.232e+02, percent-clipped=0.0
+2023-03-29 11:15:27,926 INFO [train.py:892] (0/4) Epoch 38, batch 1200, loss[loss=0.1823, simple_loss=0.268, pruned_loss=0.04826, over 19631.00 frames. ], tot_loss[loss=0.1552, simple_loss=0.2361, pruned_loss=0.03718, over 3940002.03 frames. 
], batch size: 359, lr: 4.10e-03, grad_scale: 32.0 +2023-03-29 11:16:39,601 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69866.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:16:53,564 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69872.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:17:15,038 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69881.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:17:26,313 INFO [train.py:892] (0/4) Epoch 38, batch 1250, loss[loss=0.14, simple_loss=0.2227, pruned_loss=0.02864, over 19887.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2348, pruned_loss=0.03672, over 3942861.52 frames. ], batch size: 47, lr: 4.10e-03, grad_scale: 32.0 +2023-03-29 11:17:44,163 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4941, 3.6006, 2.2346, 4.1447, 3.7561, 4.0931, 4.1916, 3.2856], + device='cuda:0'), covar=tensor([0.0626, 0.0612, 0.1499, 0.0577, 0.0600, 0.0467, 0.0534, 0.0785], + device='cuda:0'), in_proj_covar=tensor([0.0147, 0.0148, 0.0146, 0.0158, 0.0137, 0.0141, 0.0153, 0.0150], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 11:18:00,497 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69901.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 11:18:19,764 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0513, 4.7845, 4.8695, 5.0839, 4.7998, 5.2909, 5.2245, 5.4109], + device='cuda:0'), covar=tensor([0.0722, 0.0359, 0.0501, 0.0383, 0.0689, 0.0429, 0.0389, 0.0308], + device='cuda:0'), in_proj_covar=tensor([0.0160, 0.0185, 0.0209, 0.0183, 0.0184, 0.0167, 0.0158, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 11:19:06,354 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69929.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:19:06,429 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69929.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 11:19:08,016 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.501e+02 3.622e+02 4.531e+02 5.693e+02 1.011e+03, threshold=9.061e+02, percent-clipped=2.0 +2023-03-29 11:19:20,445 INFO [train.py:892] (0/4) Epoch 38, batch 1300, loss[loss=0.1438, simple_loss=0.2269, pruned_loss=0.03028, over 19833.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.2354, pruned_loss=0.0368, over 3945241.78 frames. 
], batch size: 184, lr: 4.10e-03, grad_scale: 16.0 +2023-03-29 11:19:38,875 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1383, 2.0097, 2.1554, 2.1232, 2.1363, 2.2349, 2.1029, 2.2081], + device='cuda:0'), covar=tensor([0.0382, 0.0368, 0.0332, 0.0357, 0.0452, 0.0325, 0.0494, 0.0345], + device='cuda:0'), in_proj_covar=tensor([0.0093, 0.0087, 0.0090, 0.0085, 0.0096, 0.0090, 0.0106, 0.0079], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 11:19:46,443 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69949.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 11:20:47,586 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69976.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:20:49,292 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69977.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:20:55,575 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6922, 4.3807, 4.4462, 4.6572, 4.3864, 4.7975, 4.7857, 5.0012], + device='cuda:0'), covar=tensor([0.0678, 0.0490, 0.0596, 0.0383, 0.0775, 0.0492, 0.0461, 0.0285], + device='cuda:0'), in_proj_covar=tensor([0.0161, 0.0185, 0.0209, 0.0184, 0.0184, 0.0168, 0.0159, 0.0208], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 11:21:07,865 INFO [train.py:892] (0/4) Epoch 38, batch 1350, loss[loss=0.1613, simple_loss=0.2466, pruned_loss=0.03807, over 19825.00 frames. ], tot_loss[loss=0.1534, simple_loss=0.2344, pruned_loss=0.03623, over 3947795.70 frames. ], batch size: 72, lr: 4.10e-03, grad_scale: 16.0 +2023-03-29 11:21:41,188 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9688, 5.2495, 5.3002, 5.1651, 4.9744, 5.2816, 4.7523, 4.8016], + device='cuda:0'), covar=tensor([0.0465, 0.0460, 0.0467, 0.0426, 0.0549, 0.0474, 0.0692, 0.0959], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0300, 0.0311, 0.0272, 0.0281, 0.0262, 0.0276, 0.0324], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 11:21:42,796 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-70000.pt +2023-03-29 11:22:40,229 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70024.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:22:40,435 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70024.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:22:51,496 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.339e+02 3.460e+02 3.863e+02 4.574e+02 6.887e+02, threshold=7.727e+02, percent-clipped=0.0 +2023-03-29 11:22:52,385 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70030.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 11:23:05,930 INFO [train.py:892] (0/4) Epoch 38, batch 1400, loss[loss=0.1684, simple_loss=0.2373, pruned_loss=0.04976, over 19759.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.2352, pruned_loss=0.03665, over 3946467.92 frames. 
], batch size: 217, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:23:41,195 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70050.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:24:30,325 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70072.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:25:00,839 INFO [train.py:892] (0/4) Epoch 38, batch 1450, loss[loss=0.1775, simple_loss=0.2562, pruned_loss=0.04944, over 19694.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2353, pruned_loss=0.03671, over 3947284.66 frames. ], batch size: 265, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:25:26,140 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70098.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:25:31,280 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4143, 2.6183, 3.7037, 3.0219, 3.1489, 3.0161, 2.2596, 2.4309], + device='cuda:0'), covar=tensor([0.1218, 0.3176, 0.0701, 0.1128, 0.1836, 0.1602, 0.2682, 0.2685], + device='cuda:0'), in_proj_covar=tensor([0.0358, 0.0399, 0.0356, 0.0295, 0.0381, 0.0392, 0.0386, 0.0357], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 11:25:53,080 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70110.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:26:05,885 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3236, 3.1644, 3.5117, 2.9717, 3.6072, 3.5728, 4.2089, 4.6032], + device='cuda:0'), covar=tensor([0.0521, 0.1563, 0.1392, 0.2090, 0.1565, 0.1420, 0.0566, 0.0603], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0246, 0.0275, 0.0260, 0.0307, 0.0264, 0.0239, 0.0267], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 11:26:38,225 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.553e+02 3.599e+02 4.475e+02 5.242e+02 1.040e+03, threshold=8.950e+02, percent-clipped=4.0 +2023-03-29 11:26:50,779 INFO [train.py:892] (0/4) Epoch 38, batch 1500, loss[loss=0.1441, simple_loss=0.223, pruned_loss=0.0326, over 19845.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.235, pruned_loss=0.037, over 3948540.42 frames. ], batch size: 109, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:28:04,393 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.9799, 6.2850, 6.2905, 6.1279, 6.0118, 6.2777, 5.5535, 5.6226], + device='cuda:0'), covar=tensor([0.0354, 0.0415, 0.0457, 0.0431, 0.0573, 0.0483, 0.0666, 0.0984], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0299, 0.0310, 0.0271, 0.0280, 0.0261, 0.0275, 0.0323], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 11:28:12,784 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70171.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:28:47,254 INFO [train.py:892] (0/4) Epoch 38, batch 1550, loss[loss=0.1498, simple_loss=0.2383, pruned_loss=0.03069, over 19669.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2366, pruned_loss=0.03786, over 3948040.62 frames. 
], batch size: 51, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:29:15,035 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4546, 3.7046, 2.1589, 3.8374, 3.9217, 1.8344, 3.0298, 2.8119], + device='cuda:0'), covar=tensor([0.0927, 0.0750, 0.2813, 0.0742, 0.0595, 0.2782, 0.1342, 0.1053], + device='cuda:0'), in_proj_covar=tensor([0.0240, 0.0263, 0.0235, 0.0284, 0.0264, 0.0206, 0.0244, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 11:30:25,194 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.741e+02 3.595e+02 4.117e+02 4.902e+02 9.648e+02, threshold=8.233e+02, percent-clipped=1.0 +2023-03-29 11:30:39,812 INFO [train.py:892] (0/4) Epoch 38, batch 1600, loss[loss=0.1904, simple_loss=0.283, pruned_loss=0.04886, over 19538.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2371, pruned_loss=0.03769, over 3948492.81 frames. ], batch size: 54, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:31:32,914 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70260.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:32:29,340 INFO [train.py:892] (0/4) Epoch 38, batch 1650, loss[loss=0.15, simple_loss=0.2375, pruned_loss=0.03127, over 19681.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2363, pruned_loss=0.0373, over 3947079.02 frames. ], batch size: 55, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:33:05,027 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0 +2023-03-29 11:33:52,806 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70321.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:34:12,141 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.483e+02 3.682e+02 4.356e+02 5.206e+02 1.027e+03, threshold=8.712e+02, percent-clipped=2.0 +2023-03-29 11:34:13,250 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70330.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 11:34:27,849 INFO [train.py:892] (0/4) Epoch 38, batch 1700, loss[loss=0.1465, simple_loss=0.2252, pruned_loss=0.03391, over 19804.00 frames. ], tot_loss[loss=0.155, simple_loss=0.2357, pruned_loss=0.03711, over 3947757.63 frames. ], batch size: 195, lr: 4.09e-03, grad_scale: 16.0 +2023-03-29 11:35:08,885 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-03-29 11:35:59,706 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70378.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 11:36:12,508 INFO [train.py:892] (0/4) Epoch 38, batch 1750, loss[loss=0.1374, simple_loss=0.2096, pruned_loss=0.03258, over 19875.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2361, pruned_loss=0.03753, over 3947942.67 frames. ], batch size: 159, lr: 4.08e-03, grad_scale: 16.0 +2023-03-29 11:36:29,262 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. 
limit=2.0 +2023-03-29 11:37:39,932 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.846e+02 3.811e+02 4.551e+02 5.592e+02 1.733e+03, threshold=9.102e+02, percent-clipped=3.0 +2023-03-29 11:37:40,700 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1522, 2.2829, 2.2667, 1.9371, 2.3622, 1.9869, 2.3520, 2.2167], + device='cuda:0'), covar=tensor([0.0567, 0.0499, 0.0520, 0.0947, 0.0469, 0.0585, 0.0492, 0.0454], + device='cuda:0'), in_proj_covar=tensor([0.0083, 0.0092, 0.0089, 0.0115, 0.0084, 0.0087, 0.0085, 0.0079], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 11:37:51,277 INFO [train.py:892] (0/4) Epoch 38, batch 1800, loss[loss=0.15, simple_loss=0.2264, pruned_loss=0.03682, over 19804.00 frames. ], tot_loss[loss=0.1557, simple_loss=0.236, pruned_loss=0.03769, over 3949088.36 frames. ], batch size: 167, lr: 4.08e-03, grad_scale: 16.0 +2023-03-29 11:38:47,964 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70466.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:39:26,453 INFO [train.py:892] (0/4) Epoch 38, batch 1850, loss[loss=0.1473, simple_loss=0.2358, pruned_loss=0.02937, over 19826.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2386, pruned_loss=0.0377, over 3946126.34 frames. ], batch size: 57, lr: 4.08e-03, grad_scale: 16.0 +2023-03-29 11:39:34,163 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-38.pt +2023-03-29 11:40:29,563 INFO [train.py:892] (0/4) Epoch 39, batch 0, loss[loss=0.1384, simple_loss=0.2108, pruned_loss=0.03301, over 19768.00 frames. ], tot_loss[loss=0.1384, simple_loss=0.2108, pruned_loss=0.03301, over 19768.00 frames. ], batch size: 66, lr: 4.03e-03, grad_scale: 16.0 +2023-03-29 11:40:29,564 INFO [train.py:917] (0/4) Computing validation loss +2023-03-29 11:41:05,156 INFO [train.py:926] (0/4) Epoch 39, validation: loss=0.1858, simple_loss=0.25, pruned_loss=0.06079, over 2883724.00 frames. +2023-03-29 11:41:05,158 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-29 11:42:37,525 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.196e+02 3.428e+02 3.863e+02 4.713e+02 6.861e+02, threshold=7.726e+02, percent-clipped=0.0 +2023-03-29 11:42:50,014 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8827, 4.6831, 4.6837, 4.8988, 4.5543, 5.1382, 4.9155, 5.1650], + device='cuda:0'), covar=tensor([0.0866, 0.0562, 0.0703, 0.0511, 0.0843, 0.0592, 0.0780, 0.0495], + device='cuda:0'), in_proj_covar=tensor([0.0162, 0.0186, 0.0210, 0.0185, 0.0185, 0.0169, 0.0160, 0.0209], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 11:43:01,755 INFO [train.py:892] (0/4) Epoch 39, batch 50, loss[loss=0.1485, simple_loss=0.2345, pruned_loss=0.03122, over 19635.00 frames. ], tot_loss[loss=0.1512, simple_loss=0.2323, pruned_loss=0.03502, over 891102.69 frames. ], batch size: 72, lr: 4.03e-03, grad_scale: 16.0 +2023-03-29 11:44:59,647 INFO [train.py:892] (0/4) Epoch 39, batch 100, loss[loss=0.1558, simple_loss=0.244, pruned_loss=0.03379, over 19688.00 frames. ], tot_loss[loss=0.1513, simple_loss=0.2327, pruned_loss=0.03492, over 1568686.91 frames. 
], batch size: 325, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:45:57,233 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70616.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:46:29,512 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.491e+02 3.857e+02 4.460e+02 4.985e+02 1.010e+03, threshold=8.919e+02, percent-clipped=5.0 +2023-03-29 11:46:43,662 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8723, 3.2058, 3.3162, 3.8358, 2.6251, 3.1894, 2.5301, 2.4473], + device='cuda:0'), covar=tensor([0.0604, 0.1943, 0.1068, 0.0512, 0.2102, 0.0968, 0.1486, 0.1803], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0328, 0.0250, 0.0208, 0.0249, 0.0212, 0.0222, 0.0220], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 11:46:53,604 INFO [train.py:892] (0/4) Epoch 39, batch 150, loss[loss=0.1697, simple_loss=0.2622, pruned_loss=0.03858, over 19837.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2348, pruned_loss=0.03594, over 2096069.23 frames. ], batch size: 90, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:48:51,190 INFO [train.py:892] (0/4) Epoch 39, batch 200, loss[loss=0.1402, simple_loss=0.2131, pruned_loss=0.03367, over 19877.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.2356, pruned_loss=0.03674, over 2506536.73 frames. ], batch size: 139, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:49:57,822 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.3697, 2.3940, 2.4753, 2.4667, 2.4772, 2.4614, 2.4803, 2.5654], + device='cuda:0'), covar=tensor([0.0470, 0.0367, 0.0379, 0.0330, 0.0430, 0.0380, 0.0491, 0.0362], + device='cuda:0'), in_proj_covar=tensor([0.0093, 0.0088, 0.0090, 0.0085, 0.0097, 0.0090, 0.0106, 0.0079], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 11:50:22,138 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.699e+02 3.379e+02 4.143e+02 5.094e+02 9.478e+02, threshold=8.286e+02, percent-clipped=1.0 +2023-03-29 11:50:49,046 INFO [train.py:892] (0/4) Epoch 39, batch 250, loss[loss=0.1484, simple_loss=0.2393, pruned_loss=0.02876, over 19584.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2356, pruned_loss=0.0364, over 2824925.03 frames. ], batch size: 53, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:51:47,818 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70766.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:52:29,756 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5850, 2.7138, 2.7433, 2.6701, 2.6542, 2.6991, 2.6594, 2.8425], + device='cuda:0'), covar=tensor([0.0428, 0.0329, 0.0382, 0.0338, 0.0433, 0.0391, 0.0449, 0.0352], + device='cuda:0'), in_proj_covar=tensor([0.0092, 0.0087, 0.0090, 0.0084, 0.0097, 0.0090, 0.0106, 0.0079], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 11:52:42,430 INFO [train.py:892] (0/4) Epoch 39, batch 300, loss[loss=0.1301, simple_loss=0.2109, pruned_loss=0.02467, over 19726.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2351, pruned_loss=0.03573, over 3073082.89 frames. 
], batch size: 99, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:52:55,408 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8292, 3.7358, 3.6723, 3.4675, 3.8435, 2.8219, 3.1834, 1.8516], + device='cuda:0'), covar=tensor([0.0218, 0.0233, 0.0166, 0.0199, 0.0153, 0.1136, 0.0701, 0.1637], + device='cuda:0'), in_proj_covar=tensor([0.0107, 0.0150, 0.0116, 0.0138, 0.0122, 0.0137, 0.0145, 0.0130], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 11:53:37,572 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70814.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:53:42,665 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-03-29 11:54:14,736 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.577e+02 3.476e+02 3.994e+02 4.897e+02 8.420e+02, threshold=7.989e+02, percent-clipped=1.0 +2023-03-29 11:54:43,429 INFO [train.py:892] (0/4) Epoch 39, batch 350, loss[loss=0.1529, simple_loss=0.2416, pruned_loss=0.03205, over 19720.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2354, pruned_loss=0.03611, over 3267275.37 frames. ], batch size: 269, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:56:39,297 INFO [train.py:892] (0/4) Epoch 39, batch 400, loss[loss=0.1256, simple_loss=0.2023, pruned_loss=0.02445, over 19801.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2353, pruned_loss=0.03607, over 3418081.36 frames. ], batch size: 211, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:57:38,037 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70916.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 11:58:08,797 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.576e+02 3.611e+02 4.279e+02 5.158e+02 1.128e+03, threshold=8.559e+02, percent-clipped=5.0 +2023-03-29 11:58:31,805 INFO [train.py:892] (0/4) Epoch 39, batch 450, loss[loss=0.1672, simple_loss=0.2504, pruned_loss=0.04195, over 19872.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2359, pruned_loss=0.03638, over 3534867.42 frames. ], batch size: 99, lr: 4.02e-03, grad_scale: 16.0 +2023-03-29 11:59:27,378 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70964.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:00:29,090 INFO [train.py:892] (0/4) Epoch 39, batch 500, loss[loss=0.1316, simple_loss=0.2066, pruned_loss=0.02826, over 19754.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2351, pruned_loss=0.03615, over 3627136.25 frames. ], batch size: 44, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:00:32,123 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70992.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:02:01,495 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.739e+02 3.809e+02 4.349e+02 5.400e+02 1.199e+03, threshold=8.697e+02, percent-clipped=2.0 +2023-03-29 12:02:02,407 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71030.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:02:27,677 INFO [train.py:892] (0/4) Epoch 39, batch 550, loss[loss=0.1497, simple_loss=0.237, pruned_loss=0.0312, over 19926.00 frames. ], tot_loss[loss=0.1539, simple_loss=0.2345, pruned_loss=0.03661, over 3700011.78 frames. 
], batch size: 51, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:02:53,939 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71053.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:04:21,959 INFO [train.py:892] (0/4) Epoch 39, batch 600, loss[loss=0.1505, simple_loss=0.2348, pruned_loss=0.03309, over 19551.00 frames. ], tot_loss[loss=0.1539, simple_loss=0.2345, pruned_loss=0.03666, over 3754506.15 frames. ], batch size: 47, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:04:22,868 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71091.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:04:30,048 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.95 vs. limit=5.0 +2023-03-29 12:05:55,694 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.519e+02 3.574e+02 4.295e+02 5.387e+02 1.048e+03, threshold=8.591e+02, percent-clipped=4.0 +2023-03-29 12:05:59,100 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7759, 3.1766, 3.5555, 3.1297, 3.8362, 3.8051, 4.4945, 5.0057], + device='cuda:0'), covar=tensor([0.0461, 0.1649, 0.1424, 0.2119, 0.1741, 0.1349, 0.0544, 0.0466], + device='cuda:0'), in_proj_covar=tensor([0.0259, 0.0244, 0.0271, 0.0258, 0.0305, 0.0262, 0.0236, 0.0266], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 12:06:26,050 INFO [train.py:892] (0/4) Epoch 39, batch 650, loss[loss=0.1531, simple_loss=0.2417, pruned_loss=0.03226, over 19723.00 frames. ], tot_loss[loss=0.1536, simple_loss=0.2344, pruned_loss=0.03644, over 3795991.28 frames. ], batch size: 76, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:07:21,377 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-03-29 12:07:51,050 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7280, 2.7021, 2.9625, 2.5504, 3.1064, 3.0470, 3.5861, 3.9272], + device='cuda:0'), covar=tensor([0.0753, 0.1780, 0.1702, 0.2369, 0.1642, 0.1579, 0.0707, 0.0647], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0243, 0.0270, 0.0257, 0.0303, 0.0261, 0.0235, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 12:07:51,351 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.62 vs. limit=5.0 +2023-03-29 12:08:25,807 INFO [train.py:892] (0/4) Epoch 39, batch 700, loss[loss=0.1511, simple_loss=0.24, pruned_loss=0.03104, over 19888.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.235, pruned_loss=0.03671, over 3830575.34 frames. ], batch size: 62, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:08:29,625 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.74 vs. limit=5.0 +2023-03-29 12:09:56,520 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.218e+02 3.479e+02 4.128e+02 5.224e+02 8.144e+02, threshold=8.256e+02, percent-clipped=0.0 +2023-03-29 12:10:19,500 INFO [train.py:892] (0/4) Epoch 39, batch 750, loss[loss=0.1508, simple_loss=0.2323, pruned_loss=0.03465, over 19790.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.2343, pruned_loss=0.03638, over 3857533.13 frames. ], batch size: 236, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:12:12,837 INFO [train.py:892] (0/4) Epoch 39, batch 800, loss[loss=0.1314, simple_loss=0.2206, pruned_loss=0.02109, over 19802.00 frames. 
], tot_loss[loss=0.1535, simple_loss=0.2343, pruned_loss=0.03631, over 3878045.60 frames. ], batch size: 105, lr: 4.01e-03, grad_scale: 16.0 +2023-03-29 12:13:43,656 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.446e+02 3.401e+02 4.193e+02 5.158e+02 9.890e+02, threshold=8.386e+02, percent-clipped=3.0 +2023-03-29 12:14:08,925 INFO [train.py:892] (0/4) Epoch 39, batch 850, loss[loss=0.1346, simple_loss=0.2134, pruned_loss=0.02791, over 19698.00 frames. ], tot_loss[loss=0.1534, simple_loss=0.2343, pruned_loss=0.03621, over 3893853.20 frames. ], batch size: 45, lr: 4.00e-03, grad_scale: 16.0 +2023-03-29 12:14:25,013 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71348.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:15:10,952 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6244, 2.1195, 2.4297, 2.7852, 3.2193, 3.3522, 3.2580, 3.2987], + device='cuda:0'), covar=tensor([0.1079, 0.1703, 0.1462, 0.0789, 0.0541, 0.0380, 0.0475, 0.0600], + device='cuda:0'), in_proj_covar=tensor([0.0164, 0.0169, 0.0182, 0.0155, 0.0140, 0.0136, 0.0130, 0.0121], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 12:15:33,099 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7728, 2.3319, 2.5713, 2.9461, 3.3674, 3.5311, 3.4637, 3.4652], + device='cuda:0'), covar=tensor([0.1052, 0.1600, 0.1419, 0.0776, 0.0514, 0.0357, 0.0462, 0.0511], + device='cuda:0'), in_proj_covar=tensor([0.0164, 0.0169, 0.0181, 0.0155, 0.0140, 0.0136, 0.0130, 0.0121], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 12:15:53,092 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71386.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:16:03,890 INFO [train.py:892] (0/4) Epoch 39, batch 900, loss[loss=0.1362, simple_loss=0.2258, pruned_loss=0.02332, over 19823.00 frames. ], tot_loss[loss=0.1529, simple_loss=0.2337, pruned_loss=0.03604, over 3906421.33 frames. ], batch size: 93, lr: 4.00e-03, grad_scale: 16.0 +2023-03-29 12:16:22,989 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6703, 4.3436, 4.4113, 4.1527, 4.6354, 3.1294, 3.8033, 2.2466], + device='cuda:0'), covar=tensor([0.0181, 0.0238, 0.0154, 0.0211, 0.0150, 0.1022, 0.0813, 0.1545], + device='cuda:0'), in_proj_covar=tensor([0.0108, 0.0151, 0.0116, 0.0138, 0.0122, 0.0138, 0.0146, 0.0130], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-29 12:16:59,520 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.26 vs. limit=5.0 +2023-03-29 12:17:36,395 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.339e+02 3.295e+02 3.866e+02 4.930e+02 9.721e+02, threshold=7.731e+02, percent-clipped=3.0 +2023-03-29 12:18:00,860 INFO [train.py:892] (0/4) Epoch 39, batch 950, loss[loss=0.156, simple_loss=0.2492, pruned_loss=0.03142, over 19813.00 frames. ], tot_loss[loss=0.1521, simple_loss=0.2331, pruned_loss=0.03554, over 3917486.87 frames. ], batch size: 50, lr: 4.00e-03, grad_scale: 8.0 +2023-03-29 12:19:56,189 INFO [train.py:892] (0/4) Epoch 39, batch 1000, loss[loss=0.1757, simple_loss=0.2512, pruned_loss=0.05014, over 19773.00 frames. ], tot_loss[loss=0.1523, simple_loss=0.2333, pruned_loss=0.0357, over 3925400.11 frames. 
], batch size: 241, lr: 4.00e-03, grad_scale: 8.0 +2023-03-29 12:21:17,928 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-29 12:21:28,780 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.429e+02 3.687e+02 4.367e+02 5.594e+02 8.534e+02, threshold=8.735e+02, percent-clipped=3.0 +2023-03-29 12:21:52,907 INFO [train.py:892] (0/4) Epoch 39, batch 1050, loss[loss=0.1515, simple_loss=0.2333, pruned_loss=0.03486, over 19754.00 frames. ], tot_loss[loss=0.1539, simple_loss=0.2351, pruned_loss=0.03633, over 3927761.73 frames. ], batch size: 256, lr: 4.00e-03, grad_scale: 8.0 +2023-03-29 12:23:09,888 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-03-29 12:23:33,653 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5647, 2.6827, 3.9745, 3.1399, 3.3137, 3.1103, 2.3389, 2.4395], + device='cuda:0'), covar=tensor([0.1254, 0.3382, 0.0624, 0.1159, 0.1908, 0.1688, 0.2799, 0.2825], + device='cuda:0'), in_proj_covar=tensor([0.0353, 0.0394, 0.0352, 0.0293, 0.0377, 0.0387, 0.0382, 0.0355], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 12:23:45,682 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7806, 2.6963, 2.8010, 2.7665, 2.7988, 2.8781, 2.6530, 2.8303], + device='cuda:0'), covar=tensor([0.0291, 0.0402, 0.0357, 0.0342, 0.0398, 0.0317, 0.0428, 0.0372], + device='cuda:0'), in_proj_covar=tensor([0.0093, 0.0088, 0.0091, 0.0086, 0.0098, 0.0090, 0.0106, 0.0080], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 12:23:51,060 INFO [train.py:892] (0/4) Epoch 39, batch 1100, loss[loss=0.1738, simple_loss=0.2513, pruned_loss=0.04817, over 19835.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2356, pruned_loss=0.03634, over 3933537.60 frames. ], batch size: 161, lr: 4.00e-03, grad_scale: 8.0 +2023-03-29 12:24:27,936 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5304, 2.4981, 2.7568, 2.4235, 2.9277, 2.8534, 3.4476, 3.6822], + device='cuda:0'), covar=tensor([0.0713, 0.1871, 0.1730, 0.2393, 0.1769, 0.1638, 0.0658, 0.0707], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0246, 0.0274, 0.0261, 0.0307, 0.0264, 0.0239, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 12:24:43,341 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.43 vs. limit=5.0 +2023-03-29 12:24:48,567 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71616.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:25:24,245 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.877e+02 3.477e+02 4.222e+02 5.368e+02 8.846e+02, threshold=8.443e+02, percent-clipped=1.0 +2023-03-29 12:25:46,049 INFO [train.py:892] (0/4) Epoch 39, batch 1150, loss[loss=0.1422, simple_loss=0.2165, pruned_loss=0.03401, over 19831.00 frames. ], tot_loss[loss=0.1536, simple_loss=0.2347, pruned_loss=0.0363, over 3937844.41 frames. 
], batch size: 177, lr: 4.00e-03, grad_scale: 8.0 +2023-03-29 12:26:02,217 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71648.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:26:28,811 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2231, 3.2320, 2.0531, 3.7684, 3.3915, 3.7226, 3.7979, 2.9832], + device='cuda:0'), covar=tensor([0.0653, 0.0700, 0.1806, 0.0751, 0.0697, 0.0589, 0.0613, 0.0920], + device='cuda:0'), in_proj_covar=tensor([0.0148, 0.0148, 0.0146, 0.0158, 0.0137, 0.0142, 0.0152, 0.0151], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 12:27:08,293 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71677.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:27:26,035 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71686.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:27:38,332 INFO [train.py:892] (0/4) Epoch 39, batch 1200, loss[loss=0.1458, simple_loss=0.2202, pruned_loss=0.03573, over 19866.00 frames. ], tot_loss[loss=0.154, simple_loss=0.2352, pruned_loss=0.03641, over 3939099.72 frames. ], batch size: 129, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:27:51,585 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71696.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:29:12,306 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.421e+02 3.452e+02 4.248e+02 4.889e+02 8.041e+02, threshold=8.496e+02, percent-clipped=0.0 +2023-03-29 12:29:21,101 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71734.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:29:35,335 INFO [train.py:892] (0/4) Epoch 39, batch 1250, loss[loss=0.1551, simple_loss=0.2298, pruned_loss=0.0402, over 19836.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2346, pruned_loss=0.03654, over 3941591.19 frames. ], batch size: 171, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:31:32,670 INFO [train.py:892] (0/4) Epoch 39, batch 1300, loss[loss=0.1561, simple_loss=0.23, pruned_loss=0.04107, over 19789.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2356, pruned_loss=0.03677, over 3940842.72 frames. ], batch size: 87, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:33:04,689 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.312e+02 3.787e+02 4.345e+02 5.084e+02 1.281e+03, threshold=8.689e+02, percent-clipped=1.0 +2023-03-29 12:33:29,417 INFO [train.py:892] (0/4) Epoch 39, batch 1350, loss[loss=0.1412, simple_loss=0.2257, pruned_loss=0.02832, over 19785.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2351, pruned_loss=0.03665, over 3943835.17 frames. 
], batch size: 211, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:34:22,300 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3394, 2.5654, 2.7065, 2.4052, 2.8759, 2.8804, 3.1864, 3.4603], + device='cuda:0'), covar=tensor([0.0738, 0.1610, 0.1696, 0.2283, 0.1488, 0.1426, 0.0774, 0.0670], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0246, 0.0274, 0.0262, 0.0308, 0.0266, 0.0239, 0.0269], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 12:34:30,853 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6625, 4.4822, 5.0125, 4.4852, 4.1380, 4.8007, 4.6860, 5.1169], + device='cuda:0'), covar=tensor([0.0780, 0.0379, 0.0325, 0.0359, 0.0841, 0.0500, 0.0415, 0.0305], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0227, 0.0227, 0.0239, 0.0210, 0.0251, 0.0240, 0.0225], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 12:35:29,334 INFO [train.py:892] (0/4) Epoch 39, batch 1400, loss[loss=0.1589, simple_loss=0.2386, pruned_loss=0.0396, over 19760.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2354, pruned_loss=0.03691, over 3943378.78 frames. ], batch size: 273, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:36:37,972 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71919.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:37:03,101 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.539e+02 3.590e+02 4.169e+02 5.056e+02 8.405e+02, threshold=8.337e+02, percent-clipped=0.0 +2023-03-29 12:37:24,621 INFO [train.py:892] (0/4) Epoch 39, batch 1450, loss[loss=0.2219, simple_loss=0.3033, pruned_loss=0.07025, over 19462.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2353, pruned_loss=0.03648, over 3945406.01 frames. ], batch size: 396, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:37:28,860 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1839, 2.0764, 2.2284, 2.2355, 2.2233, 2.2576, 2.2564, 2.2720], + device='cuda:0'), covar=tensor([0.0447, 0.0468, 0.0380, 0.0384, 0.0485, 0.0395, 0.0467, 0.0416], + device='cuda:0'), in_proj_covar=tensor([0.0094, 0.0089, 0.0091, 0.0085, 0.0098, 0.0091, 0.0106, 0.0079], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 12:38:39,082 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71972.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:38:58,405 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71980.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:39:24,735 INFO [train.py:892] (0/4) Epoch 39, batch 1500, loss[loss=0.2034, simple_loss=0.2843, pruned_loss=0.0613, over 19643.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.2356, pruned_loss=0.03655, over 3945691.22 frames. 
], batch size: 351, lr: 3.99e-03, grad_scale: 8.0 +2023-03-29 12:39:48,204 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-72000.pt +2023-03-29 12:40:25,723 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3938, 3.0581, 3.4765, 2.9508, 3.6335, 3.5415, 4.2375, 4.6653], + device='cuda:0'), covar=tensor([0.0525, 0.1701, 0.1440, 0.2227, 0.1604, 0.1552, 0.0606, 0.0529], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0246, 0.0274, 0.0261, 0.0307, 0.0266, 0.0240, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 12:40:36,442 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-29 12:40:49,397 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8421, 3.7951, 3.6912, 3.4510, 3.8705, 2.7767, 3.1006, 1.7972], + device='cuda:0'), covar=tensor([0.0316, 0.0292, 0.0232, 0.0284, 0.0251, 0.1434, 0.0975, 0.2150], + device='cuda:0'), in_proj_covar=tensor([0.0107, 0.0150, 0.0116, 0.0137, 0.0122, 0.0136, 0.0145, 0.0129], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 12:41:04,304 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.372e+02 3.694e+02 4.331e+02 4.992e+02 8.331e+02, threshold=8.661e+02, percent-clipped=0.0 +2023-03-29 12:41:29,070 INFO [train.py:892] (0/4) Epoch 39, batch 1550, loss[loss=0.1669, simple_loss=0.2519, pruned_loss=0.041, over 19799.00 frames. ], tot_loss[loss=0.154, simple_loss=0.2355, pruned_loss=0.03628, over 3947172.64 frames. ], batch size: 86, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:42:10,407 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4425, 4.4979, 4.8019, 4.5236, 4.7219, 4.2969, 4.5789, 4.2952], + device='cuda:0'), covar=tensor([0.1584, 0.1933, 0.1013, 0.1516, 0.0918, 0.1088, 0.1955, 0.2263], + device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0347, 0.0383, 0.0311, 0.0287, 0.0294, 0.0373, 0.0401], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 12:42:40,605 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2752, 3.6659, 3.1004, 2.6779, 3.1782, 3.4975, 3.4575, 3.5047], + device='cuda:0'), covar=tensor([0.0262, 0.0236, 0.0288, 0.0473, 0.0320, 0.0336, 0.0248, 0.0203], + device='cuda:0'), in_proj_covar=tensor([0.0113, 0.0106, 0.0107, 0.0108, 0.0111, 0.0095, 0.0096, 0.0095], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 12:43:25,413 INFO [train.py:892] (0/4) Epoch 39, batch 1600, loss[loss=0.1426, simple_loss=0.2226, pruned_loss=0.0313, over 19767.00 frames. ], tot_loss[loss=0.1528, simple_loss=0.2345, pruned_loss=0.03552, over 3948532.53 frames. ], batch size: 188, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:44:50,130 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72129.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:44:53,592 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.272e+02 3.496e+02 4.151e+02 4.701e+02 9.677e+02, threshold=8.302e+02, percent-clipped=1.0 +2023-03-29 12:45:19,119 INFO [train.py:892] (0/4) Epoch 39, batch 1650, loss[loss=0.1453, simple_loss=0.2318, pruned_loss=0.02936, over 19759.00 frames. 
], tot_loss[loss=0.151, simple_loss=0.2323, pruned_loss=0.03487, over 3950020.43 frames. ], batch size: 49, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:46:19,397 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4169, 3.3066, 5.0071, 3.6448, 3.9000, 3.7862, 2.6066, 2.9087], + device='cuda:0'), covar=tensor([0.0848, 0.2908, 0.0369, 0.1023, 0.1716, 0.1465, 0.2711, 0.2446], + device='cuda:0'), in_proj_covar=tensor([0.0353, 0.0393, 0.0352, 0.0293, 0.0376, 0.0387, 0.0383, 0.0356], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 12:46:38,652 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9972, 3.8765, 4.2497, 3.8663, 3.6669, 4.1126, 3.9472, 4.2963], + device='cuda:0'), covar=tensor([0.0753, 0.0382, 0.0372, 0.0389, 0.1085, 0.0569, 0.0526, 0.0365], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0228, 0.0228, 0.0241, 0.0211, 0.0253, 0.0241, 0.0225], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 12:47:09,789 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72190.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:47:10,944 INFO [train.py:892] (0/4) Epoch 39, batch 1700, loss[loss=0.164, simple_loss=0.243, pruned_loss=0.04248, over 19702.00 frames. ], tot_loss[loss=0.1522, simple_loss=0.2332, pruned_loss=0.03561, over 3949886.81 frames. ], batch size: 60, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:48:41,500 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.323e+02 3.706e+02 4.408e+02 5.454e+02 1.029e+03, threshold=8.816e+02, percent-clipped=3.0 +2023-03-29 12:49:01,615 INFO [train.py:892] (0/4) Epoch 39, batch 1750, loss[loss=0.1512, simple_loss=0.2398, pruned_loss=0.03135, over 19914.00 frames. ], tot_loss[loss=0.1523, simple_loss=0.2331, pruned_loss=0.03572, over 3950650.32 frames. ], batch size: 45, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:50:03,069 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72272.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:50:08,992 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72275.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:50:29,203 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6988, 3.5786, 3.8873, 2.9878, 4.0868, 3.2596, 3.5944, 4.0536], + device='cuda:0'), covar=tensor([0.0778, 0.0353, 0.0506, 0.0792, 0.0468, 0.0441, 0.0478, 0.0305], + device='cuda:0'), in_proj_covar=tensor([0.0083, 0.0092, 0.0089, 0.0114, 0.0085, 0.0088, 0.0085, 0.0079], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 12:50:40,131 INFO [train.py:892] (0/4) Epoch 39, batch 1800, loss[loss=0.1475, simple_loss=0.2284, pruned_loss=0.03326, over 19897.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2338, pruned_loss=0.03634, over 3949765.13 frames. 
], batch size: 87, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:51:36,782 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72320.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:51:37,038 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6865, 3.4507, 3.7234, 2.4640, 3.9317, 3.1866, 3.3662, 3.9666], + device='cuda:0'), covar=tensor([0.0667, 0.0443, 0.0479, 0.1119, 0.0385, 0.0468, 0.0552, 0.0257], + device='cuda:0'), in_proj_covar=tensor([0.0083, 0.0092, 0.0089, 0.0115, 0.0085, 0.0088, 0.0085, 0.0079], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 12:51:47,471 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72326.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:51:57,375 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.312e+02 3.449e+02 3.996e+02 5.050e+02 9.624e+02, threshold=7.991e+02, percent-clipped=1.0 +2023-03-29 12:52:15,294 INFO [train.py:892] (0/4) Epoch 39, batch 1850, loss[loss=0.1597, simple_loss=0.2489, pruned_loss=0.03525, over 19844.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2355, pruned_loss=0.03635, over 3946714.94 frames. ], batch size: 57, lr: 3.98e-03, grad_scale: 8.0 +2023-03-29 12:52:16,152 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2272, 3.4064, 2.9277, 2.5952, 3.0338, 3.3208, 3.4115, 3.3412], + device='cuda:0'), covar=tensor([0.0274, 0.0310, 0.0320, 0.0494, 0.0340, 0.0315, 0.0198, 0.0219], + device='cuda:0'), in_proj_covar=tensor([0.0113, 0.0106, 0.0107, 0.0108, 0.0111, 0.0095, 0.0096, 0.0095], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 12:52:23,034 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-39.pt +2023-03-29 12:53:18,917 INFO [train.py:892] (0/4) Epoch 40, batch 0, loss[loss=0.1359, simple_loss=0.2132, pruned_loss=0.02934, over 19872.00 frames. ], tot_loss[loss=0.1359, simple_loss=0.2132, pruned_loss=0.02934, over 19872.00 frames. ], batch size: 138, lr: 3.93e-03, grad_scale: 8.0 +2023-03-29 12:53:18,918 INFO [train.py:917] (0/4) Computing validation loss +2023-03-29 12:53:42,823 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5818, 2.6077, 4.0360, 3.0643, 3.3596, 3.0089, 2.3215, 2.3767], + device='cuda:0'), covar=tensor([0.1351, 0.3508, 0.0619, 0.1224, 0.2028, 0.1855, 0.3024, 0.3368], + device='cuda:0'), in_proj_covar=tensor([0.0354, 0.0394, 0.0352, 0.0294, 0.0377, 0.0388, 0.0384, 0.0357], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 12:53:44,833 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7787, 3.3360, 3.6246, 3.1085, 3.7643, 3.7537, 4.5332, 5.0124], + device='cuda:0'), covar=tensor([0.0410, 0.1714, 0.1357, 0.2277, 0.1613, 0.1574, 0.0503, 0.0391], + device='cuda:0'), in_proj_covar=tensor([0.0260, 0.0245, 0.0274, 0.0260, 0.0307, 0.0265, 0.0239, 0.0267], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 12:53:52,920 INFO [train.py:926] (0/4) Epoch 40, validation: loss=0.1851, simple_loss=0.2491, pruned_loss=0.0605, over 2883724.00 frames. 
+2023-03-29 12:53:52,922 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-29 12:54:43,560 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.81 vs. limit=5.0 +2023-03-29 12:55:00,981 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3932, 4.3635, 4.7595, 4.5073, 4.6882, 4.3057, 4.5218, 4.2700], + device='cuda:0'), covar=tensor([0.1533, 0.1929, 0.0949, 0.1411, 0.0856, 0.1042, 0.1931, 0.2193], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0346, 0.0381, 0.0313, 0.0287, 0.0294, 0.0372, 0.0401], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 12:55:30,598 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72387.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:55:53,220 INFO [train.py:892] (0/4) Epoch 40, batch 50, loss[loss=0.1401, simple_loss=0.2206, pruned_loss=0.02983, over 19681.00 frames. ], tot_loss[loss=0.1448, simple_loss=0.2247, pruned_loss=0.03244, over 892300.80 frames. ], batch size: 75, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 12:57:04,559 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3765, 4.8894, 4.9806, 4.7163, 5.2956, 3.2318, 4.2226, 2.4467], + device='cuda:0'), covar=tensor([0.0157, 0.0189, 0.0144, 0.0177, 0.0133, 0.0993, 0.0895, 0.1635], + device='cuda:0'), in_proj_covar=tensor([0.0108, 0.0151, 0.0116, 0.0137, 0.0122, 0.0137, 0.0145, 0.0130], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 12:57:12,957 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.328e+02 3.429e+02 4.154e+02 4.820e+02 1.115e+03, threshold=8.307e+02, percent-clipped=2.0 +2023-03-29 12:57:46,513 INFO [train.py:892] (0/4) Epoch 40, batch 100, loss[loss=0.1312, simple_loss=0.2058, pruned_loss=0.02825, over 19883.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2308, pruned_loss=0.03485, over 1570201.62 frames. ], batch size: 110, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 12:58:34,216 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-29 12:59:12,832 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-29 12:59:14,213 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72485.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 12:59:37,246 INFO [train.py:892] (0/4) Epoch 40, batch 150, loss[loss=0.1501, simple_loss=0.2279, pruned_loss=0.03617, over 19877.00 frames. ], tot_loss[loss=0.1511, simple_loss=0.2312, pruned_loss=0.03548, over 2097514.53 frames. 
], batch size: 139, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 13:00:58,606 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.053e+02 3.352e+02 3.945e+02 4.917e+02 7.870e+02, threshold=7.891e+02, percent-clipped=0.0 +2023-03-29 13:01:01,931 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9058, 2.9279, 1.9015, 3.4055, 3.1426, 3.2976, 3.4140, 2.7172], + device='cuda:0'), covar=tensor([0.0717, 0.0748, 0.1595, 0.0725, 0.0685, 0.0532, 0.0669, 0.0920], + device='cuda:0'), in_proj_covar=tensor([0.0150, 0.0151, 0.0147, 0.0161, 0.0139, 0.0144, 0.0155, 0.0153], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 13:01:36,362 INFO [train.py:892] (0/4) Epoch 40, batch 200, loss[loss=0.1496, simple_loss=0.2271, pruned_loss=0.03604, over 19782.00 frames. ], tot_loss[loss=0.1511, simple_loss=0.2315, pruned_loss=0.03533, over 2509427.41 frames. ], batch size: 213, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 13:02:44,201 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72575.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:03:24,720 INFO [train.py:892] (0/4) Epoch 40, batch 250, loss[loss=0.1457, simple_loss=0.2297, pruned_loss=0.0308, over 19761.00 frames. ], tot_loss[loss=0.1501, simple_loss=0.2306, pruned_loss=0.03476, over 2828457.66 frames. ], batch size: 70, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 13:04:28,473 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72623.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:04:44,498 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.269e+02 3.429e+02 3.885e+02 4.899e+02 8.561e+02, threshold=7.769e+02, percent-clipped=1.0 +2023-03-29 13:05:16,290 INFO [train.py:892] (0/4) Epoch 40, batch 300, loss[loss=0.139, simple_loss=0.2133, pruned_loss=0.03233, over 19872.00 frames. ], tot_loss[loss=0.1507, simple_loss=0.2314, pruned_loss=0.03503, over 3077765.67 frames. ], batch size: 138, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 13:06:40,535 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72682.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:07:13,656 INFO [train.py:892] (0/4) Epoch 40, batch 350, loss[loss=0.143, simple_loss=0.2259, pruned_loss=0.03008, over 19648.00 frames. ], tot_loss[loss=0.1511, simple_loss=0.2316, pruned_loss=0.0353, over 3271438.99 frames. ], batch size: 72, lr: 3.92e-03, grad_scale: 8.0 +2023-03-29 13:08:29,523 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.535e+02 3.481e+02 4.095e+02 5.199e+02 1.202e+03, threshold=8.190e+02, percent-clipped=3.0 +2023-03-29 13:08:53,006 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-03-29 13:09:02,636 INFO [train.py:892] (0/4) Epoch 40, batch 400, loss[loss=0.1419, simple_loss=0.2321, pruned_loss=0.02586, over 19825.00 frames. ], tot_loss[loss=0.1514, simple_loss=0.2321, pruned_loss=0.03536, over 3420705.66 frames. 
], batch size: 57, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:09:03,548 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0102, 4.8175, 5.3401, 4.8431, 4.2935, 5.0930, 4.9832, 5.4995], + device='cuda:0'), covar=tensor([0.0764, 0.0349, 0.0351, 0.0344, 0.0831, 0.0492, 0.0437, 0.0319], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0228, 0.0228, 0.0241, 0.0211, 0.0253, 0.0242, 0.0226], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 13:09:05,831 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1617, 2.8723, 3.2660, 3.3994, 3.8347, 4.4378, 4.2253, 4.3004], + device='cuda:0'), covar=tensor([0.0915, 0.1487, 0.1285, 0.0640, 0.0423, 0.0216, 0.0322, 0.0381], + device='cuda:0'), in_proj_covar=tensor([0.0166, 0.0171, 0.0184, 0.0156, 0.0143, 0.0137, 0.0131, 0.0121], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 13:10:20,312 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.4811, 4.9456, 5.0198, 4.7892, 5.4207, 3.3797, 4.2375, 3.0110], + device='cuda:0'), covar=tensor([0.0160, 0.0214, 0.0140, 0.0187, 0.0138, 0.0919, 0.0912, 0.1307], + device='cuda:0'), in_proj_covar=tensor([0.0108, 0.0151, 0.0117, 0.0139, 0.0123, 0.0138, 0.0146, 0.0131], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-29 13:10:34,168 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72785.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:10:56,581 INFO [train.py:892] (0/4) Epoch 40, batch 450, loss[loss=0.1382, simple_loss=0.2213, pruned_loss=0.02753, over 19878.00 frames. ], tot_loss[loss=0.1528, simple_loss=0.2338, pruned_loss=0.03586, over 3537224.28 frames. ], batch size: 47, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:11:20,906 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7203, 2.9703, 3.2110, 3.5811, 2.4968, 2.9786, 2.5183, 2.3744], + device='cuda:0'), covar=tensor([0.0632, 0.1729, 0.1051, 0.0562, 0.2162, 0.1042, 0.1419, 0.1752], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0327, 0.0251, 0.0208, 0.0250, 0.0213, 0.0222, 0.0219], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 13:12:04,971 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.8572, 6.1231, 6.1829, 5.9891, 5.9371, 6.1249, 5.4463, 5.4665], + device='cuda:0'), covar=tensor([0.0386, 0.0417, 0.0411, 0.0417, 0.0471, 0.0504, 0.0689, 0.0937], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0303, 0.0312, 0.0273, 0.0281, 0.0262, 0.0279, 0.0325], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 13:12:17,044 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.344e+02 3.525e+02 4.138e+02 5.025e+02 1.485e+03, threshold=8.275e+02, percent-clipped=3.0 +2023-03-29 13:12:23,692 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72833.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:12:56,046 INFO [train.py:892] (0/4) Epoch 40, batch 500, loss[loss=0.1435, simple_loss=0.2141, pruned_loss=0.03651, over 19827.00 frames. ], tot_loss[loss=0.152, simple_loss=0.2328, pruned_loss=0.03562, over 3628848.43 frames. 
], batch size: 121, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:14:11,155 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.7679, 1.6927, 1.8591, 1.8592, 1.7800, 1.7860, 1.6909, 1.8519], + device='cuda:0'), covar=tensor([0.0446, 0.0398, 0.0364, 0.0354, 0.0533, 0.0423, 0.0532, 0.0374], + device='cuda:0'), in_proj_covar=tensor([0.0094, 0.0088, 0.0090, 0.0085, 0.0098, 0.0090, 0.0107, 0.0079], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 13:14:44,530 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72894.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:14:47,528 INFO [train.py:892] (0/4) Epoch 40, batch 550, loss[loss=0.1657, simple_loss=0.2467, pruned_loss=0.04235, over 19762.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2339, pruned_loss=0.0364, over 3699427.47 frames. ], batch size: 163, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:16:09,032 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.263e+02 3.703e+02 4.460e+02 5.204e+02 8.287e+02, threshold=8.920e+02, percent-clipped=1.0 +2023-03-29 13:16:45,778 INFO [train.py:892] (0/4) Epoch 40, batch 600, loss[loss=0.1585, simple_loss=0.2347, pruned_loss=0.04116, over 19818.00 frames. ], tot_loss[loss=0.153, simple_loss=0.2335, pruned_loss=0.0363, over 3755468.53 frames. ], batch size: 121, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:17:06,961 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72955.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:17:26,606 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72964.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 13:18:08,824 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72982.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:18:38,514 INFO [train.py:892] (0/4) Epoch 40, batch 650, loss[loss=0.1838, simple_loss=0.2615, pruned_loss=0.0531, over 19790.00 frames. ], tot_loss[loss=0.1525, simple_loss=0.2326, pruned_loss=0.03613, over 3799423.02 frames. 
], batch size: 40, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:19:43,158 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73025.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 13:19:45,452 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6129, 4.6799, 2.7370, 4.8846, 5.1155, 2.2213, 4.3814, 3.6958], + device='cuda:0'), covar=tensor([0.0568, 0.0639, 0.2632, 0.0619, 0.0522, 0.2596, 0.0785, 0.0826], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0268, 0.0238, 0.0286, 0.0268, 0.0209, 0.0246, 0.0208], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 13:19:55,719 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73030.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:19:57,046 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.367e+02 3.602e+02 4.054e+02 4.696e+02 9.081e+02, threshold=8.108e+02, percent-clipped=1.0 +2023-03-29 13:20:13,286 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7963, 4.5560, 4.5815, 4.8147, 4.5869, 4.9657, 4.8889, 5.1116], + device='cuda:0'), covar=tensor([0.0672, 0.0394, 0.0425, 0.0340, 0.0693, 0.0452, 0.0443, 0.0262], + device='cuda:0'), in_proj_covar=tensor([0.0158, 0.0184, 0.0206, 0.0182, 0.0181, 0.0165, 0.0157, 0.0204], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 13:20:29,390 INFO [train.py:892] (0/4) Epoch 40, batch 700, loss[loss=0.1427, simple_loss=0.2226, pruned_loss=0.03138, over 19857.00 frames. ], tot_loss[loss=0.1517, simple_loss=0.2323, pruned_loss=0.03559, over 3834069.53 frames. ], batch size: 56, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:22:25,644 INFO [train.py:892] (0/4) Epoch 40, batch 750, loss[loss=0.155, simple_loss=0.2389, pruned_loss=0.03555, over 19656.00 frames. ], tot_loss[loss=0.1519, simple_loss=0.2326, pruned_loss=0.03556, over 3859360.05 frames. ], batch size: 69, lr: 3.91e-03, grad_scale: 8.0 +2023-03-29 13:22:52,188 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2935, 2.4159, 3.7441, 2.8749, 3.0293, 2.8107, 2.2052, 2.3725], + device='cuda:0'), covar=tensor([0.1448, 0.3725, 0.0715, 0.1392, 0.2348, 0.2058, 0.3092, 0.2961], + device='cuda:0'), in_proj_covar=tensor([0.0357, 0.0399, 0.0356, 0.0297, 0.0382, 0.0391, 0.0387, 0.0360], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 13:23:43,969 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.740e+02 3.647e+02 4.174e+02 4.953e+02 1.017e+03, threshold=8.348e+02, percent-clipped=2.0 +2023-03-29 13:24:07,037 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-03-29 13:24:18,110 INFO [train.py:892] (0/4) Epoch 40, batch 800, loss[loss=0.1602, simple_loss=0.2262, pruned_loss=0.04709, over 19738.00 frames. ], tot_loss[loss=0.1519, simple_loss=0.2327, pruned_loss=0.03556, over 3880335.52 frames. 
], batch size: 134, lr: 3.90e-03, grad_scale: 8.0 +2023-03-29 13:25:14,382 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4812, 2.8196, 2.4435, 1.9310, 2.5248, 2.7988, 2.6635, 2.7589], + device='cuda:0'), covar=tensor([0.0419, 0.0313, 0.0362, 0.0622, 0.0403, 0.0276, 0.0317, 0.0262], + device='cuda:0'), in_proj_covar=tensor([0.0113, 0.0106, 0.0107, 0.0107, 0.0110, 0.0095, 0.0096, 0.0095], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 13:25:54,954 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7518, 2.1794, 2.5569, 2.9826, 3.3994, 3.5674, 3.5147, 3.5267], + device='cuda:0'), covar=tensor([0.1003, 0.1700, 0.1426, 0.0711, 0.0460, 0.0324, 0.0401, 0.0466], + device='cuda:0'), in_proj_covar=tensor([0.0165, 0.0171, 0.0184, 0.0156, 0.0143, 0.0138, 0.0131, 0.0122], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 13:26:08,379 INFO [train.py:892] (0/4) Epoch 40, batch 850, loss[loss=0.1492, simple_loss=0.2384, pruned_loss=0.02995, over 19610.00 frames. ], tot_loss[loss=0.1519, simple_loss=0.2328, pruned_loss=0.03549, over 3895626.19 frames. ], batch size: 48, lr: 3.90e-03, grad_scale: 8.0 +2023-03-29 13:26:18,647 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3997, 3.5239, 2.1830, 4.0693, 3.7330, 4.0289, 4.1140, 3.2743], + device='cuda:0'), covar=tensor([0.0644, 0.0643, 0.1683, 0.0616, 0.0608, 0.0412, 0.0547, 0.0802], + device='cuda:0'), in_proj_covar=tensor([0.0150, 0.0151, 0.0147, 0.0161, 0.0139, 0.0144, 0.0155, 0.0153], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 13:27:30,139 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.270e+02 3.428e+02 4.036e+02 4.943e+02 8.954e+02, threshold=8.072e+02, percent-clipped=2.0 +2023-03-29 13:27:35,282 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3691, 3.0811, 3.3846, 2.9662, 3.6140, 3.5003, 4.0994, 4.5839], + device='cuda:0'), covar=tensor([0.0494, 0.1661, 0.1440, 0.2057, 0.1488, 0.1409, 0.0619, 0.0458], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0247, 0.0275, 0.0260, 0.0305, 0.0265, 0.0240, 0.0267], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 13:28:06,253 INFO [train.py:892] (0/4) Epoch 40, batch 900, loss[loss=0.1547, simple_loss=0.2389, pruned_loss=0.03523, over 19762.00 frames. ], tot_loss[loss=0.1514, simple_loss=0.2322, pruned_loss=0.03533, over 3907574.32 frames. 
], batch size: 263, lr: 3.90e-03, grad_scale: 8.0 +2023-03-29 13:28:17,286 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73250.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:28:32,222 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6338, 3.5158, 3.8596, 3.4921, 3.3550, 3.8117, 3.6219, 3.9254], + device='cuda:0'), covar=tensor([0.0815, 0.0396, 0.0401, 0.0444, 0.1350, 0.0608, 0.0534, 0.0422], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0230, 0.0229, 0.0242, 0.0212, 0.0254, 0.0243, 0.0228], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 13:28:36,611 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9151, 3.9664, 2.4177, 4.1101, 4.3291, 1.9724, 3.6049, 3.2556], + device='cuda:0'), covar=tensor([0.0739, 0.0844, 0.2780, 0.0881, 0.0555, 0.2824, 0.1058, 0.0931], + device='cuda:0'), in_proj_covar=tensor([0.0240, 0.0266, 0.0236, 0.0285, 0.0265, 0.0207, 0.0244, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 13:29:53,849 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73293.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:29:59,525 INFO [train.py:892] (0/4) Epoch 40, batch 950, loss[loss=0.2038, simple_loss=0.2889, pruned_loss=0.05929, over 19603.00 frames. ], tot_loss[loss=0.1526, simple_loss=0.2339, pruned_loss=0.03568, over 3914750.98 frames. ], batch size: 367, lr: 3.90e-03, grad_scale: 8.0 +2023-03-29 13:30:56,306 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73320.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 13:31:21,659 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.304e+02 3.673e+02 4.396e+02 5.131e+02 1.124e+03, threshold=8.792e+02, percent-clipped=1.0 +2023-03-29 13:31:53,785 INFO [train.py:892] (0/4) Epoch 40, batch 1000, loss[loss=0.1558, simple_loss=0.2374, pruned_loss=0.03715, over 19696.00 frames. ], tot_loss[loss=0.1524, simple_loss=0.2333, pruned_loss=0.03577, over 3922843.30 frames. ], batch size: 265, lr: 3.90e-03, grad_scale: 8.0 +2023-03-29 13:32:15,105 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73354.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:32:17,360 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1595, 4.2641, 2.6249, 4.4143, 4.6280, 2.0326, 3.7871, 3.5020], + device='cuda:0'), covar=tensor([0.0627, 0.0747, 0.2613, 0.0685, 0.0468, 0.2786, 0.1161, 0.0818], + device='cuda:0'), in_proj_covar=tensor([0.0240, 0.0265, 0.0236, 0.0285, 0.0266, 0.0208, 0.0244, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 13:33:49,235 INFO [train.py:892] (0/4) Epoch 40, batch 1050, loss[loss=0.1626, simple_loss=0.2437, pruned_loss=0.04072, over 19942.00 frames. ], tot_loss[loss=0.1528, simple_loss=0.234, pruned_loss=0.03583, over 3929192.88 frames. 
], batch size: 52, lr: 3.90e-03, grad_scale: 8.0 +2023-03-29 13:34:58,632 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.5551, 5.7855, 5.8533, 5.7185, 5.6276, 5.8384, 5.1771, 5.2289], + device='cuda:0'), covar=tensor([0.0425, 0.0475, 0.0486, 0.0429, 0.0564, 0.0470, 0.0698, 0.0922], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0305, 0.0312, 0.0273, 0.0283, 0.0262, 0.0279, 0.0325], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 13:35:09,648 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.437e+02 3.473e+02 4.249e+02 4.984e+02 7.164e+02, threshold=8.499e+02, percent-clipped=0.0 +2023-03-29 13:35:15,725 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5047, 2.7765, 4.7084, 4.0713, 4.3990, 4.6515, 4.4579, 4.3560], + device='cuda:0'), covar=tensor([0.0591, 0.1002, 0.0102, 0.0830, 0.0150, 0.0213, 0.0167, 0.0162], + device='cuda:0'), in_proj_covar=tensor([0.0103, 0.0106, 0.0091, 0.0153, 0.0089, 0.0102, 0.0092, 0.0089], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 13:35:39,515 INFO [train.py:892] (0/4) Epoch 40, batch 1100, loss[loss=0.1466, simple_loss=0.2311, pruned_loss=0.03109, over 19781.00 frames. ], tot_loss[loss=0.153, simple_loss=0.2343, pruned_loss=0.03588, over 3933327.33 frames. ], batch size: 53, lr: 3.90e-03, grad_scale: 16.0 +2023-03-29 13:36:38,007 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0299, 3.3392, 3.5429, 3.9884, 2.8333, 3.3035, 2.6696, 2.6161], + device='cuda:0'), covar=tensor([0.0535, 0.1915, 0.0990, 0.0419, 0.1998, 0.0926, 0.1296, 0.1712], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0329, 0.0253, 0.0209, 0.0252, 0.0214, 0.0224, 0.0221], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 13:36:53,321 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0747, 3.0233, 4.7228, 3.4772, 3.7534, 3.4568, 2.5442, 2.7235], + device='cuda:0'), covar=tensor([0.1114, 0.3375, 0.0470, 0.1195, 0.1749, 0.1841, 0.2793, 0.2860], + device='cuda:0'), in_proj_covar=tensor([0.0359, 0.0400, 0.0357, 0.0297, 0.0382, 0.0393, 0.0389, 0.0361], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 13:37:30,725 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-29 13:37:31,464 INFO [train.py:892] (0/4) Epoch 40, batch 1150, loss[loss=0.1412, simple_loss=0.2228, pruned_loss=0.02982, over 19859.00 frames. ], tot_loss[loss=0.1517, simple_loss=0.2325, pruned_loss=0.03547, over 3938248.18 frames. 
], batch size: 112, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:38:04,352 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0108, 2.8731, 3.1639, 2.7448, 3.2709, 3.2217, 3.8571, 4.2342], + device='cuda:0'), covar=tensor([0.0614, 0.1753, 0.1572, 0.2308, 0.1695, 0.1546, 0.0662, 0.0528], + device='cuda:0'), in_proj_covar=tensor([0.0263, 0.0249, 0.0277, 0.0262, 0.0308, 0.0267, 0.0242, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 13:38:51,535 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.496e+02 3.768e+02 4.402e+02 5.430e+02 9.903e+02, threshold=8.803e+02, percent-clipped=3.0 +2023-03-29 13:39:27,923 INFO [train.py:892] (0/4) Epoch 40, batch 1200, loss[loss=0.1298, simple_loss=0.215, pruned_loss=0.0223, over 19870.00 frames. ], tot_loss[loss=0.1521, simple_loss=0.2327, pruned_loss=0.03573, over 3941388.51 frames. ], batch size: 92, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:39:37,109 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73550.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:40:59,027 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4151, 2.6199, 3.7752, 3.0147, 3.1366, 2.9792, 2.2261, 2.3783], + device='cuda:0'), covar=tensor([0.1239, 0.3031, 0.0621, 0.1124, 0.1908, 0.1643, 0.2709, 0.2775], + device='cuda:0'), in_proj_covar=tensor([0.0356, 0.0397, 0.0355, 0.0296, 0.0380, 0.0391, 0.0386, 0.0359], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 13:41:06,762 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73590.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:41:18,981 INFO [train.py:892] (0/4) Epoch 40, batch 1250, loss[loss=0.1401, simple_loss=0.2136, pruned_loss=0.03332, over 19840.00 frames. ], tot_loss[loss=0.1509, simple_loss=0.2312, pruned_loss=0.03526, over 3944445.15 frames. ], batch size: 160, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:41:23,968 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73598.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:42:14,282 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73620.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 13:42:38,866 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.357e+02 3.537e+02 4.071e+02 5.247e+02 9.229e+02, threshold=8.143e+02, percent-clipped=1.0 +2023-03-29 13:43:11,854 INFO [train.py:892] (0/4) Epoch 40, batch 1300, loss[loss=0.1537, simple_loss=0.2415, pruned_loss=0.03292, over 19608.00 frames. ], tot_loss[loss=0.1517, simple_loss=0.2326, pruned_loss=0.03535, over 3944662.31 frames. ], batch size: 48, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:43:18,839 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73649.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:43:23,086 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73651.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:44:03,512 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73668.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 13:45:04,589 INFO [train.py:892] (0/4) Epoch 40, batch 1350, loss[loss=0.1398, simple_loss=0.2115, pruned_loss=0.034, over 19887.00 frames. 
], tot_loss[loss=0.1516, simple_loss=0.2327, pruned_loss=0.03525, over 3946252.34 frames. ], batch size: 47, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:45:19,543 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2789, 3.3220, 3.4843, 3.4334, 3.2417, 3.3653, 3.0645, 3.4223], + device='cuda:0'), covar=tensor([0.0256, 0.0332, 0.0286, 0.0275, 0.0349, 0.0266, 0.0412, 0.0410], + device='cuda:0'), in_proj_covar=tensor([0.0095, 0.0089, 0.0091, 0.0086, 0.0099, 0.0092, 0.0108, 0.0080], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 13:45:48,773 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8338, 2.4835, 2.8188, 3.0744, 3.4595, 3.7163, 3.6170, 3.6077], + device='cuda:0'), covar=tensor([0.1012, 0.1538, 0.1260, 0.0733, 0.0501, 0.0277, 0.0459, 0.0536], + device='cuda:0'), in_proj_covar=tensor([0.0166, 0.0171, 0.0183, 0.0156, 0.0143, 0.0137, 0.0131, 0.0121], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 13:46:21,788 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.112e+02 3.360e+02 4.018e+02 4.695e+02 9.172e+02, threshold=8.036e+02, percent-clipped=1.0 +2023-03-29 13:46:24,613 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73732.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:46:56,658 INFO [train.py:892] (0/4) Epoch 40, batch 1400, loss[loss=0.1893, simple_loss=0.2976, pruned_loss=0.04053, over 18948.00 frames. ], tot_loss[loss=0.1519, simple_loss=0.2327, pruned_loss=0.03553, over 3946528.78 frames. ], batch size: 514, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:48:43,146 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3461, 4.9284, 4.9838, 4.7461, 5.3159, 3.3752, 4.3252, 2.7294], + device='cuda:0'), covar=tensor([0.0210, 0.0210, 0.0164, 0.0204, 0.0149, 0.0991, 0.0872, 0.1521], + device='cuda:0'), in_proj_covar=tensor([0.0108, 0.0152, 0.0117, 0.0138, 0.0122, 0.0138, 0.0145, 0.0131], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 13:48:43,218 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73793.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:48:49,028 INFO [train.py:892] (0/4) Epoch 40, batch 1450, loss[loss=0.1477, simple_loss=0.2278, pruned_loss=0.03381, over 19769.00 frames. ], tot_loss[loss=0.1525, simple_loss=0.2334, pruned_loss=0.03577, over 3948547.09 frames. ], batch size: 40, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:49:11,240 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-03-29 13:50:11,120 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.549e+02 3.468e+02 4.023e+02 4.768e+02 7.367e+02, threshold=8.047e+02, percent-clipped=0.0 +2023-03-29 13:50:45,943 INFO [train.py:892] (0/4) Epoch 40, batch 1500, loss[loss=0.1471, simple_loss=0.2197, pruned_loss=0.03721, over 19827.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.235, pruned_loss=0.03597, over 3945456.12 frames. ], batch size: 127, lr: 3.89e-03, grad_scale: 16.0 +2023-03-29 13:50:58,903 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-29 13:52:34,845 INFO [train.py:892] (0/4) Epoch 40, batch 1550, loss[loss=0.1871, simple_loss=0.2693, pruned_loss=0.05246, over 19651.00 frames. 
], tot_loss[loss=0.1534, simple_loss=0.2346, pruned_loss=0.03606, over 3944733.34 frames. ], batch size: 330, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 13:53:51,995 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.479e+02 3.617e+02 4.156e+02 5.212e+02 8.023e+02, threshold=8.312e+02, percent-clipped=0.0 +2023-03-29 13:54:26,935 INFO [train.py:892] (0/4) Epoch 40, batch 1600, loss[loss=0.1522, simple_loss=0.2318, pruned_loss=0.03632, over 19852.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2348, pruned_loss=0.03595, over 3946300.31 frames. ], batch size: 60, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 13:54:27,676 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73946.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:54:33,706 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73949.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:54:49,053 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-03-29 13:56:21,332 INFO [train.py:892] (0/4) Epoch 40, batch 1650, loss[loss=0.1444, simple_loss=0.2257, pruned_loss=0.03154, over 19795.00 frames. ], tot_loss[loss=0.152, simple_loss=0.2333, pruned_loss=0.03537, over 3947631.08 frames. ], batch size: 51, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 13:56:25,672 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73997.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:56:32,671 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-74000.pt +2023-03-29 13:57:41,959 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9505, 4.1395, 4.1718, 4.0318, 3.9558, 4.1103, 3.7123, 3.7212], + device='cuda:0'), covar=tensor([0.0597, 0.0548, 0.0529, 0.0516, 0.0698, 0.0572, 0.0698, 0.1084], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0306, 0.0316, 0.0276, 0.0285, 0.0267, 0.0281, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 13:57:48,765 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.164e+02 3.493e+02 4.073e+02 5.082e+02 8.769e+02, threshold=8.147e+02, percent-clipped=1.0 +2023-03-29 13:58:18,997 INFO [train.py:892] (0/4) Epoch 40, batch 1700, loss[loss=0.142, simple_loss=0.2228, pruned_loss=0.03061, over 19840.00 frames. ], tot_loss[loss=0.1532, simple_loss=0.2345, pruned_loss=0.03589, over 3947561.28 frames. ], batch size: 161, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 13:58:59,849 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74064.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 13:59:50,932 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74088.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:00:06,306 INFO [train.py:892] (0/4) Epoch 40, batch 1750, loss[loss=0.143, simple_loss=0.2199, pruned_loss=0.03306, over 19658.00 frames. ], tot_loss[loss=0.1529, simple_loss=0.2342, pruned_loss=0.03577, over 3948604.15 frames. 
], batch size: 67, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 14:01:03,994 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74125.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:01:13,847 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.104e+02 3.603e+02 4.226e+02 5.080e+02 8.016e+02, threshold=8.451e+02, percent-clipped=0.0 +2023-03-29 14:01:43,817 INFO [train.py:892] (0/4) Epoch 40, batch 1800, loss[loss=0.1488, simple_loss=0.2264, pruned_loss=0.03563, over 19805.00 frames. ], tot_loss[loss=0.1531, simple_loss=0.2342, pruned_loss=0.03595, over 3948624.73 frames. ], batch size: 132, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 14:01:57,667 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. limit=2.0 +2023-03-29 14:02:05,010 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.85 vs. limit=5.0 +2023-03-29 14:03:18,059 INFO [train.py:892] (0/4) Epoch 40, batch 1850, loss[loss=0.1735, simple_loss=0.2695, pruned_loss=0.03876, over 19593.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.236, pruned_loss=0.03585, over 3947569.85 frames. ], batch size: 53, lr: 3.88e-03, grad_scale: 16.0 +2023-03-29 14:03:26,081 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-40.pt +2023-03-29 14:04:24,216 INFO [train.py:892] (0/4) Epoch 41, batch 0, loss[loss=0.1455, simple_loss=0.2223, pruned_loss=0.03431, over 19848.00 frames. ], tot_loss[loss=0.1455, simple_loss=0.2223, pruned_loss=0.03431, over 19848.00 frames. ], batch size: 56, lr: 3.83e-03, grad_scale: 16.0 +2023-03-29 14:04:24,217 INFO [train.py:917] (0/4) Computing validation loss +2023-03-29 14:04:45,291 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7558, 2.4158, 2.9570, 2.6058, 3.1499, 3.2762, 2.9465, 3.1517], + device='cuda:0'), covar=tensor([0.0579, 0.0869, 0.0138, 0.0350, 0.0155, 0.0242, 0.0217, 0.0224], + device='cuda:0'), in_proj_covar=tensor([0.0104, 0.0106, 0.0091, 0.0153, 0.0090, 0.0102, 0.0093, 0.0090], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 14:04:57,765 INFO [train.py:926] (0/4) Epoch 41, validation: loss=0.1869, simple_loss=0.2502, pruned_loss=0.06181, over 2883724.00 frames. +2023-03-29 14:04:57,767 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-29 14:06:10,077 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.348e+02 3.328e+02 3.806e+02 4.731e+02 7.088e+02, threshold=7.612e+02, percent-clipped=0.0 +2023-03-29 14:06:30,730 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74240.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:06:46,785 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74246.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:06:57,524 INFO [train.py:892] (0/4) Epoch 41, batch 50, loss[loss=0.1509, simple_loss=0.2352, pruned_loss=0.03324, over 19762.00 frames. ], tot_loss[loss=0.1486, simple_loss=0.228, pruned_loss=0.03464, over 892846.79 frames. ], batch size: 198, lr: 3.83e-03, grad_scale: 16.0 +2023-03-29 14:07:16,008 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-03-29 14:08:04,697 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. 
limit=2.0 +2023-03-29 14:08:22,568 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2584, 5.5332, 5.5887, 5.4626, 5.2726, 5.5394, 5.0356, 5.0212], + device='cuda:0'), covar=tensor([0.0420, 0.0428, 0.0404, 0.0413, 0.0538, 0.0449, 0.0620, 0.0908], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0307, 0.0316, 0.0276, 0.0284, 0.0267, 0.0281, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 14:08:35,440 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74294.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:08:49,414 INFO [train.py:892] (0/4) Epoch 41, batch 100, loss[loss=0.1331, simple_loss=0.2079, pruned_loss=0.02919, over 19822.00 frames. ], tot_loss[loss=0.1474, simple_loss=0.2278, pruned_loss=0.03355, over 1570446.64 frames. ], batch size: 187, lr: 3.83e-03, grad_scale: 16.0 +2023-03-29 14:08:50,571 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74301.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:09:54,192 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.051e+02 3.488e+02 4.125e+02 4.651e+02 7.521e+02, threshold=8.249e+02, percent-clipped=0.0 +2023-03-29 14:10:04,702 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-29 14:10:38,041 INFO [train.py:892] (0/4) Epoch 41, batch 150, loss[loss=0.1398, simple_loss=0.2167, pruned_loss=0.03151, over 19805.00 frames. ], tot_loss[loss=0.1492, simple_loss=0.2297, pruned_loss=0.03434, over 2096432.60 frames. ], batch size: 167, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:11:50,627 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8860, 2.4354, 4.0240, 3.6342, 3.9491, 4.0309, 3.8177, 3.6744], + device='cuda:0'), covar=tensor([0.0643, 0.1020, 0.0119, 0.0517, 0.0167, 0.0236, 0.0203, 0.0204], + device='cuda:0'), in_proj_covar=tensor([0.0104, 0.0106, 0.0091, 0.0153, 0.0090, 0.0102, 0.0093, 0.0090], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 14:12:02,996 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74388.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:12:35,712 INFO [train.py:892] (0/4) Epoch 41, batch 200, loss[loss=0.136, simple_loss=0.2138, pruned_loss=0.02908, over 19875.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2308, pruned_loss=0.03489, over 2507114.81 frames. ], batch size: 159, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:13:19,110 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74420.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:13:44,020 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.532e+02 3.589e+02 4.027e+02 4.762e+02 8.323e+02, threshold=8.054e+02, percent-clipped=1.0 +2023-03-29 14:13:54,992 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74436.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:14:27,138 INFO [train.py:892] (0/4) Epoch 41, batch 250, loss[loss=0.1912, simple_loss=0.2653, pruned_loss=0.05849, over 19806.00 frames. ], tot_loss[loss=0.1499, simple_loss=0.2302, pruned_loss=0.03485, over 2828273.23 frames. 
], batch size: 126, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:14:39,980 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.9311, 6.2246, 6.2869, 6.1308, 6.0452, 6.2399, 5.5947, 5.6056], + device='cuda:0'), covar=tensor([0.0367, 0.0416, 0.0413, 0.0416, 0.0499, 0.0424, 0.0636, 0.0914], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0305, 0.0314, 0.0274, 0.0283, 0.0266, 0.0279, 0.0325], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 14:16:20,795 INFO [train.py:892] (0/4) Epoch 41, batch 300, loss[loss=0.1423, simple_loss=0.2147, pruned_loss=0.03491, over 19851.00 frames. ], tot_loss[loss=0.1505, simple_loss=0.2311, pruned_loss=0.03497, over 3075674.42 frames. ], batch size: 165, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:17:27,567 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.493e+02 3.756e+02 4.487e+02 5.432e+02 9.829e+02, threshold=8.974e+02, percent-clipped=6.0 +2023-03-29 14:18:14,841 INFO [train.py:892] (0/4) Epoch 41, batch 350, loss[loss=0.145, simple_loss=0.2283, pruned_loss=0.03092, over 19722.00 frames. ], tot_loss[loss=0.1498, simple_loss=0.231, pruned_loss=0.03435, over 3270351.17 frames. ], batch size: 80, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:19:56,171 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74595.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:19:59,630 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74596.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:20:06,679 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-29 14:20:09,424 INFO [train.py:892] (0/4) Epoch 41, batch 400, loss[loss=0.1397, simple_loss=0.2136, pruned_loss=0.03294, over 19774.00 frames. ], tot_loss[loss=0.1495, simple_loss=0.2307, pruned_loss=0.03413, over 3420915.74 frames. ], batch size: 217, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:20:44,904 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4536, 3.7344, 3.9746, 4.4461, 3.0108, 3.3832, 2.7893, 2.8238], + device='cuda:0'), covar=tensor([0.0442, 0.1718, 0.0774, 0.0393, 0.1881, 0.1001, 0.1317, 0.1582], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0328, 0.0253, 0.0211, 0.0252, 0.0215, 0.0223, 0.0219], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 14:20:56,843 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-03-29 14:21:17,934 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.495e+02 3.791e+02 4.431e+02 5.113e+02 8.392e+02, threshold=8.862e+02, percent-clipped=0.0 +2023-03-29 14:21:54,720 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7367, 3.3804, 3.6158, 3.1519, 4.0161, 4.1850, 4.4708, 5.0266], + device='cuda:0'), covar=tensor([0.0505, 0.1569, 0.1471, 0.2254, 0.1490, 0.1068, 0.0629, 0.0443], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0246, 0.0274, 0.0261, 0.0306, 0.0264, 0.0240, 0.0267], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 14:22:06,038 INFO [train.py:892] (0/4) Epoch 41, batch 450, loss[loss=0.1592, simple_loss=0.2363, pruned_loss=0.04101, over 19838.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2316, pruned_loss=0.03444, over 3538140.31 frames. 
], batch size: 161, lr: 3.82e-03, grad_scale: 16.0 +2023-03-29 14:22:17,901 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74656.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:24:01,240 INFO [train.py:892] (0/4) Epoch 41, batch 500, loss[loss=0.1622, simple_loss=0.2556, pruned_loss=0.03436, over 19902.00 frames. ], tot_loss[loss=0.1507, simple_loss=0.2321, pruned_loss=0.03464, over 3628779.74 frames. ], batch size: 50, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:24:46,298 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74720.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:25:12,466 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.085e+02 3.392e+02 3.989e+02 4.649e+02 9.968e+02, threshold=7.978e+02, percent-clipped=1.0 +2023-03-29 14:25:55,601 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9891, 3.8526, 4.2489, 3.8655, 3.6015, 4.1382, 3.9654, 4.3142], + device='cuda:0'), covar=tensor([0.0762, 0.0360, 0.0357, 0.0392, 0.1183, 0.0546, 0.0490, 0.0341], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0229, 0.0228, 0.0240, 0.0212, 0.0253, 0.0241, 0.0227], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 14:25:56,878 INFO [train.py:892] (0/4) Epoch 41, batch 550, loss[loss=0.168, simple_loss=0.24, pruned_loss=0.04796, over 19778.00 frames. ], tot_loss[loss=0.1524, simple_loss=0.2335, pruned_loss=0.03565, over 3700449.50 frames. ], batch size: 191, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:26:35,901 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74768.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:27:02,411 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2233, 3.0029, 3.3092, 2.8462, 3.5032, 3.4230, 4.0481, 4.4973], + device='cuda:0'), covar=tensor([0.0603, 0.1625, 0.1640, 0.2296, 0.1694, 0.1519, 0.0648, 0.0497], + device='cuda:0'), in_proj_covar=tensor([0.0263, 0.0248, 0.0276, 0.0263, 0.0309, 0.0266, 0.0241, 0.0269], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 14:27:49,003 INFO [train.py:892] (0/4) Epoch 41, batch 600, loss[loss=0.1525, simple_loss=0.2317, pruned_loss=0.03663, over 19888.00 frames. ], tot_loss[loss=0.1529, simple_loss=0.2341, pruned_loss=0.0359, over 3756184.79 frames. ], batch size: 61, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:28:17,266 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5144, 2.6158, 4.8489, 4.1258, 4.5004, 4.7209, 4.5962, 4.3726], + device='cuda:0'), covar=tensor([0.0659, 0.1154, 0.0109, 0.0882, 0.0204, 0.0230, 0.0187, 0.0196], + device='cuda:0'), in_proj_covar=tensor([0.0104, 0.0106, 0.0091, 0.0153, 0.0090, 0.0103, 0.0094, 0.0090], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 14:28:53,130 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.538e+02 3.664e+02 4.346e+02 5.320e+02 9.628e+02, threshold=8.692e+02, percent-clipped=3.0 +2023-03-29 14:29:38,231 INFO [train.py:892] (0/4) Epoch 41, batch 650, loss[loss=0.1931, simple_loss=0.3201, pruned_loss=0.03303, over 17953.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2343, pruned_loss=0.03616, over 3797353.76 frames. 
], batch size: 633, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:30:02,504 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7446, 3.8510, 2.3168, 3.9351, 4.1062, 1.8748, 3.4052, 3.2086], + device='cuda:0'), covar=tensor([0.0785, 0.0851, 0.2758, 0.0861, 0.0629, 0.2820, 0.1118, 0.0929], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0268, 0.0237, 0.0288, 0.0266, 0.0209, 0.0246, 0.0208], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 14:31:21,295 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74896.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:31:26,085 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9305, 2.2686, 2.9249, 3.0848, 3.5767, 3.8598, 3.7822, 3.8190], + device='cuda:0'), covar=tensor([0.1027, 0.1864, 0.1307, 0.0787, 0.0497, 0.0337, 0.0398, 0.0440], + device='cuda:0'), in_proj_covar=tensor([0.0166, 0.0171, 0.0183, 0.0158, 0.0142, 0.0138, 0.0131, 0.0122], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 14:31:32,909 INFO [train.py:892] (0/4) Epoch 41, batch 700, loss[loss=0.1614, simple_loss=0.2392, pruned_loss=0.04183, over 19720.00 frames. ], tot_loss[loss=0.1527, simple_loss=0.234, pruned_loss=0.03569, over 3831229.36 frames. ], batch size: 269, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:31:51,784 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74909.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:32:42,139 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.552e+02 3.676e+02 4.188e+02 5.143e+02 8.176e+02, threshold=8.375e+02, percent-clipped=0.0 +2023-03-29 14:33:13,543 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74944.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:33:30,569 INFO [train.py:892] (0/4) Epoch 41, batch 750, loss[loss=0.2402, simple_loss=0.3181, pruned_loss=0.08111, over 19213.00 frames. ], tot_loss[loss=0.1522, simple_loss=0.2338, pruned_loss=0.03532, over 3855396.03 frames. ], batch size: 452, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:33:31,466 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74951.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:34:02,607 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74964.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:34:15,012 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74970.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:35:26,030 INFO [train.py:892] (0/4) Epoch 41, batch 800, loss[loss=0.1328, simple_loss=0.2164, pruned_loss=0.02461, over 19899.00 frames. ], tot_loss[loss=0.1521, simple_loss=0.234, pruned_loss=0.03509, over 3875936.90 frames. ], batch size: 116, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:36:21,576 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75025.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:36:36,273 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.639e+02 3.669e+02 4.402e+02 5.474e+02 1.379e+03, threshold=8.804e+02, percent-clipped=2.0 +2023-03-29 14:37:20,366 INFO [train.py:892] (0/4) Epoch 41, batch 850, loss[loss=0.1503, simple_loss=0.2346, pruned_loss=0.03299, over 19837.00 frames. 
], tot_loss[loss=0.1533, simple_loss=0.2352, pruned_loss=0.03574, over 3892032.08 frames. ], batch size: 208, lr: 3.81e-03, grad_scale: 16.0 +2023-03-29 14:37:42,905 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4359, 3.2844, 5.1526, 3.7906, 4.0686, 3.7294, 2.7801, 2.9048], + device='cuda:0'), covar=tensor([0.0977, 0.3247, 0.0358, 0.1073, 0.1733, 0.1698, 0.2690, 0.2745], + device='cuda:0'), in_proj_covar=tensor([0.0358, 0.0401, 0.0355, 0.0297, 0.0382, 0.0395, 0.0388, 0.0361], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 14:37:57,716 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-03-29 14:39:13,357 INFO [train.py:892] (0/4) Epoch 41, batch 900, loss[loss=0.1477, simple_loss=0.2253, pruned_loss=0.03508, over 19767.00 frames. ], tot_loss[loss=0.1523, simple_loss=0.2339, pruned_loss=0.03536, over 3905910.62 frames. ], batch size: 119, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:39:44,935 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6803, 2.6379, 2.7088, 2.2731, 2.8500, 2.4483, 2.8099, 2.7514], + device='cuda:0'), covar=tensor([0.0557, 0.0539, 0.0589, 0.0851, 0.0424, 0.0543, 0.0443, 0.0407], + device='cuda:0'), in_proj_covar=tensor([0.0084, 0.0093, 0.0090, 0.0115, 0.0086, 0.0089, 0.0085, 0.0080], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 14:39:50,651 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6769, 3.6091, 3.5310, 3.3180, 3.7253, 2.6782, 2.9573, 1.5869], + device='cuda:0'), covar=tensor([0.0290, 0.0312, 0.0222, 0.0265, 0.0232, 0.1434, 0.0820, 0.2224], + device='cuda:0'), in_proj_covar=tensor([0.0109, 0.0153, 0.0118, 0.0140, 0.0123, 0.0140, 0.0147, 0.0132], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-29 14:40:21,667 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.262e+02 3.745e+02 4.337e+02 5.272e+02 8.502e+02, threshold=8.673e+02, percent-clipped=0.0 +2023-03-29 14:40:30,133 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5875, 4.3120, 4.3698, 4.5822, 4.2900, 4.7414, 4.6936, 4.8611], + device='cuda:0'), covar=tensor([0.0734, 0.0460, 0.0563, 0.0428, 0.0797, 0.0491, 0.0456, 0.0338], + device='cuda:0'), in_proj_covar=tensor([0.0164, 0.0190, 0.0211, 0.0186, 0.0186, 0.0170, 0.0161, 0.0210], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 14:41:05,569 INFO [train.py:892] (0/4) Epoch 41, batch 950, loss[loss=0.1365, simple_loss=0.2209, pruned_loss=0.02605, over 19832.00 frames. ], tot_loss[loss=0.1523, simple_loss=0.234, pruned_loss=0.03527, over 3915298.97 frames. 
], batch size: 93, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:42:07,713 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5725, 2.5529, 2.6250, 2.6642, 2.5469, 2.6082, 2.5873, 2.7324], + device='cuda:0'), covar=tensor([0.0382, 0.0351, 0.0377, 0.0334, 0.0550, 0.0386, 0.0519, 0.0354], + device='cuda:0'), in_proj_covar=tensor([0.0096, 0.0090, 0.0092, 0.0087, 0.0099, 0.0093, 0.0108, 0.0081], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 14:42:10,256 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=5.03 vs. limit=5.0 +2023-03-29 14:42:59,208 INFO [train.py:892] (0/4) Epoch 41, batch 1000, loss[loss=0.1492, simple_loss=0.2368, pruned_loss=0.03081, over 19806.00 frames. ], tot_loss[loss=0.1524, simple_loss=0.2342, pruned_loss=0.03535, over 3923199.06 frames. ], batch size: 82, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:44:01,234 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75228.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:44:07,914 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.478e+02 3.475e+02 3.874e+02 4.705e+02 1.241e+03, threshold=7.748e+02, percent-clipped=2.0 +2023-03-29 14:44:48,141 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9370, 5.0432, 5.3326, 5.1045, 5.2060, 4.9161, 5.0746, 4.9173], + device='cuda:0'), covar=tensor([0.1453, 0.1710, 0.0882, 0.1390, 0.0799, 0.0924, 0.1888, 0.1825], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0351, 0.0385, 0.0318, 0.0291, 0.0297, 0.0376, 0.0409], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 14:44:53,969 INFO [train.py:892] (0/4) Epoch 41, batch 1050, loss[loss=0.1442, simple_loss=0.2365, pruned_loss=0.02595, over 19576.00 frames. ], tot_loss[loss=0.1517, simple_loss=0.2334, pruned_loss=0.03502, over 3930357.39 frames. 
], batch size: 49, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:44:54,765 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75251.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:45:01,278 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1220, 4.3176, 2.5218, 4.4832, 4.6480, 2.0297, 3.9209, 3.5776], + device='cuda:0'), covar=tensor([0.0703, 0.0778, 0.2644, 0.0799, 0.0584, 0.2745, 0.0986, 0.0827], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0268, 0.0237, 0.0288, 0.0266, 0.0208, 0.0246, 0.0209], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 14:45:13,406 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2071, 2.6611, 4.3525, 3.7660, 4.2235, 4.3819, 4.1942, 4.0455], + device='cuda:0'), covar=tensor([0.0651, 0.0998, 0.0123, 0.0731, 0.0154, 0.0221, 0.0181, 0.0194], + device='cuda:0'), in_proj_covar=tensor([0.0104, 0.0106, 0.0091, 0.0153, 0.0090, 0.0102, 0.0093, 0.0090], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 14:45:20,337 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0327, 2.9776, 5.0302, 4.2791, 4.8434, 4.9607, 4.8366, 4.6496], + device='cuda:0'), covar=tensor([0.0471, 0.0950, 0.0096, 0.0887, 0.0133, 0.0205, 0.0151, 0.0150], + device='cuda:0'), in_proj_covar=tensor([0.0104, 0.0106, 0.0091, 0.0153, 0.0090, 0.0102, 0.0093, 0.0090], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 14:45:26,080 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75265.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:45:38,227 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75270.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:46:19,844 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75289.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:46:40,118 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75299.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:46:43,190 INFO [train.py:892] (0/4) Epoch 41, batch 1100, loss[loss=0.1285, simple_loss=0.2107, pruned_loss=0.02315, over 19796.00 frames. ], tot_loss[loss=0.1519, simple_loss=0.2335, pruned_loss=0.03514, over 3935616.95 frames. 
], batch size: 105, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:46:53,668 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3615, 3.7515, 3.1652, 2.7984, 3.2597, 3.5983, 3.5761, 3.6309], + device='cuda:0'), covar=tensor([0.0296, 0.0219, 0.0313, 0.0461, 0.0339, 0.0299, 0.0192, 0.0210], + device='cuda:0'), in_proj_covar=tensor([0.0114, 0.0106, 0.0108, 0.0108, 0.0111, 0.0096, 0.0097, 0.0096], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 14:47:27,536 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75320.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:47:51,231 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.168e+02 3.537e+02 4.080e+02 4.857e+02 1.316e+03, threshold=8.160e+02, percent-clipped=3.0 +2023-03-29 14:47:53,867 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75331.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 14:48:39,009 INFO [train.py:892] (0/4) Epoch 41, batch 1150, loss[loss=0.1346, simple_loss=0.2078, pruned_loss=0.03068, over 19818.00 frames. ], tot_loss[loss=0.1519, simple_loss=0.2334, pruned_loss=0.03525, over 3935113.25 frames. ], batch size: 184, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:48:46,339 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2717, 5.5778, 5.6434, 5.5100, 5.3166, 5.6197, 5.0636, 5.0707], + device='cuda:0'), covar=tensor([0.0483, 0.0484, 0.0469, 0.0447, 0.0600, 0.0457, 0.0696, 0.1007], + device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0307, 0.0317, 0.0276, 0.0286, 0.0268, 0.0280, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 14:49:19,546 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-03-29 14:50:32,183 INFO [train.py:892] (0/4) Epoch 41, batch 1200, loss[loss=0.1575, simple_loss=0.2349, pruned_loss=0.04008, over 19792.00 frames. ], tot_loss[loss=0.1518, simple_loss=0.2329, pruned_loss=0.03533, over 3939851.82 frames. ], batch size: 236, lr: 3.80e-03, grad_scale: 16.0 +2023-03-29 14:50:43,842 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-03-29 14:51:41,595 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.627e+02 3.559e+02 4.233e+02 5.176e+02 1.064e+03, threshold=8.467e+02, percent-clipped=2.0 +2023-03-29 14:52:18,447 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.98 vs. limit=5.0 +2023-03-29 14:52:27,171 INFO [train.py:892] (0/4) Epoch 41, batch 1250, loss[loss=0.1382, simple_loss=0.2237, pruned_loss=0.02633, over 19823.00 frames. ], tot_loss[loss=0.1513, simple_loss=0.2323, pruned_loss=0.03517, over 3941924.92 frames. 
], batch size: 167, lr: 3.80e-03, grad_scale: 32.0 +2023-03-29 14:53:32,136 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.4744, 5.8600, 5.9781, 5.7594, 5.7356, 5.7011, 5.6477, 5.5548], + device='cuda:0'), covar=tensor([0.1431, 0.1433, 0.0798, 0.1109, 0.0616, 0.0723, 0.1731, 0.1722], + device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0347, 0.0381, 0.0314, 0.0288, 0.0294, 0.0372, 0.0404], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 14:53:47,566 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2533, 3.1848, 5.1165, 3.6164, 3.8094, 3.6135, 2.5825, 2.8863], + device='cuda:0'), covar=tensor([0.0988, 0.3145, 0.0342, 0.1112, 0.1959, 0.1595, 0.2673, 0.2670], + device='cuda:0'), in_proj_covar=tensor([0.0356, 0.0399, 0.0353, 0.0294, 0.0379, 0.0393, 0.0385, 0.0359], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 14:53:56,068 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-03-29 14:54:20,115 INFO [train.py:892] (0/4) Epoch 41, batch 1300, loss[loss=0.2501, simple_loss=0.3231, pruned_loss=0.0886, over 19402.00 frames. ], tot_loss[loss=0.1515, simple_loss=0.2324, pruned_loss=0.03535, over 3944482.12 frames. ], batch size: 431, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 14:55:27,414 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.667e+02 3.652e+02 4.285e+02 4.954e+02 9.983e+02, threshold=8.569e+02, percent-clipped=1.0 +2023-03-29 14:56:10,878 INFO [train.py:892] (0/4) Epoch 41, batch 1350, loss[loss=0.1373, simple_loss=0.2219, pruned_loss=0.02634, over 19891.00 frames. ], tot_loss[loss=0.1518, simple_loss=0.2329, pruned_loss=0.03528, over 3945020.42 frames. ], batch size: 97, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 14:56:44,978 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75565.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:56:48,341 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0402, 4.7442, 4.8312, 5.0583, 4.7487, 5.3098, 5.2266, 5.3729], + device='cuda:0'), covar=tensor([0.0745, 0.0509, 0.0589, 0.0447, 0.0798, 0.0553, 0.0492, 0.0388], + device='cuda:0'), in_proj_covar=tensor([0.0163, 0.0189, 0.0211, 0.0186, 0.0186, 0.0169, 0.0161, 0.0209], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 14:57:28,138 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75584.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:58:05,442 INFO [train.py:892] (0/4) Epoch 41, batch 1400, loss[loss=0.1362, simple_loss=0.2214, pruned_loss=0.02547, over 19843.00 frames. ], tot_loss[loss=0.1506, simple_loss=0.2318, pruned_loss=0.03474, over 3946880.08 frames. 
], batch size: 59, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 14:58:32,979 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75613.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:58:49,304 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75620.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 14:59:04,016 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75626.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 14:59:14,867 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.057e+02 3.782e+02 4.383e+02 5.452e+02 9.029e+02, threshold=8.766e+02, percent-clipped=1.0 +2023-03-29 14:59:57,699 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1970, 4.3143, 2.5885, 4.5386, 4.7698, 2.1314, 4.0162, 3.6342], + device='cuda:0'), covar=tensor([0.0777, 0.0761, 0.2766, 0.0831, 0.0526, 0.2894, 0.0962, 0.0906], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0267, 0.0237, 0.0287, 0.0266, 0.0208, 0.0245, 0.0209], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 15:00:00,087 INFO [train.py:892] (0/4) Epoch 41, batch 1450, loss[loss=0.1482, simple_loss=0.2257, pruned_loss=0.03537, over 19636.00 frames. ], tot_loss[loss=0.1513, simple_loss=0.2326, pruned_loss=0.03497, over 3946675.17 frames. ], batch size: 68, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 15:00:40,615 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75668.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:01:52,777 INFO [train.py:892] (0/4) Epoch 41, batch 1500, loss[loss=0.1351, simple_loss=0.2177, pruned_loss=0.02625, over 19689.00 frames. ], tot_loss[loss=0.1522, simple_loss=0.2337, pruned_loss=0.03534, over 3947451.34 frames. ], batch size: 74, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 15:01:56,337 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-29 15:03:00,956 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.571e+02 3.753e+02 4.206e+02 5.021e+02 8.487e+02, threshold=8.413e+02, percent-clipped=0.0 +2023-03-29 15:03:47,906 INFO [train.py:892] (0/4) Epoch 41, batch 1550, loss[loss=0.1237, simple_loss=0.201, pruned_loss=0.02318, over 19837.00 frames. ], tot_loss[loss=0.1515, simple_loss=0.2328, pruned_loss=0.03511, over 3946858.69 frames. ], batch size: 90, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 15:05:37,487 INFO [train.py:892] (0/4) Epoch 41, batch 1600, loss[loss=0.1591, simple_loss=0.2392, pruned_loss=0.03946, over 19670.00 frames. ], tot_loss[loss=0.1518, simple_loss=0.2331, pruned_loss=0.03521, over 3946137.72 frames. 
], batch size: 64, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 15:06:45,418 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.043e+02 3.453e+02 4.213e+02 4.846e+02 1.121e+03, threshold=8.426e+02, percent-clipped=1.0 +2023-03-29 15:07:11,478 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5822, 4.6553, 2.8369, 4.9099, 5.1400, 2.3320, 4.3550, 3.8864], + device='cuda:0'), covar=tensor([0.0630, 0.0689, 0.2511, 0.0662, 0.0432, 0.2730, 0.0912, 0.0840], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0269, 0.0237, 0.0288, 0.0267, 0.0208, 0.0246, 0.0209], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 15:07:29,996 INFO [train.py:892] (0/4) Epoch 41, batch 1650, loss[loss=0.1413, simple_loss=0.2282, pruned_loss=0.02724, over 19781.00 frames. ], tot_loss[loss=0.1521, simple_loss=0.2335, pruned_loss=0.03536, over 3946615.89 frames. ], batch size: 87, lr: 3.79e-03, grad_scale: 32.0 +2023-03-29 15:08:44,697 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75884.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:09:22,839 INFO [train.py:892] (0/4) Epoch 41, batch 1700, loss[loss=0.1369, simple_loss=0.2211, pruned_loss=0.02637, over 19754.00 frames. ], tot_loss[loss=0.1512, simple_loss=0.2323, pruned_loss=0.03499, over 3947747.50 frames. ], batch size: 102, lr: 3.78e-03, grad_scale: 32.0 +2023-03-29 15:10:20,582 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75926.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 15:10:32,269 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.390e+02 3.529e+02 4.151e+02 4.845e+02 7.414e+02, threshold=8.303e+02, percent-clipped=0.0 +2023-03-29 15:10:35,175 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75932.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:11:15,229 INFO [train.py:892] (0/4) Epoch 41, batch 1750, loss[loss=0.1536, simple_loss=0.2266, pruned_loss=0.04033, over 19797.00 frames. ], tot_loss[loss=0.1505, simple_loss=0.2317, pruned_loss=0.03467, over 3949107.54 frames. ], batch size: 172, lr: 3.78e-03, grad_scale: 32.0 +2023-03-29 15:12:01,834 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75974.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:12:20,341 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-29 15:12:55,455 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-76000.pt +2023-03-29 15:13:01,529 INFO [train.py:892] (0/4) Epoch 41, batch 1800, loss[loss=0.1348, simple_loss=0.2056, pruned_loss=0.03201, over 19804.00 frames. ], tot_loss[loss=0.1511, simple_loss=0.2321, pruned_loss=0.03501, over 3948544.73 frames. 
], batch size: 47, lr: 3.78e-03, grad_scale: 32.0 +2023-03-29 15:13:18,853 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7487, 3.5742, 3.7869, 3.0313, 4.0793, 3.2427, 3.5683, 3.9306], + device='cuda:0'), covar=tensor([0.0829, 0.0425, 0.0618, 0.0775, 0.0316, 0.0521, 0.0448, 0.0305], + device='cuda:0'), in_proj_covar=tensor([0.0085, 0.0094, 0.0090, 0.0115, 0.0086, 0.0090, 0.0086, 0.0081], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 15:13:46,292 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8015, 5.0543, 5.1307, 5.0099, 4.7583, 5.1053, 4.5742, 4.6261], + device='cuda:0'), covar=tensor([0.0497, 0.0487, 0.0488, 0.0422, 0.0645, 0.0513, 0.0727, 0.0970], + device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0307, 0.0317, 0.0277, 0.0286, 0.0268, 0.0281, 0.0330], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 15:13:57,892 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.757e+02 3.667e+02 4.191e+02 5.134e+02 9.957e+02, threshold=8.381e+02, percent-clipped=3.0 +2023-03-29 15:14:33,344 INFO [train.py:892] (0/4) Epoch 41, batch 1850, loss[loss=0.1443, simple_loss=0.2313, pruned_loss=0.02859, over 19658.00 frames. ], tot_loss[loss=0.1508, simple_loss=0.2324, pruned_loss=0.03458, over 3948787.69 frames. ], batch size: 55, lr: 3.78e-03, grad_scale: 32.0 +2023-03-29 15:14:40,943 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-41.pt +2023-03-29 15:15:35,916 INFO [train.py:892] (0/4) Epoch 42, batch 0, loss[loss=0.1527, simple_loss=0.2383, pruned_loss=0.0336, over 19733.00 frames. ], tot_loss[loss=0.1527, simple_loss=0.2383, pruned_loss=0.0336, over 19733.00 frames. ], batch size: 76, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:15:35,917 INFO [train.py:917] (0/4) Computing validation loss +2023-03-29 15:16:08,144 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.2785, 2.2494, 2.4214, 2.3138, 2.3541, 2.3488, 2.3333, 2.3943], + device='cuda:0'), covar=tensor([0.0414, 0.0401, 0.0350, 0.0385, 0.0495, 0.0417, 0.0499, 0.0425], + device='cuda:0'), in_proj_covar=tensor([0.0096, 0.0090, 0.0093, 0.0088, 0.0101, 0.0093, 0.0109, 0.0081], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 15:16:12,640 INFO [train.py:926] (0/4) Epoch 42, validation: loss=0.1864, simple_loss=0.2496, pruned_loss=0.06163, over 2883724.00 frames. +2023-03-29 15:16:12,641 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-29 15:17:10,652 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9999, 2.9872, 3.0290, 2.4429, 3.1197, 2.6494, 2.9544, 2.9816], + device='cuda:0'), covar=tensor([0.0500, 0.0477, 0.0670, 0.0888, 0.0414, 0.0598, 0.0562, 0.0446], + device='cuda:0'), in_proj_covar=tensor([0.0084, 0.0093, 0.0090, 0.0114, 0.0085, 0.0089, 0.0086, 0.0081], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 15:18:08,451 INFO [train.py:892] (0/4) Epoch 42, batch 50, loss[loss=0.1455, simple_loss=0.2224, pruned_loss=0.03428, over 19855.00 frames. ], tot_loss[loss=0.1492, simple_loss=0.2293, pruned_loss=0.03455, over 890968.88 frames. 
], batch size: 165, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:19:07,893 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.581e+02 3.605e+02 4.013e+02 4.746e+02 9.116e+02, threshold=8.026e+02, percent-clipped=1.0 +2023-03-29 15:19:16,995 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-29 15:20:05,973 INFO [train.py:892] (0/4) Epoch 42, batch 100, loss[loss=0.1684, simple_loss=0.2441, pruned_loss=0.04633, over 19828.00 frames. ], tot_loss[loss=0.1486, simple_loss=0.2288, pruned_loss=0.03425, over 1570294.59 frames. ], batch size: 177, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:21:58,763 INFO [train.py:892] (0/4) Epoch 42, batch 150, loss[loss=0.1507, simple_loss=0.2335, pruned_loss=0.03393, over 19818.00 frames. ], tot_loss[loss=0.1482, simple_loss=0.2288, pruned_loss=0.03378, over 2099178.72 frames. ], batch size: 72, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:22:57,753 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.293e+02 3.477e+02 4.297e+02 5.129e+02 1.202e+03, threshold=8.594e+02, percent-clipped=2.0 +2023-03-29 15:23:17,233 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.2536, 2.4735, 2.3192, 1.7538, 2.4127, 2.4763, 2.4091, 2.5217], + device='cuda:0'), covar=tensor([0.0460, 0.0435, 0.0391, 0.0647, 0.0416, 0.0390, 0.0380, 0.0318], + device='cuda:0'), in_proj_covar=tensor([0.0115, 0.0108, 0.0108, 0.0109, 0.0112, 0.0097, 0.0099, 0.0097], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 15:23:55,629 INFO [train.py:892] (0/4) Epoch 42, batch 200, loss[loss=0.1445, simple_loss=0.2315, pruned_loss=0.02873, over 19800.00 frames. ], tot_loss[loss=0.1513, simple_loss=0.2327, pruned_loss=0.03492, over 2506536.34 frames. ], batch size: 51, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:24:27,097 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76269.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 15:25:30,102 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76294.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:25:57,519 INFO [train.py:892] (0/4) Epoch 42, batch 250, loss[loss=0.1455, simple_loss=0.2162, pruned_loss=0.03742, over 19837.00 frames. ], tot_loss[loss=0.1496, simple_loss=0.2311, pruned_loss=0.03411, over 2826834.29 frames. 
], batch size: 197, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:26:38,820 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76323.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 15:26:55,197 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76330.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 15:26:57,694 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.384e+02 3.381e+02 3.992e+02 4.681e+02 8.709e+02, threshold=7.984e+02, percent-clipped=1.0 +2023-03-29 15:26:58,839 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2031, 4.3754, 2.5639, 4.5286, 4.7655, 2.0596, 4.0020, 3.5618], + device='cuda:0'), covar=tensor([0.0674, 0.0724, 0.2703, 0.0815, 0.0575, 0.2915, 0.0992, 0.0874], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0271, 0.0240, 0.0292, 0.0270, 0.0210, 0.0248, 0.0211], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 15:27:53,471 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-29 15:27:54,591 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76355.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:27:55,595 INFO [train.py:892] (0/4) Epoch 42, batch 300, loss[loss=0.1427, simple_loss=0.2229, pruned_loss=0.03123, over 19668.00 frames. ], tot_loss[loss=0.1495, simple_loss=0.231, pruned_loss=0.03398, over 3075530.54 frames. ], batch size: 64, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:28:02,864 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9453, 2.5947, 3.1040, 2.6565, 3.2349, 3.1591, 3.7940, 4.0954], + device='cuda:0'), covar=tensor([0.0629, 0.1933, 0.1666, 0.2229, 0.1622, 0.1706, 0.0634, 0.0605], + device='cuda:0'), in_proj_covar=tensor([0.0260, 0.0246, 0.0273, 0.0261, 0.0306, 0.0264, 0.0240, 0.0267], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 15:28:18,231 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6359, 2.6222, 2.7247, 2.7558, 2.6309, 2.6877, 2.7049, 2.8401], + device='cuda:0'), covar=tensor([0.0358, 0.0387, 0.0410, 0.0332, 0.0506, 0.0372, 0.0445, 0.0382], + device='cuda:0'), in_proj_covar=tensor([0.0097, 0.0092, 0.0094, 0.0088, 0.0102, 0.0094, 0.0110, 0.0082], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 15:29:02,563 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76384.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 15:29:53,453 INFO [train.py:892] (0/4) Epoch 42, batch 350, loss[loss=0.1699, simple_loss=0.246, pruned_loss=0.04692, over 19730.00 frames. ], tot_loss[loss=0.15, simple_loss=0.2314, pruned_loss=0.03429, over 3269729.39 frames. 
], batch size: 291, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:30:28,413 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9445, 5.1939, 5.2645, 5.1466, 4.9425, 5.2386, 4.7441, 4.7286], + device='cuda:0'), covar=tensor([0.0525, 0.0506, 0.0488, 0.0431, 0.0674, 0.0506, 0.0647, 0.0965], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0309, 0.0319, 0.0278, 0.0288, 0.0268, 0.0282, 0.0331], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 15:30:58,171 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.636e+02 3.511e+02 4.196e+02 4.977e+02 1.197e+03, threshold=8.393e+02, percent-clipped=1.0 +2023-03-29 15:31:51,126 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3888, 3.2521, 5.0692, 3.8515, 3.9092, 3.7711, 2.7956, 2.9586], + device='cuda:0'), covar=tensor([0.0946, 0.3235, 0.0354, 0.0997, 0.1852, 0.1513, 0.2726, 0.2539], + device='cuda:0'), in_proj_covar=tensor([0.0360, 0.0402, 0.0357, 0.0298, 0.0382, 0.0397, 0.0389, 0.0363], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 15:32:05,168 INFO [train.py:892] (0/4) Epoch 42, batch 400, loss[loss=0.1721, simple_loss=0.2568, pruned_loss=0.04372, over 19916.00 frames. ], tot_loss[loss=0.1497, simple_loss=0.231, pruned_loss=0.03415, over 3419458.18 frames. ], batch size: 53, lr: 3.73e-03, grad_scale: 32.0 +2023-03-29 15:32:13,634 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0322, 3.7254, 3.8328, 3.9774, 3.7865, 3.9552, 4.0889, 4.2717], + device='cuda:0'), covar=tensor([0.0677, 0.0466, 0.0559, 0.0433, 0.0722, 0.0611, 0.0480, 0.0317], + device='cuda:0'), in_proj_covar=tensor([0.0160, 0.0185, 0.0207, 0.0183, 0.0183, 0.0166, 0.0158, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 15:32:42,718 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-03-29 15:34:11,916 INFO [train.py:892] (0/4) Epoch 42, batch 450, loss[loss=0.1738, simple_loss=0.2548, pruned_loss=0.04635, over 19581.00 frames. ], tot_loss[loss=0.1511, simple_loss=0.2324, pruned_loss=0.03485, over 3537994.94 frames. ], batch size: 49, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:34:46,115 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-03-29 15:35:17,323 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.526e+02 3.433e+02 4.142e+02 5.148e+02 8.158e+02, threshold=8.284e+02, percent-clipped=0.0 +2023-03-29 15:35:19,334 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-03-29 15:35:22,978 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-29 15:36:19,065 INFO [train.py:892] (0/4) Epoch 42, batch 500, loss[loss=0.1351, simple_loss=0.2132, pruned_loss=0.02855, over 19898.00 frames. ], tot_loss[loss=0.1507, simple_loss=0.232, pruned_loss=0.0347, over 3628866.70 frames. ], batch size: 113, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:36:37,762 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. 
limit=2.0 +2023-03-29 15:37:31,938 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2489, 5.5427, 5.7450, 5.5109, 5.5324, 5.3961, 5.4262, 5.2673], + device='cuda:0'), covar=tensor([0.1398, 0.1306, 0.0748, 0.1195, 0.0669, 0.0726, 0.1575, 0.1843], + device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0349, 0.0383, 0.0316, 0.0291, 0.0295, 0.0371, 0.0403], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 15:38:21,666 INFO [train.py:892] (0/4) Epoch 42, batch 550, loss[loss=0.1503, simple_loss=0.2263, pruned_loss=0.0372, over 19846.00 frames. ], tot_loss[loss=0.151, simple_loss=0.2323, pruned_loss=0.03488, over 3699821.98 frames. ], batch size: 144, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:38:38,355 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0187, 2.3054, 2.8285, 3.2412, 3.6565, 4.0845, 3.9803, 3.9759], + device='cuda:0'), covar=tensor([0.1050, 0.2069, 0.1605, 0.0731, 0.0451, 0.0279, 0.0407, 0.0493], + device='cuda:0'), in_proj_covar=tensor([0.0168, 0.0171, 0.0184, 0.0158, 0.0143, 0.0139, 0.0132, 0.0123], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 15:39:05,582 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76625.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 15:39:18,270 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.079e+02 3.230e+02 3.815e+02 4.831e+02 8.297e+02, threshold=7.631e+02, percent-clipped=1.0 +2023-03-29 15:40:07,729 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76650.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:40:21,751 INFO [train.py:892] (0/4) Epoch 42, batch 600, loss[loss=0.1328, simple_loss=0.2206, pruned_loss=0.02249, over 19757.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2319, pruned_loss=0.03431, over 3753556.60 frames. ], batch size: 100, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:41:18,665 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76679.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 15:41:55,459 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3232, 4.0918, 4.1511, 4.3332, 4.0154, 4.3711, 4.3886, 4.6028], + device='cuda:0'), covar=tensor([0.0667, 0.0410, 0.0510, 0.0388, 0.0736, 0.0497, 0.0427, 0.0295], + device='cuda:0'), in_proj_covar=tensor([0.0161, 0.0186, 0.0209, 0.0184, 0.0184, 0.0167, 0.0159, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 15:42:25,296 INFO [train.py:892] (0/4) Epoch 42, batch 650, loss[loss=0.1472, simple_loss=0.219, pruned_loss=0.03766, over 19836.00 frames. ], tot_loss[loss=0.1512, simple_loss=0.2325, pruned_loss=0.03501, over 3797510.60 frames. 
], batch size: 171, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:43:30,260 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.500e+02 3.354e+02 4.128e+02 5.229e+02 1.136e+03, threshold=8.256e+02, percent-clipped=6.0 +2023-03-29 15:44:09,457 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0965, 3.0533, 3.2688, 2.6463, 3.3913, 2.8097, 3.1581, 3.3123], + device='cuda:0'), covar=tensor([0.0658, 0.0533, 0.0612, 0.0790, 0.0346, 0.0531, 0.0494, 0.0370], + device='cuda:0'), in_proj_covar=tensor([0.0084, 0.0094, 0.0090, 0.0115, 0.0085, 0.0090, 0.0086, 0.0081], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 15:44:32,834 INFO [train.py:892] (0/4) Epoch 42, batch 700, loss[loss=0.1469, simple_loss=0.2307, pruned_loss=0.03154, over 19649.00 frames. ], tot_loss[loss=0.1509, simple_loss=0.2324, pruned_loss=0.0347, over 3830696.09 frames. ], batch size: 57, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:46:12,040 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-03-29 15:46:33,795 INFO [train.py:892] (0/4) Epoch 42, batch 750, loss[loss=0.1691, simple_loss=0.2455, pruned_loss=0.0463, over 19646.00 frames. ], tot_loss[loss=0.1501, simple_loss=0.2318, pruned_loss=0.03422, over 3857106.37 frames. ], batch size: 79, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:47:36,697 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.174e+02 3.338e+02 3.927e+02 4.685e+02 7.677e+02, threshold=7.855e+02, percent-clipped=0.0 +2023-03-29 15:47:59,379 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76839.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:48:42,210 INFO [train.py:892] (0/4) Epoch 42, batch 800, loss[loss=0.1581, simple_loss=0.2319, pruned_loss=0.04218, over 19763.00 frames. ], tot_loss[loss=0.1511, simple_loss=0.2328, pruned_loss=0.03471, over 3877390.96 frames. ], batch size: 217, lr: 3.72e-03, grad_scale: 32.0 +2023-03-29 15:50:35,349 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76900.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:50:50,195 INFO [train.py:892] (0/4) Epoch 42, batch 850, loss[loss=0.1532, simple_loss=0.2266, pruned_loss=0.0399, over 19762.00 frames. ], tot_loss[loss=0.15, simple_loss=0.2318, pruned_loss=0.03412, over 3894040.66 frames. ], batch size: 155, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 15:51:39,855 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76925.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 15:51:54,865 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.468e+02 3.585e+02 3.941e+02 4.611e+02 9.107e+02, threshold=7.882e+02, percent-clipped=2.0 +2023-03-29 15:52:43,359 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76950.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:52:58,349 INFO [train.py:892] (0/4) Epoch 42, batch 900, loss[loss=0.1894, simple_loss=0.2778, pruned_loss=0.05048, over 19689.00 frames. ], tot_loss[loss=0.1508, simple_loss=0.2324, pruned_loss=0.03464, over 3906450.31 frames. 
], batch size: 56, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 15:53:41,251 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76973.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 15:53:56,886 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76979.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 15:54:43,469 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76998.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:55:01,511 INFO [train.py:892] (0/4) Epoch 42, batch 950, loss[loss=0.1662, simple_loss=0.2428, pruned_loss=0.04484, over 19808.00 frames. ], tot_loss[loss=0.1513, simple_loss=0.2327, pruned_loss=0.03491, over 3916680.13 frames. ], batch size: 47, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 15:55:44,701 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77027.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 15:55:53,567 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.093e+02 3.626e+02 4.310e+02 4.989e+02 8.639e+02, threshold=8.620e+02, percent-clipped=2.0 +2023-03-29 15:56:13,275 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1148, 3.1671, 3.3273, 3.4703, 3.1049, 3.1406, 3.0932, 3.3738], + device='cuda:0'), covar=tensor([0.0333, 0.0477, 0.0355, 0.0247, 0.0419, 0.0386, 0.0391, 0.0342], + device='cuda:0'), in_proj_covar=tensor([0.0097, 0.0092, 0.0094, 0.0088, 0.0101, 0.0094, 0.0110, 0.0082], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 15:56:59,629 INFO [train.py:892] (0/4) Epoch 42, batch 1000, loss[loss=0.1378, simple_loss=0.2163, pruned_loss=0.02961, over 19759.00 frames. ], tot_loss[loss=0.1516, simple_loss=0.2329, pruned_loss=0.03512, over 3923789.37 frames. ], batch size: 100, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 15:58:22,479 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5696, 4.4229, 4.3674, 4.1104, 4.5823, 3.1074, 3.7291, 2.2441], + device='cuda:0'), covar=tensor([0.0273, 0.0243, 0.0203, 0.0243, 0.0244, 0.1221, 0.0942, 0.1908], + device='cuda:0'), in_proj_covar=tensor([0.0109, 0.0153, 0.0117, 0.0139, 0.0123, 0.0139, 0.0144, 0.0132], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 15:58:37,313 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77095.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 15:59:02,971 INFO [train.py:892] (0/4) Epoch 42, batch 1050, loss[loss=0.1441, simple_loss=0.229, pruned_loss=0.02959, over 19855.00 frames. ], tot_loss[loss=0.1519, simple_loss=0.2335, pruned_loss=0.03521, over 3928740.48 frames. ], batch size: 85, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 16:00:02,946 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.508e+02 3.618e+02 4.248e+02 5.006e+02 7.833e+02, threshold=8.496e+02, percent-clipped=0.0 +2023-03-29 16:01:07,353 INFO [train.py:892] (0/4) Epoch 42, batch 1100, loss[loss=0.1549, simple_loss=0.2347, pruned_loss=0.03752, over 19582.00 frames. ], tot_loss[loss=0.152, simple_loss=0.2337, pruned_loss=0.03516, over 3934269.24 frames. 
], batch size: 53, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 16:01:08,451 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77156.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 16:02:44,937 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77195.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:03:12,069 INFO [train.py:892] (0/4) Epoch 42, batch 1150, loss[loss=0.1452, simple_loss=0.2227, pruned_loss=0.03386, over 19802.00 frames. ], tot_loss[loss=0.1513, simple_loss=0.2327, pruned_loss=0.03492, over 3938089.14 frames. ], batch size: 148, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 16:03:27,109 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77211.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:04:16,668 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.545e+02 3.808e+02 4.258e+02 4.963e+02 8.566e+02, threshold=8.515e+02, percent-clipped=1.0 +2023-03-29 16:05:19,674 INFO [train.py:892] (0/4) Epoch 42, batch 1200, loss[loss=0.1556, simple_loss=0.2413, pruned_loss=0.03493, over 19754.00 frames. ], tot_loss[loss=0.153, simple_loss=0.2346, pruned_loss=0.03568, over 3938498.26 frames. ], batch size: 276, lr: 3.71e-03, grad_scale: 32.0 +2023-03-29 16:06:01,745 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77272.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:06:03,887 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77273.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:06:16,083 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77277.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:06:20,884 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6069, 4.5178, 4.9245, 4.4877, 4.0601, 4.7172, 4.5882, 5.0260], + device='cuda:0'), covar=tensor([0.0720, 0.0373, 0.0347, 0.0391, 0.0996, 0.0502, 0.0481, 0.0311], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0230, 0.0230, 0.0241, 0.0212, 0.0253, 0.0242, 0.0228], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 16:06:36,249 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8417, 3.7545, 4.0880, 3.7435, 3.5029, 3.9915, 3.8426, 4.1587], + device='cuda:0'), covar=tensor([0.0734, 0.0375, 0.0360, 0.0407, 0.1240, 0.0579, 0.0495, 0.0365], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0230, 0.0230, 0.0241, 0.0212, 0.0254, 0.0242, 0.0228], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 16:06:46,404 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-03-29 16:07:30,234 INFO [train.py:892] (0/4) Epoch 42, batch 1250, loss[loss=0.1436, simple_loss=0.2219, pruned_loss=0.03264, over 19726.00 frames. ], tot_loss[loss=0.1517, simple_loss=0.2331, pruned_loss=0.0352, over 3941535.10 frames. 
], batch size: 63, lr: 3.70e-03, grad_scale: 32.0 +2023-03-29 16:08:30,830 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.435e+02 3.557e+02 4.199e+02 4.989e+02 7.948e+02, threshold=8.398e+02, percent-clipped=0.0 +2023-03-29 16:08:39,506 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77334.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:08:50,797 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77338.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:09:35,002 INFO [train.py:892] (0/4) Epoch 42, batch 1300, loss[loss=0.1449, simple_loss=0.228, pruned_loss=0.03096, over 19592.00 frames. ], tot_loss[loss=0.1505, simple_loss=0.2319, pruned_loss=0.0345, over 3945522.87 frames. ], batch size: 42, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:11:05,244 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.5140, 5.7447, 5.7960, 5.6509, 5.5230, 5.7672, 5.1483, 5.1752], + device='cuda:0'), covar=tensor([0.0411, 0.0416, 0.0405, 0.0419, 0.0493, 0.0421, 0.0663, 0.1080], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0311, 0.0321, 0.0280, 0.0290, 0.0270, 0.0285, 0.0334], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 16:11:31,837 INFO [train.py:892] (0/4) Epoch 42, batch 1350, loss[loss=0.1343, simple_loss=0.2143, pruned_loss=0.02718, over 19724.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2317, pruned_loss=0.03447, over 3946664.82 frames. ], batch size: 104, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:12:11,727 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. limit=2.0 +2023-03-29 16:12:23,039 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.53 vs. limit=2.0 +2023-03-29 16:12:30,507 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.502e+02 3.333e+02 3.969e+02 4.891e+02 7.952e+02, threshold=7.937e+02, percent-clipped=0.0 +2023-03-29 16:13:20,619 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77451.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 16:13:34,048 INFO [train.py:892] (0/4) Epoch 42, batch 1400, loss[loss=0.1438, simple_loss=0.2297, pruned_loss=0.02891, over 19561.00 frames. ], tot_loss[loss=0.1492, simple_loss=0.2302, pruned_loss=0.03407, over 3946932.76 frames. ], batch size: 47, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:14:41,765 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.49 vs. limit=5.0 +2023-03-29 16:15:10,423 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77495.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:15:36,588 INFO [train.py:892] (0/4) Epoch 42, batch 1450, loss[loss=0.1344, simple_loss=0.2177, pruned_loss=0.02554, over 19874.00 frames. ], tot_loss[loss=0.1507, simple_loss=0.2318, pruned_loss=0.03478, over 3948646.11 frames. ], batch size: 89, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:16:40,347 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.038e+02 3.421e+02 4.179e+02 5.316e+02 8.524e+02, threshold=8.358e+02, percent-clipped=3.0 +2023-03-29 16:17:09,869 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77543.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:17:44,775 INFO [train.py:892] (0/4) Epoch 42, batch 1500, loss[loss=0.1508, simple_loss=0.2277, pruned_loss=0.03694, over 19878.00 frames. 
], tot_loss[loss=0.15, simple_loss=0.2311, pruned_loss=0.03447, over 3949671.17 frames. ], batch size: 136, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:18:15,546 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77567.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:18:33,088 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.9931, 1.8469, 1.9909, 2.0238, 1.9913, 1.9954, 1.9097, 2.0387], + device='cuda:0'), covar=tensor([0.0435, 0.0485, 0.0410, 0.0399, 0.0498, 0.0397, 0.0577, 0.0390], + device='cuda:0'), in_proj_covar=tensor([0.0097, 0.0091, 0.0093, 0.0088, 0.0100, 0.0094, 0.0110, 0.0082], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 16:19:13,545 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8514, 2.8416, 2.9071, 2.3364, 2.9848, 2.5401, 2.8797, 2.9144], + device='cuda:0'), covar=tensor([0.0705, 0.0543, 0.0579, 0.0840, 0.0392, 0.0542, 0.0473, 0.0421], + device='cuda:0'), in_proj_covar=tensor([0.0084, 0.0094, 0.0090, 0.0114, 0.0085, 0.0089, 0.0087, 0.0082], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 16:19:49,119 INFO [train.py:892] (0/4) Epoch 42, batch 1550, loss[loss=0.1509, simple_loss=0.2369, pruned_loss=0.03244, over 19735.00 frames. ], tot_loss[loss=0.1499, simple_loss=0.2314, pruned_loss=0.03423, over 3948604.92 frames. ], batch size: 51, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:20:14,884 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77616.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:20:50,088 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77629.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:20:56,259 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.329e+02 3.286e+02 3.992e+02 4.919e+02 1.011e+03, threshold=7.983e+02, percent-clipped=1.0 +2023-03-29 16:20:59,589 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77633.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:21:54,208 INFO [train.py:892] (0/4) Epoch 42, batch 1600, loss[loss=0.1555, simple_loss=0.2357, pruned_loss=0.03765, over 19544.00 frames. ], tot_loss[loss=0.1498, simple_loss=0.2313, pruned_loss=0.03416, over 3948833.81 frames. ], batch size: 41, lr: 3.70e-03, grad_scale: 16.0 +2023-03-29 16:22:46,307 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77677.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:22:46,353 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77677.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:23:39,791 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2326, 2.5361, 3.4927, 2.8918, 2.9587, 2.8783, 2.1832, 2.3605], + device='cuda:0'), covar=tensor([0.1307, 0.2912, 0.0770, 0.1266, 0.2006, 0.1623, 0.2770, 0.2752], + device='cuda:0'), in_proj_covar=tensor([0.0357, 0.0401, 0.0357, 0.0298, 0.0380, 0.0396, 0.0388, 0.0363], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 16:23:59,701 INFO [train.py:892] (0/4) Epoch 42, batch 1650, loss[loss=0.1441, simple_loss=0.2171, pruned_loss=0.03551, over 19835.00 frames. ], tot_loss[loss=0.1499, simple_loss=0.2311, pruned_loss=0.0344, over 3950084.17 frames. 
], batch size: 177, lr: 3.70e-03, grad_scale: 16.0
+2023-03-29 16:25:04,057 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.346e+02 3.655e+02 4.276e+02 5.251e+02 1.145e+03, threshold=8.553e+02, percent-clipped=2.0
+2023-03-29 16:25:19,085 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77738.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 16:25:51,764 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77751.0, num_to_drop=1, layers_to_drop={1}
+2023-03-29 16:26:01,326 INFO [train.py:892] (0/4) Epoch 42, batch 1700, loss[loss=0.1497, simple_loss=0.236, pruned_loss=0.0317, over 19862.00 frames. ], tot_loss[loss=0.1505, simple_loss=0.2318, pruned_loss=0.03464, over 3950554.64 frames. ], batch size: 99, lr: 3.69e-03, grad_scale: 16.0
+2023-03-29 16:26:21,885 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. limit=2.0
+2023-03-29 16:27:35,275 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77799.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 16:27:51,252 INFO [train.py:892] (0/4) Epoch 42, batch 1750, loss[loss=0.1774, simple_loss=0.2544, pruned_loss=0.05022, over 19653.00 frames. ], tot_loss[loss=0.1514, simple_loss=0.233, pruned_loss=0.03491, over 3948056.85 frames. ], batch size: 343, lr: 3.69e-03, grad_scale: 16.0
+2023-03-29 16:28:37,926 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7380, 2.8025, 1.8650, 3.0977, 2.9011, 3.0390, 3.1316, 2.5987],
+       device='cuda:0'), covar=tensor([0.0787, 0.0856, 0.1593, 0.0823, 0.0692, 0.0581, 0.0623, 0.0962],
+       device='cuda:0'), in_proj_covar=tensor([0.0152, 0.0152, 0.0148, 0.0163, 0.0142, 0.0147, 0.0157, 0.0155],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0003],
+       device='cuda:0')
+2023-03-29 16:28:49,554 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.255e+02 3.364e+02 4.068e+02 4.963e+02 7.822e+02, threshold=8.136e+02, percent-clipped=0.0
+2023-03-29 16:29:41,557 INFO [train.py:892] (0/4) Epoch 42, batch 1800, loss[loss=0.1523, simple_loss=0.2347, pruned_loss=0.03495, over 19642.00 frames. ], tot_loss[loss=0.1511, simple_loss=0.2325, pruned_loss=0.03487, over 3948454.44 frames. ], batch size: 66, lr: 3.69e-03, grad_scale: 16.0
+2023-03-29 16:30:04,969 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77867.0, num_to_drop=0, layers_to_drop=set()
+2023-03-29 16:31:23,150 INFO [train.py:892] (0/4) Epoch 42, batch 1850, loss[loss=0.1668, simple_loss=0.2513, pruned_loss=0.04111, over 19811.00 frames. ], tot_loss[loss=0.1526, simple_loss=0.2349, pruned_loss=0.03515, over 3948020.02 frames. ], batch size: 57, lr: 3.69e-03, grad_scale: 16.0
+2023-03-29 16:31:30,759 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-42.pt
+2023-03-29 16:32:27,850 INFO [train.py:892] (0/4) Epoch 43, batch 0, loss[loss=0.1557, simple_loss=0.235, pruned_loss=0.03825, over 19800.00 frames. ], tot_loss[loss=0.1557, simple_loss=0.235, pruned_loss=0.03825, over 19800.00 frames. ], batch size: 200, lr: 3.65e-03, grad_scale: 16.0
+2023-03-29 16:32:27,852 INFO [train.py:917] (0/4) Computing validation loss
+2023-03-29 16:33:04,435 INFO [train.py:926] (0/4) Epoch 43, validation: loss=0.1873, simple_loss=0.2496, pruned_loss=0.06254, over 2883724.00 frames.
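
A note on reading these records: each loss[...] field reports the current batch only, while tot_loss[...] aggregates over the frame count shown, and the printed batch numbers advance by the 50-batch logging interval. The objective is reported in two parts: simple_loss, from a cheap linear joiner whose gradients supply the pruning bounds, and pruned_loss, from the full joiner evaluated only inside those bounds. The records in this log are consistent with a steady-state combination of loss = 0.5 * simple_loss + pruned_loss (for the validation record just above, 0.5 * 0.2496 + 0.06254 ≈ 0.1873). Below is a minimal sketch of such a weighting with a warm-up ramp; the function name, the warm_step default, and the 0.1-to-1.0 ramp are illustrative assumptions, not the exact code behind this log:

import torch

def combine_transducer_losses(
    simple_loss: torch.Tensor,
    pruned_loss: torch.Tensor,
    batch_idx_train: int,
    warm_step: int = 2000,           # assumed warm-up length
    simple_loss_scale: float = 0.5,  # steady-state weight, consistent with the records above
) -> torch.Tensor:
    # Early in training, weight the cheap simple loss heavily (its gradients
    # define the pruning bounds) and ramp in the pruned full-joiner loss;
    # after warm_step batches, settle at loss = 0.5 * simple + 1.0 * pruned.
    if batch_idx_train < warm_step:
        frac = batch_idx_train / warm_step
        s = 1.0 - frac * (1.0 - simple_loss_scale)  # decays 1.0 -> 0.5
        p = 0.1 + 0.9 * frac                        # grows 0.1 -> 1.0 (assumed ramp)
    else:
        s, p = simple_loss_scale, 1.0
    return s * simple_loss + p * pruned_loss

Under this reading, a falling tot_loss with a roughly stable simple_loss/pruned_loss ratio, as seen across the epoch 41-43 records here, indicates both joiners improving together rather than the pruned path compensating for the simple one.
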
+2023-03-29 16:33:04,438 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-29 16:33:16,339 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77915.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:33:38,736 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.3403, 2.0345, 3.3995, 2.7874, 3.4828, 3.5260, 3.2569, 3.3620], + device='cuda:0'), covar=tensor([0.0954, 0.1238, 0.0147, 0.0448, 0.0179, 0.0256, 0.0241, 0.0224], + device='cuda:0'), in_proj_covar=tensor([0.0105, 0.0108, 0.0093, 0.0155, 0.0092, 0.0104, 0.0095, 0.0092], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 16:33:52,264 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77929.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:33:58,720 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.066e+02 3.128e+02 3.651e+02 4.371e+02 8.409e+02, threshold=7.303e+02, percent-clipped=1.0 +2023-03-29 16:34:01,655 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77933.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:35:13,096 INFO [train.py:892] (0/4) Epoch 43, batch 50, loss[loss=0.2274, simple_loss=0.3129, pruned_loss=0.07089, over 19382.00 frames. ], tot_loss[loss=0.1468, simple_loss=0.2273, pruned_loss=0.03316, over 890013.86 frames. ], batch size: 431, lr: 3.65e-03, grad_scale: 16.0 +2023-03-29 16:35:44,391 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77972.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:35:55,550 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77977.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:36:04,773 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77981.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:36:54,841 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-78000.pt +2023-03-29 16:37:24,589 INFO [train.py:892] (0/4) Epoch 43, batch 100, loss[loss=0.1591, simple_loss=0.2323, pruned_loss=0.04293, over 19825.00 frames. ], tot_loss[loss=0.1479, simple_loss=0.2284, pruned_loss=0.03369, over 1569086.17 frames. ], batch size: 128, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:37:32,332 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-29 16:38:13,994 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.210e+02 3.457e+02 4.175e+02 5.002e+02 8.622e+02, threshold=8.350e+02, percent-clipped=3.0 +2023-03-29 16:38:19,746 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78033.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:39:26,776 INFO [train.py:892] (0/4) Epoch 43, batch 150, loss[loss=0.1447, simple_loss=0.2318, pruned_loss=0.02881, over 19882.00 frames. ], tot_loss[loss=0.1462, simple_loss=0.2276, pruned_loss=0.0324, over 2097127.03 frames. ], batch size: 47, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:41:34,991 INFO [train.py:892] (0/4) Epoch 43, batch 200, loss[loss=0.1483, simple_loss=0.2273, pruned_loss=0.03464, over 19821.00 frames. ], tot_loss[loss=0.147, simple_loss=0.2288, pruned_loss=0.03264, over 2508590.20 frames. 
], batch size: 72, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:42:26,474 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.115e+02 3.506e+02 4.148e+02 4.888e+02 9.722e+02, threshold=8.295e+02, percent-clipped=1.0 +2023-03-29 16:42:32,020 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.0302, 2.0016, 3.0663, 2.4174, 3.2221, 3.2425, 2.9004, 3.1249], + device='cuda:0'), covar=tensor([0.1224, 0.1329, 0.0142, 0.0429, 0.0178, 0.0253, 0.0289, 0.0221], + device='cuda:0'), in_proj_covar=tensor([0.0105, 0.0108, 0.0093, 0.0156, 0.0092, 0.0105, 0.0095, 0.0092], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 16:42:49,272 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.73 vs. limit=5.0 +2023-03-29 16:42:51,449 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7919, 3.5389, 3.9394, 3.1155, 4.1500, 3.3822, 3.6773, 3.9683], + device='cuda:0'), covar=tensor([0.0791, 0.0445, 0.0454, 0.0685, 0.0348, 0.0426, 0.0421, 0.0309], + device='cuda:0'), in_proj_covar=tensor([0.0085, 0.0094, 0.0090, 0.0114, 0.0085, 0.0089, 0.0087, 0.0082], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 16:43:34,477 INFO [train.py:892] (0/4) Epoch 43, batch 250, loss[loss=0.1517, simple_loss=0.2316, pruned_loss=0.03595, over 19750.00 frames. ], tot_loss[loss=0.1472, simple_loss=0.2294, pruned_loss=0.03251, over 2828649.55 frames. ], batch size: 273, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:44:12,073 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9250, 2.8551, 2.9978, 2.3806, 2.9789, 2.5721, 2.9969, 2.9409], + device='cuda:0'), covar=tensor([0.0545, 0.0473, 0.0492, 0.0835, 0.0428, 0.0584, 0.0457, 0.0451], + device='cuda:0'), in_proj_covar=tensor([0.0084, 0.0093, 0.0090, 0.0114, 0.0085, 0.0089, 0.0086, 0.0081], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 16:45:33,931 INFO [train.py:892] (0/4) Epoch 43, batch 300, loss[loss=0.1515, simple_loss=0.2446, pruned_loss=0.02922, over 19651.00 frames. ], tot_loss[loss=0.1484, simple_loss=0.2309, pruned_loss=0.03296, over 3075887.35 frames. ], batch size: 57, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:46:28,805 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.098e+02 3.467e+02 4.146e+02 4.960e+02 1.292e+03, threshold=8.293e+02, percent-clipped=2.0 +2023-03-29 16:46:29,783 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78232.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:47:40,847 INFO [train.py:892] (0/4) Epoch 43, batch 350, loss[loss=0.1466, simple_loss=0.2175, pruned_loss=0.03783, over 19797.00 frames. ], tot_loss[loss=0.1507, simple_loss=0.2327, pruned_loss=0.03433, over 3268997.36 frames. 
], batch size: 45, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:48:08,285 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78272.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:48:08,433 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6348, 2.9792, 2.5395, 2.1692, 2.6454, 2.9104, 2.8541, 2.8946], + device='cuda:0'), covar=tensor([0.0398, 0.0285, 0.0367, 0.0565, 0.0416, 0.0266, 0.0288, 0.0276], + device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0108, 0.0110, 0.0109, 0.0112, 0.0098, 0.0099, 0.0098], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 16:48:59,184 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78293.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 16:49:45,823 INFO [train.py:892] (0/4) Epoch 43, batch 400, loss[loss=0.1338, simple_loss=0.2175, pruned_loss=0.02502, over 19598.00 frames. ], tot_loss[loss=0.1505, simple_loss=0.2321, pruned_loss=0.03439, over 3421042.37 frames. ], batch size: 45, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:50:09,005 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78320.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:50:25,900 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.27 vs. limit=5.0 +2023-03-29 16:50:37,820 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.238e+02 3.252e+02 3.876e+02 4.451e+02 9.041e+02, threshold=7.752e+02, percent-clipped=1.0 +2023-03-29 16:50:42,508 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78333.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:51:24,055 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.6018, 5.0845, 5.2190, 4.9664, 5.5534, 3.4842, 4.3210, 3.0747], + device='cuda:0'), covar=tensor([0.0158, 0.0193, 0.0133, 0.0180, 0.0115, 0.0819, 0.0930, 0.1208], + device='cuda:0'), in_proj_covar=tensor([0.0109, 0.0152, 0.0118, 0.0140, 0.0123, 0.0138, 0.0145, 0.0132], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 16:51:50,610 INFO [train.py:892] (0/4) Epoch 43, batch 450, loss[loss=0.1323, simple_loss=0.2121, pruned_loss=0.02619, over 19847.00 frames. ], tot_loss[loss=0.1496, simple_loss=0.2313, pruned_loss=0.034, over 3538670.02 frames. ], batch size: 109, lr: 3.64e-03, grad_scale: 16.0 +2023-03-29 16:52:42,352 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78381.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:53:55,326 INFO [train.py:892] (0/4) Epoch 43, batch 500, loss[loss=0.1395, simple_loss=0.2183, pruned_loss=0.03036, over 19482.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.23, pruned_loss=0.03385, over 3629734.14 frames. 
], batch size: 43, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 16:54:44,910 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.422e+02 3.338e+02 4.015e+02 4.887e+02 8.174e+02, threshold=8.030e+02, percent-clipped=1.0 +2023-03-29 16:55:43,943 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6082, 2.2973, 3.7930, 3.2451, 3.7782, 3.8489, 3.5982, 3.6242], + device='cuda:0'), covar=tensor([0.0827, 0.1121, 0.0128, 0.0557, 0.0175, 0.0264, 0.0243, 0.0206], + device='cuda:0'), in_proj_covar=tensor([0.0105, 0.0107, 0.0092, 0.0154, 0.0091, 0.0104, 0.0094, 0.0091], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 16:55:57,173 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78460.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:55:58,346 INFO [train.py:892] (0/4) Epoch 43, batch 550, loss[loss=0.1453, simple_loss=0.2354, pruned_loss=0.02757, over 19832.00 frames. ], tot_loss[loss=0.1492, simple_loss=0.2301, pruned_loss=0.03414, over 3702659.29 frames. ], batch size: 52, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 16:56:38,629 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.8016, 6.0636, 6.1425, 5.9738, 5.8862, 6.0845, 5.4445, 5.4946], + device='cuda:0'), covar=tensor([0.0430, 0.0432, 0.0452, 0.0422, 0.0510, 0.0476, 0.0724, 0.0991], + device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0312, 0.0321, 0.0281, 0.0290, 0.0274, 0.0285, 0.0335], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 16:58:01,189 INFO [train.py:892] (0/4) Epoch 43, batch 600, loss[loss=0.1462, simple_loss=0.2317, pruned_loss=0.03037, over 19579.00 frames. ], tot_loss[loss=0.1501, simple_loss=0.2308, pruned_loss=0.03466, over 3757596.39 frames. ], batch size: 53, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 16:58:29,214 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78521.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 16:58:57,256 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.184e+02 3.257e+02 4.249e+02 5.407e+02 1.207e+03, threshold=8.497e+02, percent-clipped=3.0 +2023-03-29 16:59:52,087 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78556.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:00:06,555 INFO [train.py:892] (0/4) Epoch 43, batch 650, loss[loss=0.1539, simple_loss=0.2355, pruned_loss=0.03616, over 19768.00 frames. ], tot_loss[loss=0.15, simple_loss=0.2307, pruned_loss=0.03463, over 3800197.53 frames. ], batch size: 273, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 17:01:08,088 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78588.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 17:01:26,675 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-29 17:02:02,170 INFO [train.py:892] (0/4) Epoch 43, batch 700, loss[loss=0.1895, simple_loss=0.2814, pruned_loss=0.04875, over 19610.00 frames. ], tot_loss[loss=0.15, simple_loss=0.2308, pruned_loss=0.0346, over 3834804.55 frames. 
], batch size: 359, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 17:02:16,453 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78617.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:02:54,401 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.290e+02 3.555e+02 4.216e+02 4.848e+02 9.327e+02, threshold=8.432e+02, percent-clipped=1.0 +2023-03-29 17:03:59,434 INFO [train.py:892] (0/4) Epoch 43, batch 750, loss[loss=0.1331, simple_loss=0.208, pruned_loss=0.02912, over 19743.00 frames. ], tot_loss[loss=0.1495, simple_loss=0.2306, pruned_loss=0.03423, over 3859558.66 frames. ], batch size: 139, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 17:04:15,181 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3767, 5.6830, 5.7476, 5.6008, 5.4317, 5.7125, 5.1276, 5.1777], + device='cuda:0'), covar=tensor([0.0466, 0.0469, 0.0417, 0.0472, 0.0499, 0.0437, 0.0686, 0.0956], + device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0312, 0.0320, 0.0281, 0.0290, 0.0274, 0.0285, 0.0334], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 17:05:07,533 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.48 vs. limit=5.0 +2023-03-29 17:05:55,906 INFO [train.py:892] (0/4) Epoch 43, batch 800, loss[loss=0.138, simple_loss=0.2337, pruned_loss=0.02112, over 19814.00 frames. ], tot_loss[loss=0.1482, simple_loss=0.2294, pruned_loss=0.03348, over 3880577.95 frames. ], batch size: 50, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 17:06:42,293 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.297e+02 3.365e+02 3.945e+02 4.793e+02 1.520e+03, threshold=7.889e+02, percent-clipped=1.0 +2023-03-29 17:07:20,454 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0617, 3.1341, 2.0446, 3.2038, 3.2463, 1.6734, 2.7381, 2.6125], + device='cuda:0'), covar=tensor([0.0898, 0.0850, 0.2588, 0.0786, 0.0710, 0.2459, 0.1151, 0.1022], + device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0273, 0.0242, 0.0293, 0.0271, 0.0211, 0.0248, 0.0213], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 17:07:27,141 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.0067, 2.2553, 2.0529, 1.5023, 2.1029, 2.1756, 2.0930, 2.1894], + device='cuda:0'), covar=tensor([0.0475, 0.0352, 0.0389, 0.0604, 0.0429, 0.0386, 0.0360, 0.0318], + device='cuda:0'), in_proj_covar=tensor([0.0114, 0.0107, 0.0108, 0.0108, 0.0111, 0.0097, 0.0098, 0.0097], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 17:07:46,791 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2259, 2.7698, 4.5107, 3.9156, 4.2635, 4.4761, 4.2738, 4.1849], + device='cuda:0'), covar=tensor([0.0639, 0.0956, 0.0109, 0.0674, 0.0185, 0.0228, 0.0186, 0.0179], + device='cuda:0'), in_proj_covar=tensor([0.0105, 0.0107, 0.0092, 0.0154, 0.0091, 0.0104, 0.0094, 0.0091], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 17:07:52,414 INFO [train.py:892] (0/4) Epoch 43, batch 850, loss[loss=0.1428, simple_loss=0.2235, pruned_loss=0.03101, over 19742.00 frames. ], tot_loss[loss=0.1492, simple_loss=0.2308, pruned_loss=0.03377, over 3895950.93 frames. 
], batch size: 89, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 17:09:09,382 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3788, 3.5136, 2.1366, 4.0428, 3.7640, 3.9981, 4.0256, 3.1925], + device='cuda:0'), covar=tensor([0.0653, 0.0670, 0.1589, 0.0625, 0.0549, 0.0471, 0.0699, 0.0830], + device='cuda:0'), in_proj_covar=tensor([0.0151, 0.0151, 0.0148, 0.0162, 0.0142, 0.0146, 0.0157, 0.0155], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-29 17:09:51,493 INFO [train.py:892] (0/4) Epoch 43, batch 900, loss[loss=0.1341, simple_loss=0.2056, pruned_loss=0.03134, over 19834.00 frames. ], tot_loss[loss=0.1495, simple_loss=0.2312, pruned_loss=0.03388, over 3906158.37 frames. ], batch size: 121, lr: 3.63e-03, grad_scale: 16.0 +2023-03-29 17:10:05,173 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78816.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:10:39,579 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.520e+02 3.462e+02 4.060e+02 5.051e+02 1.168e+03, threshold=8.120e+02, percent-clipped=4.0 +2023-03-29 17:10:40,603 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8207, 4.5733, 5.2101, 4.7134, 4.2538, 4.9988, 4.8510, 5.4188], + device='cuda:0'), covar=tensor([0.0934, 0.0468, 0.0383, 0.0401, 0.0807, 0.0450, 0.0498, 0.0303], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0233, 0.0233, 0.0245, 0.0215, 0.0257, 0.0246, 0.0231], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 17:11:18,625 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9862, 3.9642, 4.3063, 4.0637, 4.2501, 3.9286, 4.0659, 3.8404], + device='cuda:0'), covar=tensor([0.1484, 0.2010, 0.0979, 0.1420, 0.1137, 0.1051, 0.1938, 0.2265], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0357, 0.0383, 0.0320, 0.0294, 0.0299, 0.0378, 0.0407], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 17:11:49,222 INFO [train.py:892] (0/4) Epoch 43, batch 950, loss[loss=0.1545, simple_loss=0.2488, pruned_loss=0.03007, over 19824.00 frames. ], tot_loss[loss=0.1502, simple_loss=0.2321, pruned_loss=0.03412, over 3915288.87 frames. ], batch size: 57, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:12:56,192 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78888.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:13:38,423 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5397, 2.6760, 3.9707, 3.1663, 3.2465, 3.0746, 2.3003, 2.4288], + device='cuda:0'), covar=tensor([0.1197, 0.3098, 0.0598, 0.1176, 0.1878, 0.1616, 0.2803, 0.2988], + device='cuda:0'), in_proj_covar=tensor([0.0358, 0.0403, 0.0357, 0.0297, 0.0380, 0.0399, 0.0390, 0.0364], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 17:13:45,671 INFO [train.py:892] (0/4) Epoch 43, batch 1000, loss[loss=0.1433, simple_loss=0.2282, pruned_loss=0.02919, over 19568.00 frames. ], tot_loss[loss=0.1493, simple_loss=0.2311, pruned_loss=0.03375, over 3924586.41 frames. 
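
The zipformer.py:1454 dumps are per-head entropies of attention weight distributions, a diagnostic for heads that collapse to near-one-hot attention (entropy toward 0) or stay diffuse (entropy near the log of the key count). They are presumably computed along these lines; the helper below is illustrative, not the source:

import torch

def attention_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
    """attn_weights: (num_heads, query_len, key_len), rows summing to 1.
    Returns per-head entropy in nats, averaged over query positions."""
    eps = 1.0e-20
    ent = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
    return ent.mean(dim=-1)

w = torch.softmax(torch.randn(8, 50, 200), dim=-1)
# A uniform distribution over 200 keys would give log(200) ~ 5.3;
# random logits land a bit below that, peaked heads far lower.
print(attention_entropy(w))
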
], batch size: 53, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:13:48,965 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78912.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:14:35,083 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.508e+02 3.436e+02 3.973e+02 4.908e+02 9.421e+02, threshold=7.947e+02, percent-clipped=2.0 +2023-03-29 17:14:44,285 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78936.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:15:33,019 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1095, 4.2059, 2.5244, 4.4195, 4.6082, 2.1144, 3.8289, 3.5312], + device='cuda:0'), covar=tensor([0.0756, 0.0785, 0.2804, 0.0714, 0.0611, 0.2970, 0.1105, 0.0983], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0269, 0.0239, 0.0290, 0.0270, 0.0208, 0.0245, 0.0211], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 17:15:39,029 INFO [train.py:892] (0/4) Epoch 43, batch 1050, loss[loss=0.1398, simple_loss=0.2251, pruned_loss=0.02727, over 19745.00 frames. ], tot_loss[loss=0.1493, simple_loss=0.231, pruned_loss=0.03378, over 3930763.12 frames. ], batch size: 84, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:15:48,748 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78965.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:16:35,975 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-29 17:17:33,126 INFO [train.py:892] (0/4) Epoch 43, batch 1100, loss[loss=0.1325, simple_loss=0.2084, pruned_loss=0.02825, over 19853.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2318, pruned_loss=0.03438, over 3933279.44 frames. ], batch size: 115, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:18:09,687 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79026.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:18:23,402 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.685e+02 3.705e+02 4.386e+02 5.336e+02 1.015e+03, threshold=8.773e+02, percent-clipped=4.0 +2023-03-29 17:19:32,149 INFO [train.py:892] (0/4) Epoch 43, batch 1150, loss[loss=0.1537, simple_loss=0.243, pruned_loss=0.03223, over 19793.00 frames. ], tot_loss[loss=0.1507, simple_loss=0.232, pruned_loss=0.03469, over 3936273.71 frames. ], batch size: 111, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:20:47,386 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9951, 3.3070, 3.4571, 3.9416, 2.8229, 3.2711, 2.5256, 2.5446], + device='cuda:0'), covar=tensor([0.0519, 0.1724, 0.0966, 0.0453, 0.1888, 0.0899, 0.1372, 0.1640], + device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0325, 0.0254, 0.0213, 0.0249, 0.0216, 0.0224, 0.0220], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 17:21:27,649 INFO [train.py:892] (0/4) Epoch 43, batch 1200, loss[loss=0.1363, simple_loss=0.2161, pruned_loss=0.02821, over 19849.00 frames. ], tot_loss[loss=0.1506, simple_loss=0.2322, pruned_loss=0.03451, over 3938841.71 frames. 
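
The lr column decays smoothly with both batch count and epoch, consistent with an Eden-style schedule driven by lr_batches = 5000 and lr_epochs = 3.5 on the configured base_lr = 0.05. A sketch that reproduces the logged values, assuming the scheduler sees a zero-based epoch count:

def eden_lr(base_lr: float, batch: int, epoch: float,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    # Two inverse-fourth-root decay factors, one per batch count, one per epoch.
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# Around batch_count 79000 in (one-based) epoch 43:
print(f"{eden_lr(0.05, batch=79000, epoch=42):.2e}")  # -> 3.62e-03, as logged
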
], batch size: 118, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:21:40,515 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79116.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:22:16,942 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.729e+02 3.557e+02 4.069e+02 4.739e+02 8.583e+02, threshold=8.137e+02, percent-clipped=0.0 +2023-03-29 17:22:26,973 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.3673, 5.0738, 5.0649, 5.3439, 5.0114, 5.6078, 5.4766, 5.6901], + device='cuda:0'), covar=tensor([0.0658, 0.0384, 0.0468, 0.0383, 0.0792, 0.0403, 0.0519, 0.0322], + device='cuda:0'), in_proj_covar=tensor([0.0163, 0.0190, 0.0212, 0.0188, 0.0187, 0.0169, 0.0163, 0.0210], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 17:23:12,964 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1200, 4.8960, 4.9134, 5.1430, 4.8219, 5.3810, 5.3160, 5.4600], + device='cuda:0'), covar=tensor([0.0756, 0.0429, 0.0501, 0.0416, 0.0836, 0.0472, 0.0456, 0.0328], + device='cuda:0'), in_proj_covar=tensor([0.0163, 0.0190, 0.0212, 0.0188, 0.0187, 0.0169, 0.0163, 0.0210], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 17:23:25,135 INFO [train.py:892] (0/4) Epoch 43, batch 1250, loss[loss=0.1456, simple_loss=0.2226, pruned_loss=0.03428, over 19840.00 frames. ], tot_loss[loss=0.1499, simple_loss=0.232, pruned_loss=0.03396, over 3940600.50 frames. ], batch size: 160, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:23:32,211 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79164.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:23:49,119 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-03-29 17:25:19,898 INFO [train.py:892] (0/4) Epoch 43, batch 1300, loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.02913, over 19769.00 frames. ], tot_loss[loss=0.1513, simple_loss=0.2331, pruned_loss=0.03468, over 3942464.28 frames. ], batch size: 155, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:25:22,868 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79212.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:25:35,240 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8088, 4.6368, 5.1680, 4.6824, 4.1936, 4.8849, 4.8180, 5.2894], + device='cuda:0'), covar=tensor([0.0814, 0.0394, 0.0355, 0.0366, 0.0944, 0.0526, 0.0462, 0.0311], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0231, 0.0231, 0.0243, 0.0213, 0.0255, 0.0244, 0.0229], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 17:25:59,458 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-03-29 17:26:08,979 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.005e+02 3.409e+02 4.094e+02 5.234e+02 1.153e+03, threshold=8.189e+02, percent-clipped=5.0 +2023-03-29 17:26:57,349 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. 
limit=2.0 +2023-03-29 17:27:14,214 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79260.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:27:15,398 INFO [train.py:892] (0/4) Epoch 43, batch 1350, loss[loss=0.153, simple_loss=0.2302, pruned_loss=0.03788, over 19887.00 frames. ], tot_loss[loss=0.1516, simple_loss=0.2336, pruned_loss=0.03473, over 3943906.12 frames. ], batch size: 61, lr: 3.62e-03, grad_scale: 16.0 +2023-03-29 17:28:09,778 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79285.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 17:28:55,611 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1140, 3.2015, 3.3394, 3.3152, 3.1501, 3.2650, 3.0887, 3.3562], + device='cuda:0'), covar=tensor([0.0348, 0.0347, 0.0376, 0.0285, 0.0395, 0.0314, 0.0382, 0.0335], + device='cuda:0'), in_proj_covar=tensor([0.0097, 0.0091, 0.0094, 0.0088, 0.0101, 0.0094, 0.0110, 0.0082], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 17:29:06,871 INFO [train.py:892] (0/4) Epoch 43, batch 1400, loss[loss=0.139, simple_loss=0.2223, pruned_loss=0.02788, over 19756.00 frames. ], tot_loss[loss=0.1502, simple_loss=0.2321, pruned_loss=0.03418, over 3946224.86 frames. ], batch size: 188, lr: 3.61e-03, grad_scale: 16.0 +2023-03-29 17:29:30,181 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79321.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:29:50,125 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-03-29 17:29:56,355 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.584e+02 3.305e+02 4.038e+02 5.143e+02 6.996e+02, threshold=8.075e+02, percent-clipped=0.0 +2023-03-29 17:29:59,611 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79333.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:30:23,314 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7874, 5.0087, 5.0481, 4.9532, 4.7638, 5.0258, 4.5879, 4.5766], + device='cuda:0'), covar=tensor([0.0474, 0.0540, 0.0480, 0.0438, 0.0593, 0.0489, 0.0631, 0.1021], + device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0314, 0.0322, 0.0279, 0.0291, 0.0274, 0.0286, 0.0336], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 17:30:29,491 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79346.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 17:31:04,342 INFO [train.py:892] (0/4) Epoch 43, batch 1450, loss[loss=0.1449, simple_loss=0.2141, pruned_loss=0.03788, over 19782.00 frames. ], tot_loss[loss=0.1502, simple_loss=0.2321, pruned_loss=0.03413, over 3947518.64 frames. ], batch size: 168, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:31:12,918 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79364.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:31:37,422 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79375.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 17:32:19,995 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79394.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:32:48,069 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. 
limit=2.0 +2023-03-29 17:32:59,382 INFO [train.py:892] (0/4) Epoch 43, batch 1500, loss[loss=0.1287, simple_loss=0.2053, pruned_loss=0.02605, over 19767.00 frames. ], tot_loss[loss=0.1503, simple_loss=0.2322, pruned_loss=0.03422, over 3947501.84 frames. ], batch size: 155, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:33:29,201 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79425.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:33:41,754 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.542e+02 3.517e+02 4.248e+02 5.342e+02 8.488e+02, threshold=8.496e+02, percent-clipped=1.0 +2023-03-29 17:33:51,316 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79436.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 17:34:48,452 INFO [train.py:892] (0/4) Epoch 43, batch 1550, loss[loss=0.1267, simple_loss=0.2071, pruned_loss=0.02316, over 19804.00 frames. ], tot_loss[loss=0.15, simple_loss=0.2322, pruned_loss=0.03392, over 3946914.67 frames. ], batch size: 111, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:36:08,531 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0098, 5.2982, 5.3406, 5.1706, 4.9793, 5.2817, 4.7965, 4.7693], + device='cuda:0'), covar=tensor([0.0493, 0.0444, 0.0421, 0.0487, 0.0576, 0.0493, 0.0678, 0.1003], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0313, 0.0321, 0.0279, 0.0290, 0.0274, 0.0285, 0.0335], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 17:36:45,448 INFO [train.py:892] (0/4) Epoch 43, batch 1600, loss[loss=0.1478, simple_loss=0.2331, pruned_loss=0.03127, over 19717.00 frames. ], tot_loss[loss=0.1493, simple_loss=0.2315, pruned_loss=0.03359, over 3949529.22 frames. ], batch size: 61, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:37:34,253 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.484e+02 3.446e+02 3.965e+02 4.827e+02 6.779e+02, threshold=7.930e+02, percent-clipped=0.0 +2023-03-29 17:37:40,509 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-03-29 17:37:55,104 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2064, 5.4574, 5.5152, 5.3769, 5.2024, 5.4538, 4.9785, 4.9801], + device='cuda:0'), covar=tensor([0.0413, 0.0454, 0.0420, 0.0399, 0.0548, 0.0460, 0.0650, 0.0829], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0312, 0.0321, 0.0279, 0.0290, 0.0273, 0.0285, 0.0334], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 17:37:55,644 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-29 17:38:42,076 INFO [train.py:892] (0/4) Epoch 43, batch 1650, loss[loss=0.2093, simple_loss=0.2892, pruned_loss=0.06466, over 19608.00 frames. ], tot_loss[loss=0.149, simple_loss=0.2315, pruned_loss=0.0332, over 3945449.90 frames. 
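
The scaling.py:679 lines compare a whitening metric of a module's activations against a limit: the metric is 1.0 when each channel group's covariance is proportional to the identity and grows as variance concentrates in a few directions, so values such as 1.29 vs. limit=2.0 are benign, and corrective pressure only kicks in above the limit. One plausible formulation, offered as an assumption rather than the exact source:

import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    """x: (num_frames, num_channels). Returns a scalar >= 1 that equals 1
    iff each group's channel covariance is a multiple of the identity."""
    num_frames, num_channels = x.shape
    ch = num_channels // num_groups
    x = x.reshape(num_frames, num_groups, ch).transpose(0, 1)  # (groups, frames, ch)
    x = x - x.mean(dim=1, keepdim=True)
    cov = torch.matmul(x.transpose(1, 2), x) / num_frames      # (groups, ch, ch)
    mean_diag = cov.diagonal(dim1=-2, dim2=-1).mean()
    mean_sq_diag = torch.matmul(cov, cov).diagonal(dim1=-2, dim2=-1).mean()
    # Ratio of mean squared eigenvalue to squared mean eigenvalue, >= 1.
    return mean_sq_diag / (mean_diag ** 2 + 1.0e-20)

print(whitening_metric(torch.randn(1000, 96), num_groups=8))  # ~1.0 for white noise
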
], batch size: 367, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:39:40,467 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79585.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:39:57,675 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5404, 2.8899, 3.0172, 3.3709, 2.3547, 2.9554, 2.2265, 2.2671], + device='cuda:0'), covar=tensor([0.0567, 0.1479, 0.1049, 0.0537, 0.2047, 0.0820, 0.1371, 0.1627], + device='cuda:0'), in_proj_covar=tensor([0.0254, 0.0329, 0.0256, 0.0214, 0.0252, 0.0217, 0.0225, 0.0222], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 17:40:37,516 INFO [train.py:892] (0/4) Epoch 43, batch 1700, loss[loss=0.1516, simple_loss=0.2363, pruned_loss=0.03345, over 19706.00 frames. ], tot_loss[loss=0.1484, simple_loss=0.2307, pruned_loss=0.03309, over 3947486.88 frames. ], batch size: 81, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:41:01,051 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79621.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:41:26,787 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.071e+02 3.435e+02 4.157e+02 5.071e+02 8.490e+02, threshold=8.314e+02, percent-clipped=3.0 +2023-03-29 17:41:49,408 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79641.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 17:42:00,330 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79646.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:42:21,405 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79657.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:42:27,350 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.3773, 2.7024, 2.4250, 1.9657, 2.5307, 2.6416, 2.6246, 2.7129], + device='cuda:0'), covar=tensor([0.0460, 0.0378, 0.0386, 0.0644, 0.0409, 0.0372, 0.0376, 0.0302], + device='cuda:0'), in_proj_covar=tensor([0.0114, 0.0107, 0.0108, 0.0108, 0.0111, 0.0097, 0.0098, 0.0097], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 17:42:28,912 INFO [train.py:892] (0/4) Epoch 43, batch 1750, loss[loss=0.151, simple_loss=0.2298, pruned_loss=0.03606, over 19817.00 frames. ], tot_loss[loss=0.1493, simple_loss=0.2312, pruned_loss=0.03374, over 3947074.29 frames. ], batch size: 133, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:42:46,693 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79669.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:43:29,885 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79689.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:44:13,218 INFO [train.py:892] (0/4) Epoch 43, batch 1800, loss[loss=0.1647, simple_loss=0.2502, pruned_loss=0.03959, over 19702.00 frames. ], tot_loss[loss=0.1491, simple_loss=0.2308, pruned_loss=0.03364, over 3946735.37 frames. 
], batch size: 305, lr: 3.61e-03, grad_scale: 32.0 +2023-03-29 17:44:26,118 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79718.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:44:31,053 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79720.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:44:52,820 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79731.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 17:44:53,950 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.513e+02 3.637e+02 4.285e+02 5.199e+02 9.167e+02, threshold=8.570e+02, percent-clipped=2.0 +2023-03-29 17:45:48,630 INFO [train.py:892] (0/4) Epoch 43, batch 1850, loss[loss=0.1714, simple_loss=0.2645, pruned_loss=0.0391, over 19680.00 frames. ], tot_loss[loss=0.15, simple_loss=0.2326, pruned_loss=0.03371, over 3946698.74 frames. ], batch size: 55, lr: 3.60e-03, grad_scale: 32.0 +2023-03-29 17:45:50,917 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79762.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:45:56,453 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-43.pt +2023-03-29 17:46:59,233 INFO [train.py:892] (0/4) Epoch 44, batch 0, loss[loss=0.1686, simple_loss=0.2527, pruned_loss=0.04225, over 19706.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.2527, pruned_loss=0.04225, over 19706.00 frames. ], batch size: 310, lr: 3.56e-03, grad_scale: 32.0 +2023-03-29 17:46:59,234 INFO [train.py:917] (0/4) Computing validation loss +2023-03-29 17:47:18,656 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0705, 4.0971, 2.4173, 4.2577, 4.4327, 2.0323, 3.7228, 3.3638], + device='cuda:0'), covar=tensor([0.0677, 0.0782, 0.3010, 0.0764, 0.0657, 0.2819, 0.1004, 0.0931], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0271, 0.0241, 0.0293, 0.0272, 0.0210, 0.0247, 0.0212], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 17:47:24,841 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8442, 2.7770, 5.0052, 4.1864, 4.7195, 4.9246, 4.6382, 4.6181], + device='cuda:0'), covar=tensor([0.0553, 0.1117, 0.0096, 0.0753, 0.0146, 0.0185, 0.0165, 0.0160], + device='cuda:0'), in_proj_covar=tensor([0.0105, 0.0108, 0.0093, 0.0156, 0.0091, 0.0104, 0.0095, 0.0091], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 17:47:38,449 INFO [train.py:926] (0/4) Epoch 44, validation: loss=0.1877, simple_loss=0.2498, pruned_loss=0.06277, over 2883724.00 frames. +2023-03-29 17:47:38,450 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-29 17:49:41,871 INFO [train.py:892] (0/4) Epoch 44, batch 50, loss[loss=0.1365, simple_loss=0.2052, pruned_loss=0.03386, over 19844.00 frames. ], tot_loss[loss=0.1478, simple_loss=0.2281, pruned_loss=0.03377, over 891663.65 frames. 
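
Around the epoch boundary above, the loop saves exp/epoch-43.pt, opens epoch 44 with a validation pass (train.py:917/926), and reports peak CUDA memory; mid-epoch snapshots such as checkpoint-80000.pt a few lines below follow the same pattern at fixed batch intervals. A sketch of that bookkeeping (helper names are hypothetical, and reading MB as 10**6 bytes is an assumption):

import torch

def save_epoch_checkpoint(model, epoch,
                          exp_dir="pruned_transducer_stateless7_bbpe/exp"):
    # Matches the epoch-NN.pt naming in the checkpoint.py:75 lines.
    torch.save({"model": model.state_dict(), "epoch": epoch},
               f"{exp_dir}/epoch-{epoch}.pt")

def peak_memory_line():
    # Mirrors the "Maximum memory allocated so far is ...MB" report
    # (requires a CUDA machine).
    mb = torch.cuda.max_memory_allocated() // 10 ** 6
    return f"Maximum memory allocated so far is {mb}MB"
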
], batch size: 165, lr: 3.56e-03, grad_scale: 32.0 +2023-03-29 17:49:58,277 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79823.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:50:15,362 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.212e+02 3.427e+02 4.163e+02 5.028e+02 9.332e+02, threshold=8.326e+02, percent-clipped=1.0 +2023-03-29 17:51:32,091 INFO [train.py:892] (0/4) Epoch 44, batch 100, loss[loss=0.1569, simple_loss=0.2346, pruned_loss=0.03956, over 19781.00 frames. ], tot_loss[loss=0.1459, simple_loss=0.2267, pruned_loss=0.03256, over 1571198.35 frames. ], batch size: 193, lr: 3.56e-03, grad_scale: 32.0 +2023-03-29 17:51:58,161 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-29 17:52:09,762 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-03-29 17:52:12,058 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0634, 5.2169, 5.4920, 5.1931, 5.2870, 5.0829, 5.2098, 4.9625], + device='cuda:0'), covar=tensor([0.1399, 0.1427, 0.0727, 0.1294, 0.0665, 0.0826, 0.1699, 0.2001], + device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0354, 0.0382, 0.0317, 0.0293, 0.0297, 0.0375, 0.0406], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 17:52:14,365 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8537, 2.8511, 3.0062, 2.9712, 2.8475, 2.8775, 2.7604, 3.1131], + device='cuda:0'), covar=tensor([0.0343, 0.0409, 0.0322, 0.0288, 0.0428, 0.0365, 0.0456, 0.0299], + device='cuda:0'), in_proj_covar=tensor([0.0097, 0.0091, 0.0094, 0.0088, 0.0100, 0.0094, 0.0109, 0.0082], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 17:53:02,053 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1917, 4.0747, 4.0775, 4.2909, 4.1316, 4.4905, 4.1464, 4.3384], + device='cuda:0'), covar=tensor([0.0914, 0.0593, 0.0717, 0.0527, 0.0793, 0.0587, 0.0725, 0.0681], + device='cuda:0'), in_proj_covar=tensor([0.0161, 0.0187, 0.0208, 0.0186, 0.0184, 0.0167, 0.0161, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 17:53:33,235 INFO [train.py:892] (0/4) Epoch 44, batch 150, loss[loss=0.1328, simple_loss=0.2097, pruned_loss=0.02791, over 19832.00 frames. ], tot_loss[loss=0.1475, simple_loss=0.2285, pruned_loss=0.03321, over 2098893.00 frames. 
], batch size: 43, lr: 3.56e-03, grad_scale: 32.0 +2023-03-29 17:54:11,532 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.056e+02 3.280e+02 3.825e+02 4.551e+02 8.712e+02, threshold=7.650e+02, percent-clipped=2.0 +2023-03-29 17:54:33,921 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79941.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:54:33,968 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79941.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 17:54:37,859 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5889, 2.0249, 2.3693, 2.7846, 3.1479, 3.2161, 3.1058, 3.2102], + device='cuda:0'), covar=tensor([0.1093, 0.1859, 0.1513, 0.0813, 0.0562, 0.0401, 0.0488, 0.0532], + device='cuda:0'), in_proj_covar=tensor([0.0165, 0.0171, 0.0183, 0.0156, 0.0142, 0.0138, 0.0133, 0.0122], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 17:55:19,411 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3042, 4.6285, 2.6110, 4.8906, 5.0293, 2.2107, 4.0366, 3.4785], + device='cuda:0'), covar=tensor([0.0707, 0.0672, 0.2696, 0.0606, 0.0451, 0.2761, 0.1039, 0.0964], + device='cuda:0'), in_proj_covar=tensor([0.0241, 0.0267, 0.0238, 0.0289, 0.0268, 0.0207, 0.0244, 0.0209], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 17:55:37,674 INFO [train.py:892] (0/4) Epoch 44, batch 200, loss[loss=0.1275, simple_loss=0.2152, pruned_loss=0.01985, over 19745.00 frames. ], tot_loss[loss=0.1475, simple_loss=0.2289, pruned_loss=0.03308, over 2508703.34 frames. ], batch size: 106, lr: 3.56e-03, grad_scale: 32.0 +2023-03-29 17:56:34,702 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79989.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 17:56:34,737 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79989.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:57:00,981 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-80000.pt +2023-03-29 17:57:32,831 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80013.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:57:40,396 INFO [train.py:892] (0/4) Epoch 44, batch 250, loss[loss=0.1554, simple_loss=0.2338, pruned_loss=0.03851, over 19901.00 frames. ], tot_loss[loss=0.1476, simple_loss=0.229, pruned_loss=0.03308, over 2828247.18 frames. 
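
The grad_scale column is the dynamic loss scale for fp16 training: it sits at 16.0 through most of this stretch, doubles to 32.0 around the Epoch 43 batch 1450 summary after a long run of overflow-free steps, and falls back to 16.0 before the Epoch 44 batch 250 summary once a step produces inf/nan gradients. The standard PyTorch mechanism looks like this; the interval and factor values shown are library defaults, not read from this run:

import torch

scaler = torch.cuda.amp.GradScaler(
    init_scale=16.0,       # matches the long-running grad_scale above
    growth_factor=2.0,     # 16.0 -> 32.0 after a clean growth interval
    backoff_factor=0.5,    # 32.0 -> 16.0 when a step hits inf/nan grads
    growth_interval=2000,  # PyTorch default, assumed here
)

# Typical fp16 step (model, optimizer, and batch assumed to exist):
#   with torch.cuda.amp.autocast():
#       loss = compute_loss(model, batch)
#   scaler.scale(loss).backward()
#   scaler.step(optimizer)
#   scaler.update()   # grows or backs off the scale here
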
], batch size: 62, lr: 3.56e-03, grad_scale: 16.0 +2023-03-29 17:57:53,527 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80020.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:58:17,202 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80031.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 17:58:20,263 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.104e+02 3.486e+02 4.096e+02 4.877e+02 1.090e+03, threshold=8.193e+02, percent-clipped=2.0 +2023-03-29 17:58:30,391 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80037.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 17:59:38,585 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4351, 3.5517, 2.3673, 4.0991, 3.8086, 4.1426, 4.1814, 3.3042], + device='cuda:0'), covar=tensor([0.0621, 0.0565, 0.1460, 0.0663, 0.0596, 0.0407, 0.0561, 0.0825], + device='cuda:0'), in_proj_covar=tensor([0.0151, 0.0151, 0.0147, 0.0161, 0.0141, 0.0147, 0.0156, 0.0155], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 17:59:39,668 INFO [train.py:892] (0/4) Epoch 44, batch 300, loss[loss=0.1378, simple_loss=0.2121, pruned_loss=0.03173, over 19727.00 frames. ], tot_loss[loss=0.148, simple_loss=0.2294, pruned_loss=0.03325, over 3076743.45 frames. ], batch size: 63, lr: 3.56e-03, grad_scale: 16.0 +2023-03-29 17:59:44,398 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80068.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:00:09,762 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80079.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 18:01:38,222 INFO [train.py:892] (0/4) Epoch 44, batch 350, loss[loss=0.1329, simple_loss=0.2134, pruned_loss=0.02616, over 19851.00 frames. ], tot_loss[loss=0.1493, simple_loss=0.2311, pruned_loss=0.03372, over 3267908.44 frames. ], batch size: 81, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:01:43,868 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80118.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:02:20,936 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.499e+02 3.590e+02 4.276e+02 5.127e+02 9.462e+02, threshold=8.552e+02, percent-clipped=1.0 +2023-03-29 18:03:14,240 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6838, 3.5235, 3.8272, 2.7614, 3.9117, 3.2458, 3.6024, 3.7421], + device='cuda:0'), covar=tensor([0.0604, 0.0490, 0.0533, 0.0848, 0.0420, 0.0464, 0.0431, 0.0361], + device='cuda:0'), in_proj_covar=tensor([0.0085, 0.0094, 0.0091, 0.0114, 0.0086, 0.0089, 0.0086, 0.0082], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 18:03:35,931 INFO [train.py:892] (0/4) Epoch 44, batch 400, loss[loss=0.1421, simple_loss=0.2138, pruned_loss=0.03517, over 19777.00 frames. ], tot_loss[loss=0.1488, simple_loss=0.2303, pruned_loss=0.03367, over 3420097.68 frames. ], batch size: 46, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:05:38,855 INFO [train.py:892] (0/4) Epoch 44, batch 450, loss[loss=0.2222, simple_loss=0.297, pruned_loss=0.07371, over 19617.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2304, pruned_loss=0.03346, over 3537799.09 frames. 
], batch size: 351, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:05:55,549 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8343, 3.5440, 3.8583, 2.8905, 3.9473, 3.3076, 3.5881, 3.8039], + device='cuda:0'), covar=tensor([0.0523, 0.0438, 0.0591, 0.0798, 0.0404, 0.0435, 0.0507, 0.0364], + device='cuda:0'), in_proj_covar=tensor([0.0085, 0.0094, 0.0091, 0.0114, 0.0086, 0.0089, 0.0086, 0.0082], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 18:06:18,567 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.753e+02 3.555e+02 3.976e+02 4.443e+02 7.591e+02, threshold=7.952e+02, percent-clipped=1.0 +2023-03-29 18:06:38,924 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80241.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:07:34,122 INFO [train.py:892] (0/4) Epoch 44, batch 500, loss[loss=0.1439, simple_loss=0.2179, pruned_loss=0.03499, over 19772.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.231, pruned_loss=0.03336, over 3625481.63 frames. ], batch size: 108, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:08:14,231 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80283.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:08:28,166 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80289.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:08:58,283 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2477, 3.5365, 3.0863, 2.7059, 3.0911, 3.4935, 3.4508, 3.4835], + device='cuda:0'), covar=tensor([0.0306, 0.0266, 0.0302, 0.0498, 0.0329, 0.0274, 0.0236, 0.0213], + device='cuda:0'), in_proj_covar=tensor([0.0115, 0.0108, 0.0108, 0.0109, 0.0111, 0.0098, 0.0099, 0.0098], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 18:09:27,765 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80313.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:09:33,216 INFO [train.py:892] (0/4) Epoch 44, batch 550, loss[loss=0.1685, simple_loss=0.2469, pruned_loss=0.04508, over 19759.00 frames. ], tot_loss[loss=0.149, simple_loss=0.231, pruned_loss=0.03351, over 3696948.81 frames. ], batch size: 276, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:09:47,139 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.46 vs. limit=5.0 +2023-03-29 18:10:13,740 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.312e+02 3.379e+02 4.084e+02 4.890e+02 8.912e+02, threshold=8.167e+02, percent-clipped=1.0 +2023-03-29 18:10:41,125 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80344.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:10:52,170 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.38 vs. 
limit=5.0 +2023-03-29 18:11:17,833 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8529, 4.6536, 5.1554, 4.7132, 4.1901, 4.8835, 4.7690, 5.2573], + device='cuda:0'), covar=tensor([0.0736, 0.0380, 0.0316, 0.0365, 0.0868, 0.0509, 0.0444, 0.0322], + device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0232, 0.0232, 0.0245, 0.0212, 0.0257, 0.0245, 0.0230], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 18:11:21,908 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80361.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:11:31,178 INFO [train.py:892] (0/4) Epoch 44, batch 600, loss[loss=0.1439, simple_loss=0.2157, pruned_loss=0.036, over 19828.00 frames. ], tot_loss[loss=0.149, simple_loss=0.2313, pruned_loss=0.03337, over 3752401.75 frames. ], batch size: 93, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:11:53,958 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3780, 2.6259, 4.3558, 3.8263, 4.2436, 4.3606, 4.1642, 4.0980], + device='cuda:0'), covar=tensor([0.0582, 0.1069, 0.0137, 0.0702, 0.0180, 0.0235, 0.0188, 0.0186], + device='cuda:0'), in_proj_covar=tensor([0.0106, 0.0109, 0.0093, 0.0157, 0.0092, 0.0105, 0.0096, 0.0093], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 18:12:34,633 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80393.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:13:26,997 INFO [train.py:892] (0/4) Epoch 44, batch 650, loss[loss=0.1593, simple_loss=0.2314, pruned_loss=0.04362, over 19769.00 frames. ], tot_loss[loss=0.1494, simple_loss=0.2311, pruned_loss=0.03384, over 3796925.61 frames. ], batch size: 152, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:13:32,678 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80418.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:14:07,144 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.620e+02 3.666e+02 4.287e+02 5.079e+02 9.209e+02, threshold=8.573e+02, percent-clipped=4.0 +2023-03-29 18:14:32,855 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-03-29 18:14:49,003 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9846, 2.7127, 5.1165, 4.3372, 4.7126, 4.9921, 4.9672, 4.6553], + device='cuda:0'), covar=tensor([0.0495, 0.1106, 0.0099, 0.0867, 0.0172, 0.0186, 0.0145, 0.0164], + device='cuda:0'), in_proj_covar=tensor([0.0106, 0.0108, 0.0093, 0.0157, 0.0092, 0.0105, 0.0096, 0.0092], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 18:14:55,246 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80454.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:15:14,706 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80463.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:15:20,976 INFO [train.py:892] (0/4) Epoch 44, batch 700, loss[loss=0.1865, simple_loss=0.2684, pruned_loss=0.05228, over 19739.00 frames. ], tot_loss[loss=0.1501, simple_loss=0.232, pruned_loss=0.03413, over 3830741.33 frames. 
], batch size: 291, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:15:21,772 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80466.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:16:26,752 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80494.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:17:18,258 INFO [train.py:892] (0/4) Epoch 44, batch 750, loss[loss=0.1328, simple_loss=0.2134, pruned_loss=0.02607, over 19798.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2303, pruned_loss=0.03357, over 3858878.26 frames. ], batch size: 67, lr: 3.55e-03, grad_scale: 16.0 +2023-03-29 18:17:38,238 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80524.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:17:57,765 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.299e+02 3.592e+02 4.231e+02 5.310e+02 9.600e+02, threshold=8.463e+02, percent-clipped=2.0 +2023-03-29 18:18:50,472 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80555.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:19:14,287 INFO [train.py:892] (0/4) Epoch 44, batch 800, loss[loss=0.1414, simple_loss=0.2198, pruned_loss=0.03152, over 19751.00 frames. ], tot_loss[loss=0.1492, simple_loss=0.2308, pruned_loss=0.03386, over 3879043.57 frames. ], batch size: 97, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:19:37,276 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4222, 4.1252, 4.2236, 4.3946, 4.0489, 4.4826, 4.5344, 4.7069], + device='cuda:0'), covar=tensor([0.0641, 0.0499, 0.0508, 0.0402, 0.0796, 0.0533, 0.0413, 0.0326], + device='cuda:0'), in_proj_covar=tensor([0.0161, 0.0187, 0.0208, 0.0186, 0.0183, 0.0167, 0.0161, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 18:20:31,005 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5860, 2.6396, 4.9141, 4.1800, 4.5783, 4.7947, 4.6511, 4.4997], + device='cuda:0'), covar=tensor([0.0669, 0.1135, 0.0106, 0.0825, 0.0180, 0.0217, 0.0180, 0.0199], + device='cuda:0'), in_proj_covar=tensor([0.0106, 0.0109, 0.0093, 0.0156, 0.0092, 0.0105, 0.0096, 0.0092], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 18:20:39,783 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.82 vs. limit=5.0 +2023-03-29 18:20:42,047 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-29 18:21:15,052 INFO [train.py:892] (0/4) Epoch 44, batch 850, loss[loss=0.1568, simple_loss=0.2435, pruned_loss=0.03511, over 19726.00 frames. ], tot_loss[loss=0.149, simple_loss=0.2306, pruned_loss=0.03369, over 3894493.02 frames. ], batch size: 269, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:21:25,880 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. 
limit=2.0 +2023-03-29 18:21:31,583 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2442, 3.1604, 3.3421, 2.6924, 3.3928, 2.8125, 3.2744, 3.3394], + device='cuda:0'), covar=tensor([0.0611, 0.0486, 0.0620, 0.0776, 0.0427, 0.0548, 0.0439, 0.0421], + device='cuda:0'), in_proj_covar=tensor([0.0085, 0.0095, 0.0091, 0.0115, 0.0086, 0.0090, 0.0086, 0.0082], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 18:21:54,761 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.578e+02 3.349e+02 3.795e+02 4.586e+02 1.230e+03, threshold=7.591e+02, percent-clipped=1.0 +2023-03-29 18:22:09,671 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80639.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:22:31,588 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6595, 3.7778, 2.3466, 3.8886, 3.9789, 1.9498, 3.3437, 3.1560], + device='cuda:0'), covar=tensor([0.0787, 0.0835, 0.2803, 0.0843, 0.0734, 0.2639, 0.1154, 0.0930], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0270, 0.0241, 0.0291, 0.0272, 0.0210, 0.0247, 0.0212], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 18:23:11,194 INFO [train.py:892] (0/4) Epoch 44, batch 900, loss[loss=0.1589, simple_loss=0.2484, pruned_loss=0.03466, over 19769.00 frames. ], tot_loss[loss=0.1481, simple_loss=0.2297, pruned_loss=0.03323, over 3906227.04 frames. ], batch size: 88, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:24:18,570 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-03-29 18:24:55,971 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6399, 4.8784, 4.9500, 4.8171, 4.5757, 4.9318, 4.4468, 4.4538], + device='cuda:0'), covar=tensor([0.0508, 0.0558, 0.0518, 0.0481, 0.0676, 0.0486, 0.0681, 0.1071], + device='cuda:0'), in_proj_covar=tensor([0.0298, 0.0316, 0.0325, 0.0282, 0.0295, 0.0277, 0.0287, 0.0338], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 18:25:06,271 INFO [train.py:892] (0/4) Epoch 44, batch 950, loss[loss=0.1588, simple_loss=0.2424, pruned_loss=0.03762, over 19770.00 frames. ], tot_loss[loss=0.1479, simple_loss=0.2297, pruned_loss=0.03304, over 3914565.12 frames. 
], batch size: 247, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:25:42,755 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0405, 3.1617, 2.0081, 3.1884, 3.2622, 1.6409, 2.7701, 2.5529], + device='cuda:0'), covar=tensor([0.0920, 0.0801, 0.2667, 0.0818, 0.0692, 0.2453, 0.1118, 0.1106], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0270, 0.0241, 0.0292, 0.0273, 0.0210, 0.0247, 0.0213], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 18:25:49,368 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.433e+02 3.452e+02 4.047e+02 4.823e+02 9.247e+02, threshold=8.094e+02, percent-clipped=4.0 +2023-03-29 18:25:56,586 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80736.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:26:26,136 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80749.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:27:06,649 INFO [train.py:892] (0/4) Epoch 44, batch 1000, loss[loss=0.1336, simple_loss=0.2137, pruned_loss=0.02673, over 19748.00 frames. ], tot_loss[loss=0.1486, simple_loss=0.2302, pruned_loss=0.03346, over 3922930.03 frames. ], batch size: 205, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:27:24,399 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80773.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:28:09,127 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7216, 2.6537, 2.8901, 2.5180, 3.0993, 3.0171, 3.5598, 3.8601], + device='cuda:0'), covar=tensor([0.0622, 0.1727, 0.1580, 0.2268, 0.1553, 0.1477, 0.0663, 0.0632], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0246, 0.0274, 0.0263, 0.0307, 0.0266, 0.0240, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 18:28:18,927 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80797.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:29:01,270 INFO [train.py:892] (0/4) Epoch 44, batch 1050, loss[loss=0.1381, simple_loss=0.2092, pruned_loss=0.03353, over 19762.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2304, pruned_loss=0.03348, over 3928307.12 frames. 
], batch size: 182, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:29:09,717 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80819.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:29:41,369 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.092e+02 3.498e+02 3.993e+02 4.688e+02 1.348e+03, threshold=7.986e+02, percent-clipped=2.0 +2023-03-29 18:29:44,645 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80834.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:30:12,458 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7954, 2.7226, 3.0107, 2.6121, 3.1982, 3.1725, 3.6988, 4.0076], + device='cuda:0'), covar=tensor([0.0686, 0.1805, 0.1770, 0.2317, 0.1626, 0.1485, 0.0734, 0.0694], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0245, 0.0273, 0.0262, 0.0306, 0.0266, 0.0239, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 18:30:21,923 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80850.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:30:57,644 INFO [train.py:892] (0/4) Epoch 44, batch 1100, loss[loss=0.1433, simple_loss=0.2241, pruned_loss=0.03121, over 19780.00 frames. ], tot_loss[loss=0.15, simple_loss=0.2319, pruned_loss=0.03398, over 3930708.82 frames. ], batch size: 211, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:31:27,278 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-03-29 18:32:07,883 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9971, 2.3711, 3.3453, 2.7263, 2.7725, 2.7089, 2.0249, 2.2262], + device='cuda:0'), covar=tensor([0.1405, 0.2946, 0.0690, 0.1204, 0.2098, 0.1643, 0.2792, 0.2725], + device='cuda:0'), in_proj_covar=tensor([0.0361, 0.0406, 0.0361, 0.0301, 0.0382, 0.0402, 0.0391, 0.0368], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 18:32:38,255 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1581, 4.7062, 4.8559, 4.5990, 5.1342, 3.2415, 4.2490, 2.4044], + device='cuda:0'), covar=tensor([0.0225, 0.0273, 0.0178, 0.0235, 0.0190, 0.1137, 0.0852, 0.1795], + device='cuda:0'), in_proj_covar=tensor([0.0111, 0.0155, 0.0119, 0.0142, 0.0125, 0.0141, 0.0146, 0.0133], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-29 18:32:57,575 INFO [train.py:892] (0/4) Epoch 44, batch 1150, loss[loss=0.156, simple_loss=0.2465, pruned_loss=0.03269, over 19572.00 frames. ], tot_loss[loss=0.1494, simple_loss=0.2311, pruned_loss=0.03388, over 3933807.22 frames. ], batch size: 53, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:33:35,851 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.488e+02 3.446e+02 4.222e+02 4.835e+02 9.572e+02, threshold=8.444e+02, percent-clipped=1.0 +2023-03-29 18:33:49,589 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80939.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:34:56,381 INFO [train.py:892] (0/4) Epoch 44, batch 1200, loss[loss=0.1415, simple_loss=0.2227, pruned_loss=0.03018, over 19874.00 frames. ], tot_loss[loss=0.1496, simple_loss=0.2313, pruned_loss=0.03397, over 3937853.51 frames. 
], batch size: 84, lr: 3.54e-03, grad_scale: 16.0 +2023-03-29 18:35:47,200 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80987.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:36:53,883 INFO [train.py:892] (0/4) Epoch 44, batch 1250, loss[loss=0.1271, simple_loss=0.2011, pruned_loss=0.02652, over 19829.00 frames. ], tot_loss[loss=0.1493, simple_loss=0.2308, pruned_loss=0.03388, over 3941626.76 frames. ], batch size: 143, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:37:34,761 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.062e+02 3.416e+02 3.866e+02 4.633e+02 7.617e+02, threshold=7.733e+02, percent-clipped=0.0 +2023-03-29 18:38:13,292 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81049.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:38:51,617 INFO [train.py:892] (0/4) Epoch 44, batch 1300, loss[loss=0.1413, simple_loss=0.2137, pruned_loss=0.03446, over 19801.00 frames. ], tot_loss[loss=0.149, simple_loss=0.231, pruned_loss=0.03353, over 3942179.74 frames. ], batch size: 174, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:39:18,967 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-29 18:39:51,949 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81092.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:40:04,665 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81097.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:40:48,679 INFO [train.py:892] (0/4) Epoch 44, batch 1350, loss[loss=0.2664, simple_loss=0.3329, pruned_loss=0.0999, over 19164.00 frames. ], tot_loss[loss=0.15, simple_loss=0.232, pruned_loss=0.03398, over 3942414.87 frames. ], batch size: 452, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:40:49,752 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81116.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:40:55,606 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81119.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:41:13,523 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. limit=2.0 +2023-03-29 18:41:18,992 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81129.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:41:27,830 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.454e+02 3.446e+02 4.149e+02 4.801e+02 1.146e+03, threshold=8.297e+02, percent-clipped=2.0 +2023-03-29 18:42:04,433 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81150.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:42:39,504 INFO [train.py:892] (0/4) Epoch 44, batch 1400, loss[loss=0.1462, simple_loss=0.2245, pruned_loss=0.03394, over 19775.00 frames. ], tot_loss[loss=0.1492, simple_loss=0.2312, pruned_loss=0.0336, over 3943544.40 frames. 
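
The zipformer.py:625 lines expose stochastic layer skipping: each encoder layer has its own warm-up window in batch counts (the staggered warmup_begin/warmup_end pairs such as 666.7 to 1333.3 and 3333.3 to 4000.0), and by batch_count near 80000 num_to_drop is almost always 0 with an occasional single layer dropped, suggesting a small residual drop probability is kept for regularization. The ramp below is an assumed shape that matches the logged behaviour, not the exact source:

import random

rng = random.Random(0)

def layers_to_drop(batch_count, warmups, initial_p=0.5, final_p=0.05):
    """warmups: list of (warmup_begin, warmup_end) batch counts, one per layer.
    Returns the set of layer indices to skip for this batch."""
    dropped = set()
    for i, (begin, end) in enumerate(warmups):
        if batch_count < begin:
            p = initial_p
        elif batch_count < end:
            # Linear ramp from the aggressive rate down to the floor rate.
            frac = (batch_count - begin) / (end - begin)
            p = initial_p + frac * (final_p - initial_p)
        else:
            p = final_p
        if rng.random() < p:
            dropped.add(i)
    return dropped

warmups = [(666.7, 1333.3), (1333.3, 2000.0), (2666.7, 3333.3), (3333.3, 4000.0)]
print(layers_to_drop(80000.0, warmups))  # usually set(), occasionally one layer
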
], batch size: 152, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:42:42,714 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81167.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:43:08,262 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81177.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:43:58,075 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81198.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:44:41,836 INFO [train.py:892] (0/4) Epoch 44, batch 1450, loss[loss=0.1415, simple_loss=0.2137, pruned_loss=0.03465, over 19827.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.2308, pruned_loss=0.03347, over 3945037.72 frames. ], batch size: 202, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:45:20,200 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.287e+02 3.615e+02 3.975e+02 5.064e+02 7.888e+02, threshold=7.950e+02, percent-clipped=0.0 +2023-03-29 18:45:56,416 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1363, 2.5518, 4.3893, 3.6941, 4.2130, 4.3463, 4.1533, 4.1144], + device='cuda:0'), covar=tensor([0.0711, 0.1135, 0.0130, 0.0794, 0.0188, 0.0237, 0.0199, 0.0187], + device='cuda:0'), in_proj_covar=tensor([0.0105, 0.0108, 0.0092, 0.0155, 0.0091, 0.0104, 0.0095, 0.0091], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 18:46:39,811 INFO [train.py:892] (0/4) Epoch 44, batch 1500, loss[loss=0.1315, simple_loss=0.2059, pruned_loss=0.02849, over 19798.00 frames. ], tot_loss[loss=0.1482, simple_loss=0.2303, pruned_loss=0.03303, over 3946380.66 frames. ], batch size: 114, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:48:01,811 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81299.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:48:05,932 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81301.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:48:19,428 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2307, 2.5965, 4.4740, 3.9414, 4.2867, 4.4338, 4.3275, 4.1681], + device='cuda:0'), covar=tensor([0.0689, 0.1111, 0.0119, 0.0651, 0.0161, 0.0227, 0.0160, 0.0179], + device='cuda:0'), in_proj_covar=tensor([0.0105, 0.0108, 0.0092, 0.0155, 0.0091, 0.0104, 0.0095, 0.0092], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 18:48:39,252 INFO [train.py:892] (0/4) Epoch 44, batch 1550, loss[loss=0.1762, simple_loss=0.2688, pruned_loss=0.04177, over 19652.00 frames. ], tot_loss[loss=0.1475, simple_loss=0.2298, pruned_loss=0.03259, over 3948438.34 frames. 
], batch size: 57, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:49:19,851 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.242e+02 3.436e+02 3.963e+02 4.737e+02 1.006e+03, threshold=7.927e+02, percent-clipped=2.0 +2023-03-29 18:50:16,865 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6261, 2.9583, 2.6472, 2.2057, 2.7089, 2.8521, 2.8384, 2.8997], + device='cuda:0'), covar=tensor([0.0421, 0.0342, 0.0352, 0.0596, 0.0413, 0.0361, 0.0386, 0.0283], + device='cuda:0'), in_proj_covar=tensor([0.0115, 0.0108, 0.0108, 0.0108, 0.0111, 0.0098, 0.0099, 0.0098], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 18:50:23,778 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81360.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:50:28,367 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81362.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:50:36,255 INFO [train.py:892] (0/4) Epoch 44, batch 1600, loss[loss=0.17, simple_loss=0.258, pruned_loss=0.04105, over 19786.00 frames. ], tot_loss[loss=0.1474, simple_loss=0.2295, pruned_loss=0.03261, over 3950098.15 frames. ], batch size: 48, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:51:24,429 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-29 18:51:40,518 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81392.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:52:07,563 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5736, 2.6344, 1.6706, 2.9200, 2.7406, 2.8221, 2.9286, 2.4025], + device='cuda:0'), covar=tensor([0.0797, 0.0846, 0.1757, 0.0792, 0.0747, 0.0708, 0.0756, 0.1034], + device='cuda:0'), in_proj_covar=tensor([0.0152, 0.0151, 0.0148, 0.0162, 0.0142, 0.0148, 0.0157, 0.0155], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-29 18:52:37,031 INFO [train.py:892] (0/4) Epoch 44, batch 1650, loss[loss=0.1366, simple_loss=0.2223, pruned_loss=0.02548, over 19797.00 frames. ], tot_loss[loss=0.1474, simple_loss=0.2293, pruned_loss=0.03274, over 3948346.78 frames. ], batch size: 79, lr: 3.53e-03, grad_scale: 16.0 +2023-03-29 18:53:06,601 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81429.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:53:16,899 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.426e+02 3.380e+02 3.975e+02 4.611e+02 1.350e+03, threshold=7.949e+02, percent-clipped=2.0 +2023-03-29 18:53:33,529 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81440.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:54:32,122 INFO [train.py:892] (0/4) Epoch 44, batch 1700, loss[loss=0.1357, simple_loss=0.2244, pruned_loss=0.02351, over 19789.00 frames. ], tot_loss[loss=0.1475, simple_loss=0.2298, pruned_loss=0.03263, over 3949090.34 frames. 
], batch size: 94, lr: 3.52e-03, grad_scale: 16.0 +2023-03-29 18:54:34,921 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9906, 2.4708, 4.0478, 3.4401, 3.9735, 4.0766, 3.8493, 3.8414], + device='cuda:0'), covar=tensor([0.0684, 0.1057, 0.0120, 0.0581, 0.0164, 0.0207, 0.0190, 0.0187], + device='cuda:0'), in_proj_covar=tensor([0.0105, 0.0108, 0.0092, 0.0155, 0.0091, 0.0105, 0.0095, 0.0092], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 18:54:49,117 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81472.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:54:59,833 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81477.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 18:56:28,370 INFO [train.py:892] (0/4) Epoch 44, batch 1750, loss[loss=0.1507, simple_loss=0.2285, pruned_loss=0.03643, over 19805.00 frames. ], tot_loss[loss=0.1482, simple_loss=0.2306, pruned_loss=0.03292, over 3945611.14 frames. ], batch size: 224, lr: 3.52e-03, grad_scale: 16.0 +2023-03-29 18:57:03,567 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.489e+02 3.354e+02 3.892e+02 4.607e+02 1.019e+03, threshold=7.783e+02, percent-clipped=2.0 +2023-03-29 18:58:09,167 INFO [train.py:892] (0/4) Epoch 44, batch 1800, loss[loss=0.1399, simple_loss=0.2264, pruned_loss=0.02671, over 19777.00 frames. ], tot_loss[loss=0.1495, simple_loss=0.2314, pruned_loss=0.03376, over 3947078.71 frames. ], batch size: 66, lr: 3.52e-03, grad_scale: 16.0 +2023-03-29 18:59:05,452 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.9865, 2.1542, 2.1073, 1.8270, 2.1870, 1.8216, 2.1286, 2.1491], + device='cuda:0'), covar=tensor([0.0539, 0.0547, 0.0511, 0.1009, 0.0510, 0.0631, 0.0561, 0.0458], + device='cuda:0'), in_proj_covar=tensor([0.0086, 0.0096, 0.0092, 0.0116, 0.0087, 0.0091, 0.0088, 0.0083], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 18:59:40,685 INFO [train.py:892] (0/4) Epoch 44, batch 1850, loss[loss=0.1422, simple_loss=0.2337, pruned_loss=0.02529, over 19586.00 frames. ], tot_loss[loss=0.1509, simple_loss=0.2337, pruned_loss=0.03408, over 3945410.80 frames. ], batch size: 53, lr: 3.52e-03, grad_scale: 16.0 +2023-03-29 18:59:48,290 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-44.pt +2023-03-29 19:00:42,762 INFO [train.py:892] (0/4) Epoch 45, batch 0, loss[loss=0.1299, simple_loss=0.2073, pruned_loss=0.02632, over 19634.00 frames. ], tot_loss[loss=0.1299, simple_loss=0.2073, pruned_loss=0.02632, over 19634.00 frames. ], batch size: 68, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:00:42,764 INFO [train.py:917] (0/4) Computing validation loss +2023-03-29 19:01:20,026 INFO [train.py:926] (0/4) Epoch 45, validation: loss=0.1889, simple_loss=0.2504, pruned_loss=0.0637, over 2883724.00 frames. +2023-03-29 19:01:20,029 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-29 19:01:47,733 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.448e+02 3.358e+02 4.011e+02 4.800e+02 7.460e+02, threshold=8.023e+02, percent-clipped=0.0 +2023-03-29 19:01:55,935 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-29 19:02:13,586 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. 
limit=2.0 +2023-03-29 19:02:42,302 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81655.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:02:47,200 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81657.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:03:18,009 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81670.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:03:18,995 INFO [train.py:892] (0/4) Epoch 45, batch 50, loss[loss=0.1498, simple_loss=0.2224, pruned_loss=0.03862, over 19781.00 frames. ], tot_loss[loss=0.1419, simple_loss=0.2221, pruned_loss=0.0309, over 892279.81 frames. ], batch size: 131, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:04:26,786 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9497, 4.9479, 5.3289, 5.0791, 5.1637, 4.8361, 5.0892, 4.8290], + device='cuda:0'), covar=tensor([0.1417, 0.1567, 0.0889, 0.1294, 0.0771, 0.1002, 0.1712, 0.2019], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0358, 0.0387, 0.0320, 0.0294, 0.0301, 0.0379, 0.0408], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 19:05:21,207 INFO [train.py:892] (0/4) Epoch 45, batch 100, loss[loss=0.1266, simple_loss=0.2093, pruned_loss=0.02195, over 19642.00 frames. ], tot_loss[loss=0.1461, simple_loss=0.2281, pruned_loss=0.03206, over 1570956.31 frames. ], batch size: 69, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:05:44,636 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81731.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 19:05:49,386 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.145e+02 3.372e+02 3.916e+02 5.007e+02 1.092e+03, threshold=7.832e+02, percent-clipped=2.0 +2023-03-29 19:06:03,082 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9277, 2.5637, 2.7920, 3.1325, 3.5289, 3.8501, 3.6410, 3.6963], + device='cuda:0'), covar=tensor([0.1001, 0.1592, 0.1364, 0.0777, 0.0533, 0.0277, 0.0464, 0.0519], + device='cuda:0'), in_proj_covar=tensor([0.0166, 0.0169, 0.0183, 0.0157, 0.0144, 0.0138, 0.0133, 0.0122], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 19:06:13,873 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6360, 2.6844, 4.1584, 3.0860, 3.3259, 3.0723, 2.3157, 2.4377], + device='cuda:0'), covar=tensor([0.1214, 0.3226, 0.0566, 0.1285, 0.2090, 0.1803, 0.2875, 0.3018], + device='cuda:0'), in_proj_covar=tensor([0.0361, 0.0406, 0.0360, 0.0300, 0.0382, 0.0404, 0.0392, 0.0369], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 19:07:18,289 INFO [train.py:892] (0/4) Epoch 45, batch 150, loss[loss=0.1375, simple_loss=0.2206, pruned_loss=0.02717, over 19803.00 frames. ], tot_loss[loss=0.1464, simple_loss=0.228, pruned_loss=0.03238, over 2098386.28 frames. 
], batch size: 172, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:07:21,554 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81772.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:09:11,566 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81820.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:09:12,766 INFO [train.py:892] (0/4) Epoch 45, batch 200, loss[loss=0.1138, simple_loss=0.1956, pruned_loss=0.01596, over 19803.00 frames. ], tot_loss[loss=0.1474, simple_loss=0.2287, pruned_loss=0.03308, over 2509995.56 frames. ], batch size: 83, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:09:39,857 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.431e+02 3.452e+02 4.222e+02 4.821e+02 9.570e+02, threshold=8.444e+02, percent-clipped=2.0 +2023-03-29 19:10:54,794 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.08 vs. limit=2.0 +2023-03-29 19:11:08,731 INFO [train.py:892] (0/4) Epoch 45, batch 250, loss[loss=0.1504, simple_loss=0.2329, pruned_loss=0.034, over 19804.00 frames. ], tot_loss[loss=0.147, simple_loss=0.2284, pruned_loss=0.03284, over 2829839.86 frames. ], batch size: 68, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:12:49,230 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.22 vs. limit=5.0 +2023-03-29 19:13:09,802 INFO [train.py:892] (0/4) Epoch 45, batch 300, loss[loss=0.1513, simple_loss=0.2241, pruned_loss=0.03931, over 19856.00 frames. ], tot_loss[loss=0.148, simple_loss=0.2302, pruned_loss=0.03295, over 3075891.31 frames. ], batch size: 165, lr: 3.48e-03, grad_scale: 16.0 +2023-03-29 19:13:37,298 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.189e+02 3.336e+02 4.013e+02 4.745e+02 8.684e+02, threshold=8.026e+02, percent-clipped=2.0 +2023-03-29 19:14:30,296 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81955.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:14:34,790 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81957.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:15:04,586 INFO [train.py:892] (0/4) Epoch 45, batch 350, loss[loss=0.1496, simple_loss=0.2366, pruned_loss=0.03131, over 19876.00 frames. ], tot_loss[loss=0.1473, simple_loss=0.2295, pruned_loss=0.03254, over 3270681.18 frames. ], batch size: 77, lr: 3.47e-03, grad_scale: 16.0 +2023-03-29 19:15:45,226 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81987.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:16:12,355 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-82000.pt +2023-03-29 19:16:22,838 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82003.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:16:26,955 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82005.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:17:03,728 INFO [train.py:892] (0/4) Epoch 45, batch 400, loss[loss=0.1427, simple_loss=0.2278, pruned_loss=0.02876, over 19580.00 frames. ], tot_loss[loss=0.1475, simple_loss=0.2302, pruned_loss=0.03235, over 3418321.57 frames. 
], batch size: 42, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:17:15,207 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82026.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 19:17:33,549 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.143e+02 3.235e+02 3.968e+02 4.770e+02 8.639e+02, threshold=7.936e+02, percent-clipped=1.0 +2023-03-29 19:18:09,219 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82048.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:18:26,170 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82055.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:18:39,975 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8407, 3.5606, 3.6572, 3.8754, 3.6758, 3.8077, 3.8891, 4.1044], + device='cuda:0'), covar=tensor([0.0724, 0.0556, 0.0639, 0.0462, 0.0791, 0.0676, 0.0574, 0.0385], + device='cuda:0'), in_proj_covar=tensor([0.0161, 0.0186, 0.0207, 0.0185, 0.0183, 0.0167, 0.0161, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 19:19:03,142 INFO [train.py:892] (0/4) Epoch 45, batch 450, loss[loss=0.1413, simple_loss=0.2262, pruned_loss=0.02821, over 19792.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.2314, pruned_loss=0.03322, over 3536848.36 frames. ], batch size: 79, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:19:06,949 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.57 vs. limit=5.0 +2023-03-29 19:20:10,826 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7243, 2.2706, 2.4817, 2.9670, 3.2895, 3.5021, 3.3964, 3.3820], + device='cuda:0'), covar=tensor([0.1063, 0.1658, 0.1462, 0.0758, 0.0560, 0.0364, 0.0452, 0.0573], + device='cuda:0'), in_proj_covar=tensor([0.0167, 0.0169, 0.0183, 0.0157, 0.0144, 0.0138, 0.0133, 0.0123], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 19:20:51,321 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82116.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:21:01,433 INFO [train.py:892] (0/4) Epoch 45, batch 500, loss[loss=0.138, simple_loss=0.2215, pruned_loss=0.02729, over 19797.00 frames. ], tot_loss[loss=0.1488, simple_loss=0.231, pruned_loss=0.03332, over 3629769.62 frames. ], batch size: 45, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:21:29,409 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.527e+02 3.621e+02 4.307e+02 5.011e+02 9.871e+02, threshold=8.613e+02, percent-clipped=3.0 +2023-03-29 19:21:30,519 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6018, 3.6768, 2.2618, 3.8107, 3.9236, 1.8608, 3.2912, 3.0530], + device='cuda:0'), covar=tensor([0.0803, 0.0894, 0.2843, 0.0803, 0.0635, 0.2798, 0.1148, 0.0986], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0268, 0.0239, 0.0290, 0.0270, 0.0209, 0.0246, 0.0211], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 19:22:58,965 INFO [train.py:892] (0/4) Epoch 45, batch 550, loss[loss=0.1438, simple_loss=0.2286, pruned_loss=0.02953, over 19742.00 frames. ], tot_loss[loss=0.1499, simple_loss=0.2322, pruned_loss=0.03378, over 3699470.76 frames. 
], batch size: 221, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:24:01,340 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9753, 2.6227, 2.8587, 3.2212, 3.6636, 4.1189, 3.9207, 3.9746], + device='cuda:0'), covar=tensor([0.1029, 0.1610, 0.1417, 0.0735, 0.0484, 0.0252, 0.0429, 0.0462], + device='cuda:0'), in_proj_covar=tensor([0.0169, 0.0172, 0.0185, 0.0158, 0.0146, 0.0140, 0.0135, 0.0124], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 19:24:56,173 INFO [train.py:892] (0/4) Epoch 45, batch 600, loss[loss=0.1346, simple_loss=0.2181, pruned_loss=0.02554, over 19778.00 frames. ], tot_loss[loss=0.1495, simple_loss=0.2318, pruned_loss=0.03356, over 3753877.63 frames. ], batch size: 66, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:25:22,517 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.595e+02 3.638e+02 4.311e+02 5.322e+02 1.783e+03, threshold=8.623e+02, percent-clipped=2.0 +2023-03-29 19:25:33,339 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.0850, 1.5660, 1.7015, 2.3226, 2.4442, 2.6530, 2.4794, 2.5468], + device='cuda:0'), covar=tensor([0.1302, 0.2094, 0.1959, 0.0898, 0.0744, 0.0496, 0.0621, 0.0661], + device='cuda:0'), in_proj_covar=tensor([0.0169, 0.0172, 0.0185, 0.0158, 0.0146, 0.0140, 0.0134, 0.0124], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 19:26:49,029 INFO [train.py:892] (0/4) Epoch 45, batch 650, loss[loss=0.1717, simple_loss=0.2618, pruned_loss=0.04075, over 19729.00 frames. ], tot_loss[loss=0.1485, simple_loss=0.2307, pruned_loss=0.0332, over 3797593.55 frames. ], batch size: 51, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:28:46,103 INFO [train.py:892] (0/4) Epoch 45, batch 700, loss[loss=0.1526, simple_loss=0.2363, pruned_loss=0.0344, over 19593.00 frames. ], tot_loss[loss=0.1478, simple_loss=0.2302, pruned_loss=0.03272, over 3831327.58 frames. 
], batch size: 45, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:28:57,043 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9462, 4.0483, 2.5128, 4.2067, 4.3658, 2.0218, 3.6637, 3.4418], + device='cuda:0'), covar=tensor([0.0731, 0.0857, 0.2639, 0.0777, 0.0568, 0.2743, 0.1040, 0.0843], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0267, 0.0239, 0.0289, 0.0269, 0.0208, 0.0246, 0.0210], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 19:29:01,226 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82326.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:29:15,651 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.176e+02 3.457e+02 4.012e+02 4.787e+02 1.008e+03, threshold=8.024e+02, percent-clipped=2.0 +2023-03-29 19:29:40,030 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82343.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:29:46,429 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82346.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:29:46,462 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([1.9250, 2.1187, 1.9639, 1.4108, 1.9990, 2.1007, 2.0179, 2.0518], + device='cuda:0'), covar=tensor([0.0508, 0.0378, 0.0435, 0.0648, 0.0479, 0.0371, 0.0398, 0.0371], + device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0109, 0.0109, 0.0110, 0.0113, 0.0099, 0.0101, 0.0099], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 19:30:08,210 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1381, 3.1451, 3.4718, 3.1500, 3.0725, 3.4527, 3.2896, 3.5255], + device='cuda:0'), covar=tensor([0.1058, 0.0477, 0.0503, 0.0558, 0.2032, 0.0733, 0.0609, 0.0519], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0231, 0.0232, 0.0244, 0.0213, 0.0257, 0.0245, 0.0231], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 19:30:42,587 INFO [train.py:892] (0/4) Epoch 45, batch 750, loss[loss=0.1352, simple_loss=0.2171, pruned_loss=0.02666, over 19765.00 frames. ], tot_loss[loss=0.1473, simple_loss=0.2297, pruned_loss=0.03241, over 3857966.44 frames. ], batch size: 213, lr: 3.47e-03, grad_scale: 32.0 +2023-03-29 19:30:49,664 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82374.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:32:10,254 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82407.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:32:14,023 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7465, 4.4312, 4.5411, 4.7132, 4.5312, 4.9468, 4.8188, 5.0158], + device='cuda:0'), covar=tensor([0.0656, 0.0421, 0.0464, 0.0369, 0.0634, 0.0392, 0.0460, 0.0259], + device='cuda:0'), in_proj_covar=tensor([0.0162, 0.0187, 0.0207, 0.0185, 0.0184, 0.0167, 0.0161, 0.0208], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 19:32:19,244 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82411.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:32:38,390 INFO [train.py:892] (0/4) Epoch 45, batch 800, loss[loss=0.1411, simple_loss=0.2188, pruned_loss=0.03175, over 19777.00 frames. 
], tot_loss[loss=0.1471, simple_loss=0.2292, pruned_loss=0.03252, over 3878961.23 frames. ], batch size: 154, lr: 3.46e-03, grad_scale: 32.0 +2023-03-29 19:32:48,936 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2777, 2.7595, 3.3232, 2.7264, 3.3058, 3.2456, 4.1762, 4.4951], + device='cuda:0'), covar=tensor([0.0617, 0.2050, 0.1624, 0.2619, 0.1976, 0.1978, 0.0594, 0.0598], + device='cuda:0'), in_proj_covar=tensor([0.0263, 0.0248, 0.0275, 0.0263, 0.0310, 0.0267, 0.0241, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 19:33:03,753 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.510e+02 3.458e+02 4.305e+02 5.045e+02 7.264e+02, threshold=8.610e+02, percent-clipped=0.0 +2023-03-29 19:33:30,102 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.35 vs. limit=5.0 +2023-03-29 19:34:06,483 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-29 19:34:36,639 INFO [train.py:892] (0/4) Epoch 45, batch 850, loss[loss=0.1734, simple_loss=0.2508, pruned_loss=0.048, over 19876.00 frames. ], tot_loss[loss=0.1481, simple_loss=0.23, pruned_loss=0.03313, over 3894908.44 frames. ], batch size: 136, lr: 3.46e-03, grad_scale: 32.0 +2023-03-29 19:34:55,647 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9053, 4.5640, 4.6213, 4.8544, 4.6907, 5.0467, 4.9938, 5.1759], + device='cuda:0'), covar=tensor([0.0628, 0.0448, 0.0451, 0.0400, 0.0672, 0.0426, 0.0441, 0.0304], + device='cuda:0'), in_proj_covar=tensor([0.0161, 0.0186, 0.0206, 0.0184, 0.0182, 0.0166, 0.0160, 0.0206], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 19:35:25,043 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82493.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 19:36:29,767 INFO [train.py:892] (0/4) Epoch 45, batch 900, loss[loss=0.1574, simple_loss=0.2409, pruned_loss=0.0369, over 19751.00 frames. ], tot_loss[loss=0.1494, simple_loss=0.231, pruned_loss=0.03389, over 3907571.17 frames. ], batch size: 259, lr: 3.46e-03, grad_scale: 32.0 +2023-03-29 19:36:57,910 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.335e+02 3.589e+02 4.303e+02 4.859e+02 1.125e+03, threshold=8.606e+02, percent-clipped=1.0 +2023-03-29 19:37:49,477 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82554.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 19:38:17,395 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6140, 3.1482, 3.5323, 3.0727, 3.8071, 3.8035, 4.3606, 4.8609], + device='cuda:0'), covar=tensor([0.0542, 0.1671, 0.1524, 0.2150, 0.1835, 0.1329, 0.0600, 0.0514], + device='cuda:0'), in_proj_covar=tensor([0.0264, 0.0248, 0.0276, 0.0264, 0.0310, 0.0267, 0.0242, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 19:38:23,876 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82569.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:38:27,683 INFO [train.py:892] (0/4) Epoch 45, batch 950, loss[loss=0.1465, simple_loss=0.2206, pruned_loss=0.03616, over 19817.00 frames. ], tot_loss[loss=0.1488, simple_loss=0.2307, pruned_loss=0.03344, over 3916068.54 frames. 
], batch size: 147, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:38:37,714 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5290, 4.6152, 2.6832, 4.8742, 5.0574, 2.1874, 4.2687, 3.6687], + device='cuda:0'), covar=tensor([0.0606, 0.0669, 0.2663, 0.0625, 0.0505, 0.2733, 0.0926, 0.0901], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0269, 0.0240, 0.0291, 0.0270, 0.0209, 0.0247, 0.0211], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 19:39:17,902 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8636, 2.8939, 4.4284, 3.3104, 3.5118, 3.3007, 2.4571, 2.6114], + device='cuda:0'), covar=tensor([0.1083, 0.3434, 0.0483, 0.1210, 0.2021, 0.1792, 0.3012, 0.2926], + device='cuda:0'), in_proj_covar=tensor([0.0359, 0.0405, 0.0358, 0.0299, 0.0381, 0.0403, 0.0391, 0.0368], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 19:40:26,276 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0949, 4.1741, 2.4455, 4.3456, 4.5169, 2.0796, 3.7533, 3.3848], + device='cuda:0'), covar=tensor([0.0683, 0.0738, 0.2841, 0.0693, 0.0496, 0.2671, 0.1025, 0.0895], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0270, 0.0242, 0.0292, 0.0272, 0.0210, 0.0249, 0.0212], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 19:40:27,148 INFO [train.py:892] (0/4) Epoch 45, batch 1000, loss[loss=0.1294, simple_loss=0.2098, pruned_loss=0.02454, over 19809.00 frames. ], tot_loss[loss=0.1482, simple_loss=0.23, pruned_loss=0.03317, over 3924661.33 frames. ], batch size: 105, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:40:51,670 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82630.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:40:56,922 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. limit=2.0 +2023-03-29 19:41:01,298 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.945e+02 3.409e+02 3.840e+02 4.535e+02 7.731e+02, threshold=7.679e+02, percent-clipped=0.0 +2023-03-29 19:41:22,510 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82643.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:42:26,401 INFO [train.py:892] (0/4) Epoch 45, batch 1050, loss[loss=0.1796, simple_loss=0.2727, pruned_loss=0.04325, over 19532.00 frames. ], tot_loss[loss=0.1478, simple_loss=0.2296, pruned_loss=0.03297, over 3930955.58 frames. 
], batch size: 54, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:42:44,808 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4187, 3.0537, 3.4100, 2.9494, 3.6277, 3.5511, 4.2155, 4.6643], + device='cuda:0'), covar=tensor([0.0542, 0.1636, 0.1610, 0.2226, 0.1655, 0.1524, 0.0603, 0.0497], + device='cuda:0'), in_proj_covar=tensor([0.0264, 0.0248, 0.0276, 0.0263, 0.0310, 0.0267, 0.0242, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 19:43:12,515 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82691.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:43:39,243 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82702.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:43:59,377 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82711.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:44:18,901 INFO [train.py:892] (0/4) Epoch 45, batch 1100, loss[loss=0.1486, simple_loss=0.2304, pruned_loss=0.0334, over 19821.00 frames. ], tot_loss[loss=0.147, simple_loss=0.2288, pruned_loss=0.03257, over 3936598.90 frames. ], batch size: 288, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:44:29,210 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-29 19:44:49,684 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.363e+02 3.616e+02 4.255e+02 4.865e+02 1.038e+03, threshold=8.510e+02, percent-clipped=5.0 +2023-03-29 19:45:48,337 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82759.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:46:17,014 INFO [train.py:892] (0/4) Epoch 45, batch 1150, loss[loss=0.1335, simple_loss=0.2225, pruned_loss=0.02224, over 19691.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2307, pruned_loss=0.03331, over 3935988.92 frames. ], batch size: 59, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:48:14,831 INFO [train.py:892] (0/4) Epoch 45, batch 1200, loss[loss=0.1598, simple_loss=0.2371, pruned_loss=0.04124, over 19783.00 frames. ], tot_loss[loss=0.1496, simple_loss=0.2315, pruned_loss=0.0338, over 3939101.29 frames. ], batch size: 247, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:48:45,773 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.475e+02 3.607e+02 4.196e+02 5.114e+02 1.465e+03, threshold=8.391e+02, percent-clipped=1.0 +2023-03-29 19:49:22,150 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82849.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 19:50:06,282 INFO [train.py:892] (0/4) Epoch 45, batch 1250, loss[loss=0.1587, simple_loss=0.2468, pruned_loss=0.03528, over 19748.00 frames. ], tot_loss[loss=0.1494, simple_loss=0.2312, pruned_loss=0.03375, over 3942916.83 frames. ], batch size: 259, lr: 3.46e-03, grad_scale: 16.0 +2023-03-29 19:52:02,782 INFO [train.py:892] (0/4) Epoch 45, batch 1300, loss[loss=0.1603, simple_loss=0.2477, pruned_loss=0.03641, over 19738.00 frames. ], tot_loss[loss=0.1494, simple_loss=0.2313, pruned_loss=0.03372, over 3944917.50 frames. 
], batch size: 95, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 19:52:15,005 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82925.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:52:34,742 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.586e+02 3.425e+02 4.088e+02 4.844e+02 1.609e+03, threshold=8.176e+02, percent-clipped=1.0 +2023-03-29 19:53:43,441 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82963.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:54:02,449 INFO [train.py:892] (0/4) Epoch 45, batch 1350, loss[loss=0.1504, simple_loss=0.2355, pruned_loss=0.03268, over 19771.00 frames. ], tot_loss[loss=0.148, simple_loss=0.23, pruned_loss=0.03303, over 3946471.18 frames. ], batch size: 247, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 19:55:12,964 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8540, 3.5774, 3.7002, 3.8503, 3.6657, 3.8530, 3.8973, 4.0736], + device='cuda:0'), covar=tensor([0.0669, 0.0545, 0.0542, 0.0462, 0.0788, 0.0616, 0.0522, 0.0396], + device='cuda:0'), in_proj_covar=tensor([0.0164, 0.0190, 0.0209, 0.0188, 0.0186, 0.0169, 0.0163, 0.0211], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 19:55:16,978 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83002.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:55:59,011 INFO [train.py:892] (0/4) Epoch 45, batch 1400, loss[loss=0.1591, simple_loss=0.2359, pruned_loss=0.04115, over 19773.00 frames. ], tot_loss[loss=0.1476, simple_loss=0.2294, pruned_loss=0.03293, over 3947945.91 frames. ], batch size: 213, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 19:56:09,208 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83024.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:56:29,944 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.035e+02 3.427e+02 4.004e+02 4.659e+02 9.090e+02, threshold=8.008e+02, percent-clipped=1.0 +2023-03-29 19:56:49,973 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.91 vs. limit=5.0 +2023-03-29 19:57:07,388 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83050.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 19:57:56,914 INFO [train.py:892] (0/4) Epoch 45, batch 1450, loss[loss=0.1483, simple_loss=0.2206, pruned_loss=0.03803, over 19763.00 frames. ], tot_loss[loss=0.147, simple_loss=0.2287, pruned_loss=0.03262, over 3947540.79 frames. ], batch size: 213, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 19:59:49,374 INFO [train.py:892] (0/4) Epoch 45, batch 1500, loss[loss=0.1356, simple_loss=0.2125, pruned_loss=0.02937, over 19840.00 frames. ], tot_loss[loss=0.1467, simple_loss=0.2283, pruned_loss=0.03252, over 3948881.61 frames. 
], batch size: 160, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 20:00:17,874 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.217e+02 3.289e+02 3.880e+02 4.653e+02 8.208e+02, threshold=7.760e+02, percent-clipped=2.0 +2023-03-29 20:00:55,586 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83149.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 20:01:13,646 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.8560, 6.1232, 6.1674, 6.0359, 5.8834, 6.1565, 5.5310, 5.5233], + device='cuda:0'), covar=tensor([0.0454, 0.0489, 0.0463, 0.0411, 0.0572, 0.0461, 0.0610, 0.0984], + device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0312, 0.0322, 0.0282, 0.0292, 0.0274, 0.0285, 0.0335], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 20:01:45,137 INFO [train.py:892] (0/4) Epoch 45, batch 1550, loss[loss=0.149, simple_loss=0.2355, pruned_loss=0.03121, over 19665.00 frames. ], tot_loss[loss=0.1461, simple_loss=0.2279, pruned_loss=0.0321, over 3948670.05 frames. ], batch size: 51, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 20:02:46,107 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83196.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:02:48,026 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83197.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 20:03:45,846 INFO [train.py:892] (0/4) Epoch 45, batch 1600, loss[loss=0.1288, simple_loss=0.2082, pruned_loss=0.02471, over 19867.00 frames. ], tot_loss[loss=0.1465, simple_loss=0.2287, pruned_loss=0.03217, over 3948188.99 frames. ], batch size: 99, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 20:03:57,112 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83225.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:04:15,359 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.230e+02 3.453e+02 4.061e+02 4.762e+02 8.632e+02, threshold=8.122e+02, percent-clipped=1.0 +2023-03-29 20:05:10,945 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83257.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:05:41,846 INFO [train.py:892] (0/4) Epoch 45, batch 1650, loss[loss=0.1329, simple_loss=0.2181, pruned_loss=0.02384, over 19780.00 frames. ], tot_loss[loss=0.146, simple_loss=0.2281, pruned_loss=0.032, over 3950300.51 frames. ], batch size: 87, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 20:05:47,004 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83273.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:06:10,115 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83284.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:07:29,138 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83319.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:07:33,430 INFO [train.py:892] (0/4) Epoch 45, batch 1700, loss[loss=0.1555, simple_loss=0.2554, pruned_loss=0.0278, over 19675.00 frames. ], tot_loss[loss=0.1462, simple_loss=0.2284, pruned_loss=0.03197, over 3949409.40 frames. 
], batch size: 55, lr: 3.45e-03, grad_scale: 16.0 +2023-03-29 20:08:02,062 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.889e+02 3.340e+02 3.879e+02 4.594e+02 1.209e+03, threshold=7.757e+02, percent-clipped=3.0 +2023-03-29 20:08:28,464 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83345.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:08:37,462 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.4452, 2.0449, 3.5697, 3.0145, 3.5837, 3.6243, 3.3682, 3.4688], + device='cuda:0'), covar=tensor([0.0950, 0.1287, 0.0146, 0.0470, 0.0175, 0.0264, 0.0247, 0.0217], + device='cuda:0'), in_proj_covar=tensor([0.0106, 0.0107, 0.0093, 0.0155, 0.0092, 0.0105, 0.0095, 0.0091], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 20:09:03,458 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1122, 3.2443, 2.9133, 2.5089, 2.9510, 3.2335, 3.1937, 3.3000], + device='cuda:0'), covar=tensor([0.0329, 0.0297, 0.0330, 0.0526, 0.0367, 0.0355, 0.0271, 0.0260], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0110, 0.0110, 0.0109, 0.0113, 0.0100, 0.0101, 0.0100], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 20:09:20,240 INFO [train.py:892] (0/4) Epoch 45, batch 1750, loss[loss=0.1612, simple_loss=0.2522, pruned_loss=0.0351, over 19650.00 frames. ], tot_loss[loss=0.1472, simple_loss=0.2299, pruned_loss=0.03225, over 3947755.77 frames. ], batch size: 79, lr: 3.44e-03, grad_scale: 16.0 +2023-03-29 20:09:50,955 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-29 20:10:59,578 INFO [train.py:892] (0/4) Epoch 45, batch 1800, loss[loss=0.1507, simple_loss=0.2383, pruned_loss=0.03156, over 19805.00 frames. ], tot_loss[loss=0.1475, simple_loss=0.2298, pruned_loss=0.03256, over 3949956.04 frames. ], batch size: 288, lr: 3.44e-03, grad_scale: 16.0 +2023-03-29 20:11:23,979 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.557e+02 3.459e+02 4.040e+02 5.185e+02 7.885e+02, threshold=8.081e+02, percent-clipped=1.0 +2023-03-29 20:12:35,802 INFO [train.py:892] (0/4) Epoch 45, batch 1850, loss[loss=0.1466, simple_loss=0.2247, pruned_loss=0.03422, over 19824.00 frames. ], tot_loss[loss=0.1478, simple_loss=0.2303, pruned_loss=0.03267, over 3948674.61 frames. ], batch size: 57, lr: 3.44e-03, grad_scale: 16.0 +2023-03-29 20:12:44,378 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-45.pt +2023-03-29 20:13:42,298 INFO [train.py:892] (0/4) Epoch 46, batch 0, loss[loss=0.1394, simple_loss=0.2156, pruned_loss=0.03159, over 19849.00 frames. ], tot_loss[loss=0.1394, simple_loss=0.2156, pruned_loss=0.03159, over 19849.00 frames. ], batch size: 112, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:13:42,299 INFO [train.py:917] (0/4) Computing validation loss +2023-03-29 20:14:19,281 INFO [train.py:926] (0/4) Epoch 46, validation: loss=0.1879, simple_loss=0.2498, pruned_loss=0.06295, over 2883724.00 frames. +2023-03-29 20:14:19,282 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-29 20:16:21,239 INFO [train.py:892] (0/4) Epoch 46, batch 50, loss[loss=0.1575, simple_loss=0.2367, pruned_loss=0.03919, over 19746.00 frames. ], tot_loss[loss=0.1449, simple_loss=0.2253, pruned_loss=0.03229, over 889426.88 frames. 
], batch size: 134, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:16:38,123 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.206e+02 3.234e+02 3.698e+02 4.952e+02 1.024e+03, threshold=7.395e+02, percent-clipped=5.0 +2023-03-29 20:16:51,195 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-03-29 20:17:20,613 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83552.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:17:24,058 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8668, 2.8420, 3.0323, 2.6545, 3.2163, 3.1430, 3.6662, 4.0019], + device='cuda:0'), covar=tensor([0.0655, 0.1662, 0.1617, 0.2296, 0.1539, 0.1508, 0.0723, 0.0660], + device='cuda:0'), in_proj_covar=tensor([0.0265, 0.0249, 0.0278, 0.0265, 0.0312, 0.0268, 0.0243, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 20:18:14,836 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6050, 2.6779, 4.9741, 4.1894, 4.4159, 4.8275, 4.6609, 4.5501], + device='cuda:0'), covar=tensor([0.0646, 0.1120, 0.0099, 0.0805, 0.0193, 0.0196, 0.0153, 0.0170], + device='cuda:0'), in_proj_covar=tensor([0.0106, 0.0108, 0.0093, 0.0155, 0.0091, 0.0105, 0.0095, 0.0091], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 20:18:16,053 INFO [train.py:892] (0/4) Epoch 46, batch 100, loss[loss=0.1418, simple_loss=0.2318, pruned_loss=0.02591, over 19830.00 frames. ], tot_loss[loss=0.1457, simple_loss=0.2269, pruned_loss=0.0322, over 1567586.92 frames. ], batch size: 52, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:19:27,946 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7498, 2.2016, 2.5303, 2.9338, 3.2617, 3.4432, 3.3032, 3.3351], + device='cuda:0'), covar=tensor([0.1016, 0.1768, 0.1395, 0.0782, 0.0574, 0.0399, 0.0496, 0.0567], + device='cuda:0'), in_proj_covar=tensor([0.0168, 0.0171, 0.0183, 0.0158, 0.0145, 0.0139, 0.0134, 0.0123], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 20:19:59,436 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83619.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:20:13,490 INFO [train.py:892] (0/4) Epoch 46, batch 150, loss[loss=0.1345, simple_loss=0.2076, pruned_loss=0.03068, over 19855.00 frames. ], tot_loss[loss=0.1459, simple_loss=0.2275, pruned_loss=0.03213, over 2095746.55 frames. 
], batch size: 142, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:20:31,958 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.341e+02 3.214e+02 3.787e+02 4.556e+02 6.937e+02, threshold=7.575e+02, percent-clipped=0.0 +2023-03-29 20:20:51,174 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83640.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:21:17,805 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8575, 2.8870, 1.8346, 3.3102, 3.0299, 3.1774, 3.3627, 2.6791], + device='cuda:0'), covar=tensor([0.0714, 0.0774, 0.1756, 0.0671, 0.0733, 0.0681, 0.0585, 0.0923], + device='cuda:0'), in_proj_covar=tensor([0.0152, 0.0152, 0.0149, 0.0163, 0.0141, 0.0148, 0.0157, 0.0155], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-29 20:21:25,247 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5925, 4.7443, 2.7459, 5.0142, 5.1734, 2.2336, 4.4514, 3.6785], + device='cuda:0'), covar=tensor([0.0618, 0.0619, 0.2648, 0.0573, 0.0409, 0.2723, 0.0840, 0.0868], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0270, 0.0242, 0.0292, 0.0271, 0.0209, 0.0249, 0.0212], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 20:21:54,618 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83667.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:22:16,398 INFO [train.py:892] (0/4) Epoch 46, batch 200, loss[loss=0.1379, simple_loss=0.2218, pruned_loss=0.02704, over 19836.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2305, pruned_loss=0.03341, over 2505869.60 frames. ], batch size: 161, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:24:05,078 INFO [train.py:892] (0/4) Epoch 46, batch 250, loss[loss=0.1553, simple_loss=0.2471, pruned_loss=0.03181, over 19665.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.231, pruned_loss=0.0334, over 2825470.87 frames. ], batch size: 50, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:24:21,763 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.227e+02 3.614e+02 4.201e+02 5.267e+02 1.212e+03, threshold=8.403e+02, percent-clipped=3.0 +2023-03-29 20:25:25,610 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83760.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:26:06,559 INFO [train.py:892] (0/4) Epoch 46, batch 300, loss[loss=0.1776, simple_loss=0.2522, pruned_loss=0.05146, over 19667.00 frames. ], tot_loss[loss=0.1486, simple_loss=0.2304, pruned_loss=0.03342, over 3075698.44 frames. 
], batch size: 64, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:26:45,009 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.8998, 2.8576, 2.9924, 2.4366, 3.0525, 2.6498, 2.9305, 3.0141], + device='cuda:0'), covar=tensor([0.0640, 0.0553, 0.0559, 0.0853, 0.0435, 0.0537, 0.0578, 0.0453], + device='cuda:0'), in_proj_covar=tensor([0.0086, 0.0096, 0.0092, 0.0115, 0.0088, 0.0091, 0.0088, 0.0083], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 20:27:10,418 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8458, 3.9507, 2.4410, 4.1758, 4.3680, 2.0957, 3.4618, 3.2166], + device='cuda:0'), covar=tensor([0.0779, 0.0912, 0.2744, 0.0817, 0.0550, 0.2840, 0.1321, 0.1001], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0269, 0.0241, 0.0290, 0.0269, 0.0208, 0.0247, 0.0211], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 20:28:00,017 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83821.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:28:11,263 INFO [train.py:892] (0/4) Epoch 46, batch 350, loss[loss=0.1294, simple_loss=0.2118, pruned_loss=0.02348, over 19861.00 frames. ], tot_loss[loss=0.1479, simple_loss=0.2297, pruned_loss=0.03307, over 3270203.23 frames. ], batch size: 165, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:28:31,194 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.272e+02 3.501e+02 4.159e+02 5.209e+02 9.522e+02, threshold=8.317e+02, percent-clipped=2.0 +2023-03-29 20:29:18,570 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83852.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:30:17,300 INFO [train.py:892] (0/4) Epoch 46, batch 400, loss[loss=0.1302, simple_loss=0.2106, pruned_loss=0.02484, over 19839.00 frames. ], tot_loss[loss=0.1485, simple_loss=0.2303, pruned_loss=0.03332, over 3420621.32 frames. ], batch size: 101, lr: 3.40e-03, grad_scale: 16.0 +2023-03-29 20:31:21,554 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83900.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:32:25,457 INFO [train.py:892] (0/4) Epoch 46, batch 450, loss[loss=0.1577, simple_loss=0.2403, pruned_loss=0.03759, over 19765.00 frames. ], tot_loss[loss=0.1488, simple_loss=0.2304, pruned_loss=0.03358, over 3537836.08 frames. ], batch size: 88, lr: 3.40e-03, grad_scale: 8.0 +2023-03-29 20:32:48,218 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.948e+02 3.420e+02 3.775e+02 4.552e+02 8.094e+02, threshold=7.550e+02, percent-clipped=0.0 +2023-03-29 20:33:01,238 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83940.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:33:22,527 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83949.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:34:29,721 INFO [train.py:892] (0/4) Epoch 46, batch 500, loss[loss=0.1442, simple_loss=0.2292, pruned_loss=0.02963, over 19593.00 frames. ], tot_loss[loss=0.1483, simple_loss=0.2302, pruned_loss=0.03326, over 3629465.10 frames. ], batch size: 45, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:34:45,331 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. 
limit=2.0 +2023-03-29 20:34:51,512 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83985.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:34:58,661 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83988.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:35:29,586 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-84000.pt +2023-03-29 20:35:59,532 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84010.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 20:36:35,408 INFO [train.py:892] (0/4) Epoch 46, batch 550, loss[loss=0.1431, simple_loss=0.227, pruned_loss=0.02955, over 19764.00 frames. ], tot_loss[loss=0.1482, simple_loss=0.2297, pruned_loss=0.03334, over 3701739.38 frames. ], batch size: 198, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:36:58,609 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.358e+02 3.554e+02 4.105e+02 5.072e+02 7.939e+02, threshold=8.210e+02, percent-clipped=2.0 +2023-03-29 20:37:26,917 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84046.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:38:42,379 INFO [train.py:892] (0/4) Epoch 46, batch 600, loss[loss=0.1625, simple_loss=0.2414, pruned_loss=0.04178, over 19728.00 frames. ], tot_loss[loss=0.1482, simple_loss=0.2299, pruned_loss=0.03324, over 3756854.58 frames. ], batch size: 269, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:38:56,080 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-03-29 20:39:34,302 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84097.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:40:15,555 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84116.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:40:40,125 INFO [train.py:892] (0/4) Epoch 46, batch 650, loss[loss=0.1198, simple_loss=0.2093, pruned_loss=0.01509, over 19587.00 frames. ], tot_loss[loss=0.1474, simple_loss=0.2289, pruned_loss=0.03291, over 3799708.13 frames. 
], batch size: 44, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:40:55,570 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.9061, 6.2093, 6.2644, 6.1448, 6.0063, 6.2127, 5.5402, 5.5460], + device='cuda:0'), covar=tensor([0.0418, 0.0440, 0.0408, 0.0380, 0.0511, 0.0449, 0.0670, 0.1055], + device='cuda:0'), in_proj_covar=tensor([0.0296, 0.0314, 0.0323, 0.0283, 0.0294, 0.0275, 0.0288, 0.0336], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 20:41:04,400 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.462e+02 3.455e+02 4.092e+02 4.989e+02 7.207e+02, threshold=8.185e+02, percent-clipped=0.0 +2023-03-29 20:41:39,151 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7801, 3.5092, 3.8173, 2.8199, 3.9535, 3.3770, 3.4479, 3.7155], + device='cuda:0'), covar=tensor([0.0654, 0.0488, 0.0580, 0.0870, 0.0325, 0.0452, 0.0620, 0.0450], + device='cuda:0'), in_proj_covar=tensor([0.0086, 0.0096, 0.0093, 0.0116, 0.0088, 0.0092, 0.0088, 0.0083], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 20:42:03,327 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84158.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:42:21,764 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.2676, 2.3100, 1.6075, 2.4389, 2.3340, 2.2916, 2.4209, 2.0019], + device='cuda:0'), covar=tensor([0.0729, 0.0780, 0.1271, 0.0744, 0.0736, 0.0699, 0.0687, 0.1056], + device='cuda:0'), in_proj_covar=tensor([0.0151, 0.0151, 0.0148, 0.0162, 0.0141, 0.0148, 0.0157, 0.0155], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-29 20:42:46,667 INFO [train.py:892] (0/4) Epoch 46, batch 700, loss[loss=0.1386, simple_loss=0.221, pruned_loss=0.02812, over 19750.00 frames. ], tot_loss[loss=0.1476, simple_loss=0.2297, pruned_loss=0.03279, over 3834253.43 frames. ], batch size: 256, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:43:35,286 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7909, 3.6567, 4.0339, 3.7015, 3.4467, 3.9123, 3.8030, 4.0631], + device='cuda:0'), covar=tensor([0.0742, 0.0386, 0.0372, 0.0409, 0.1215, 0.0616, 0.0502, 0.0400], + device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0234, 0.0236, 0.0246, 0.0216, 0.0261, 0.0248, 0.0233], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 20:43:45,103 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84199.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:44:53,965 INFO [train.py:892] (0/4) Epoch 46, batch 750, loss[loss=0.1307, simple_loss=0.2094, pruned_loss=0.02602, over 19749.00 frames. ], tot_loss[loss=0.147, simple_loss=0.2291, pruned_loss=0.03239, over 3858101.14 frames. 
], batch size: 44, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:45:11,784 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4674, 4.2121, 4.2651, 4.4928, 4.2642, 4.5643, 4.5453, 4.7302], + device='cuda:0'), covar=tensor([0.0720, 0.0455, 0.0525, 0.0416, 0.0693, 0.0528, 0.0477, 0.0313], + device='cuda:0'), in_proj_covar=tensor([0.0165, 0.0191, 0.0211, 0.0190, 0.0188, 0.0170, 0.0164, 0.0211], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 20:45:18,699 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.966e+02 3.322e+02 4.189e+02 5.032e+02 8.486e+02, threshold=8.378e+02, percent-clipped=1.0 +2023-03-29 20:45:22,178 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84236.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:46:21,607 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84260.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:46:56,847 INFO [train.py:892] (0/4) Epoch 46, batch 800, loss[loss=0.1337, simple_loss=0.2162, pruned_loss=0.02562, over 19589.00 frames. ], tot_loss[loss=0.1481, simple_loss=0.2306, pruned_loss=0.03282, over 3877410.21 frames. ], batch size: 42, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:47:48,270 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84297.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:48:06,626 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84305.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 20:48:30,067 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5139, 4.2389, 4.2573, 4.5133, 4.2669, 4.5387, 4.5648, 4.7637], + device='cuda:0'), covar=tensor([0.0674, 0.0437, 0.0546, 0.0397, 0.0674, 0.0508, 0.0465, 0.0334], + device='cuda:0'), in_proj_covar=tensor([0.0166, 0.0192, 0.0212, 0.0191, 0.0188, 0.0171, 0.0165, 0.0213], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 20:48:59,454 INFO [train.py:892] (0/4) Epoch 46, batch 850, loss[loss=0.1352, simple_loss=0.214, pruned_loss=0.02823, over 19861.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2313, pruned_loss=0.03303, over 3892765.89 frames. ], batch size: 122, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:49:22,810 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.218e+02 3.307e+02 4.048e+02 4.791e+02 1.064e+03, threshold=8.096e+02, percent-clipped=1.0 +2023-03-29 20:49:26,863 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84336.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:49:40,990 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84341.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:51:08,385 INFO [train.py:892] (0/4) Epoch 46, batch 900, loss[loss=0.1539, simple_loss=0.2283, pruned_loss=0.03972, over 19762.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2309, pruned_loss=0.03326, over 3904266.26 frames. 
], batch size: 152, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:51:40,514 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3007, 3.3623, 3.4629, 2.6608, 3.6179, 2.9918, 3.2817, 3.5406], + device='cuda:0'), covar=tensor([0.0886, 0.0424, 0.0682, 0.0847, 0.0308, 0.0519, 0.0508, 0.0345], + device='cuda:0'), in_proj_covar=tensor([0.0087, 0.0096, 0.0093, 0.0116, 0.0088, 0.0092, 0.0088, 0.0083], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 20:51:59,780 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84397.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:52:43,848 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.46 vs. limit=5.0 +2023-03-29 20:52:45,720 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84416.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:52:53,489 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2400, 2.9262, 3.3749, 3.4280, 3.8695, 4.4973, 4.2624, 4.3499], + device='cuda:0'), covar=tensor([0.0893, 0.1543, 0.1201, 0.0698, 0.0414, 0.0207, 0.0363, 0.0392], + device='cuda:0'), in_proj_covar=tensor([0.0169, 0.0173, 0.0185, 0.0160, 0.0146, 0.0140, 0.0136, 0.0125], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 20:52:55,914 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5847, 3.1628, 3.4858, 3.0536, 3.7853, 3.7852, 4.3891, 4.8609], + device='cuda:0'), covar=tensor([0.0450, 0.1600, 0.1459, 0.2234, 0.1515, 0.1323, 0.0538, 0.0391], + device='cuda:0'), in_proj_covar=tensor([0.0266, 0.0250, 0.0278, 0.0266, 0.0313, 0.0269, 0.0244, 0.0276], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 20:53:09,157 INFO [train.py:892] (0/4) Epoch 46, batch 950, loss[loss=0.128, simple_loss=0.2135, pruned_loss=0.02124, over 19850.00 frames. ], tot_loss[loss=0.1484, simple_loss=0.2309, pruned_loss=0.03294, over 3914917.66 frames. 
], batch size: 81, lr: 3.39e-03, grad_scale: 8.0 +2023-03-29 20:53:10,232 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.0804, 2.2972, 2.1143, 1.5642, 2.1252, 2.2781, 2.2079, 2.2701], + device='cuda:0'), covar=tensor([0.0474, 0.0390, 0.0419, 0.0650, 0.0482, 0.0379, 0.0371, 0.0336], + device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0109, 0.0110, 0.0109, 0.0113, 0.0099, 0.0101, 0.0100], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 20:53:32,120 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.409e+02 3.247e+02 3.846e+02 4.740e+02 1.002e+03, threshold=7.692e+02, percent-clipped=2.0 +2023-03-29 20:54:04,221 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.2566, 4.8321, 4.9247, 4.6327, 5.1944, 3.2899, 4.2349, 2.6108], + device='cuda:0'), covar=tensor([0.0139, 0.0199, 0.0127, 0.0179, 0.0126, 0.0955, 0.0803, 0.1425], + device='cuda:0'), in_proj_covar=tensor([0.0110, 0.0155, 0.0119, 0.0141, 0.0125, 0.0140, 0.0146, 0.0133], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-29 20:54:17,723 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84453.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:54:42,921 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84464.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:55:12,998 INFO [train.py:892] (0/4) Epoch 46, batch 1000, loss[loss=0.1353, simple_loss=0.2179, pruned_loss=0.02637, over 19828.00 frames. ], tot_loss[loss=0.1476, simple_loss=0.2299, pruned_loss=0.03264, over 3923369.04 frames. ], batch size: 204, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 20:55:26,460 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-03-29 20:55:47,952 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7169, 3.1246, 3.5767, 3.1032, 3.8360, 3.8640, 4.4418, 4.9951], + device='cuda:0'), covar=tensor([0.0403, 0.1694, 0.1388, 0.2226, 0.1652, 0.1263, 0.0618, 0.0383], + device='cuda:0'), in_proj_covar=tensor([0.0266, 0.0250, 0.0277, 0.0266, 0.0312, 0.0269, 0.0244, 0.0276], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 20:55:53,747 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0834, 3.8263, 3.8741, 4.0712, 3.8462, 4.0072, 4.1141, 4.2941], + device='cuda:0'), covar=tensor([0.0661, 0.0498, 0.0575, 0.0442, 0.0755, 0.0664, 0.0502, 0.0333], + device='cuda:0'), in_proj_covar=tensor([0.0165, 0.0191, 0.0211, 0.0190, 0.0188, 0.0170, 0.0165, 0.0212], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 20:57:09,376 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84525.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:57:10,349 INFO [train.py:892] (0/4) Epoch 46, batch 1050, loss[loss=0.1432, simple_loss=0.2173, pruned_loss=0.03456, over 19875.00 frames. ], tot_loss[loss=0.1468, simple_loss=0.229, pruned_loss=0.03232, over 3930228.74 frames. 
], batch size: 125, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 20:57:31,617 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.128e+02 3.422e+02 4.030e+02 5.116e+02 8.679e+02, threshold=8.059e+02, percent-clipped=4.0 +2023-03-29 20:58:20,450 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84555.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:59:10,804 INFO [train.py:892] (0/4) Epoch 46, batch 1100, loss[loss=0.1399, simple_loss=0.2256, pruned_loss=0.02706, over 19792.00 frames. ], tot_loss[loss=0.1471, simple_loss=0.2292, pruned_loss=0.03246, over 3935738.51 frames. ], batch size: 83, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 20:59:31,976 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3948, 4.1924, 4.2144, 3.9430, 4.3938, 3.1051, 3.6940, 2.1541], + device='cuda:0'), covar=tensor([0.0187, 0.0242, 0.0150, 0.0198, 0.0142, 0.0975, 0.0662, 0.1597], + device='cuda:0'), in_proj_covar=tensor([0.0111, 0.0156, 0.0120, 0.0142, 0.0126, 0.0141, 0.0147, 0.0134], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-29 20:59:34,450 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84586.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 20:59:48,841 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84592.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:00:22,830 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84605.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:01:12,894 INFO [train.py:892] (0/4) Epoch 46, batch 1150, loss[loss=0.2307, simple_loss=0.3193, pruned_loss=0.07111, over 19257.00 frames. ], tot_loss[loss=0.1467, simple_loss=0.2285, pruned_loss=0.03244, over 3939667.36 frames. ], batch size: 483, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:01:31,288 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84633.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:01:34,946 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.372e+02 3.414e+02 3.912e+02 4.793e+02 1.001e+03, threshold=7.825e+02, percent-clipped=1.0 +2023-03-29 21:01:52,722 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84641.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:02:22,228 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84653.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:03:17,268 INFO [train.py:892] (0/4) Epoch 46, batch 1200, loss[loss=0.1555, simple_loss=0.2415, pruned_loss=0.03478, over 19857.00 frames. ], tot_loss[loss=0.1475, simple_loss=0.2295, pruned_loss=0.03277, over 3941878.44 frames. ], batch size: 78, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:03:49,815 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84689.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:03:56,570 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84692.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:04:01,660 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84694.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:05:19,029 INFO [train.py:892] (0/4) Epoch 46, batch 1250, loss[loss=0.1562, simple_loss=0.2474, pruned_loss=0.03254, over 19818.00 frames. ], tot_loss[loss=0.1479, simple_loss=0.2298, pruned_loss=0.03293, over 3943366.94 frames. 
], batch size: 57, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:05:19,897 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6500, 4.3917, 4.4477, 4.1792, 4.6566, 3.1176, 3.8803, 2.2231], + device='cuda:0'), covar=tensor([0.0173, 0.0243, 0.0145, 0.0189, 0.0143, 0.1026, 0.0717, 0.1547], + device='cuda:0'), in_proj_covar=tensor([0.0110, 0.0156, 0.0119, 0.0142, 0.0125, 0.0141, 0.0147, 0.0134], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-29 21:05:39,521 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.527e+02 3.466e+02 3.877e+02 4.842e+02 1.121e+03, threshold=7.755e+02, percent-clipped=3.0 +2023-03-29 21:06:28,393 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84753.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:06:39,253 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1353, 2.4179, 2.1784, 1.6752, 2.2295, 2.3899, 2.2775, 2.3791], + device='cuda:0'), covar=tensor([0.0498, 0.0372, 0.0397, 0.0599, 0.0435, 0.0348, 0.0366, 0.0325], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0109, 0.0110, 0.0110, 0.0113, 0.0100, 0.0101, 0.0100], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 21:07:12,924 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3780, 3.6280, 3.7662, 4.3424, 2.7979, 3.2935, 2.8059, 2.6949], + device='cuda:0'), covar=tensor([0.0517, 0.1938, 0.0999, 0.0474, 0.2137, 0.1256, 0.1319, 0.1695], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0332, 0.0258, 0.0216, 0.0252, 0.0220, 0.0227, 0.0223], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 21:07:25,115 INFO [train.py:892] (0/4) Epoch 46, batch 1300, loss[loss=0.1488, simple_loss=0.225, pruned_loss=0.03634, over 19872.00 frames. ], tot_loss[loss=0.149, simple_loss=0.2313, pruned_loss=0.03339, over 3945056.23 frames. ], batch size: 138, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:07:38,290 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.7229, 3.0327, 2.6466, 2.2910, 2.6725, 3.0171, 3.0026, 3.0213], + device='cuda:0'), covar=tensor([0.0446, 0.0351, 0.0385, 0.0622, 0.0441, 0.0334, 0.0274, 0.0280], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0109, 0.0110, 0.0110, 0.0114, 0.0100, 0.0101, 0.0100], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-03-29 21:08:25,751 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84800.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:08:27,579 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84801.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:09:27,286 INFO [train.py:892] (0/4) Epoch 46, batch 1350, loss[loss=0.1984, simple_loss=0.3151, pruned_loss=0.04081, over 18044.00 frames. ], tot_loss[loss=0.1491, simple_loss=0.2314, pruned_loss=0.03342, over 3944152.13 frames. 
], batch size: 633, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:09:37,801 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3218, 4.2053, 4.5928, 4.1815, 3.8624, 4.4016, 4.2679, 4.6448], + device='cuda:0'), covar=tensor([0.0714, 0.0352, 0.0330, 0.0402, 0.0990, 0.0564, 0.0508, 0.0336], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0233, 0.0235, 0.0246, 0.0216, 0.0261, 0.0248, 0.0232], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 21:09:42,843 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.6415, 3.7443, 2.2543, 3.8902, 3.9888, 1.8973, 3.2991, 3.0913], + device='cuda:0'), covar=tensor([0.0839, 0.0883, 0.2959, 0.0835, 0.0706, 0.2826, 0.1200, 0.0974], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0271, 0.0241, 0.0291, 0.0272, 0.0210, 0.0248, 0.0212], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 21:09:50,711 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.135e+02 3.546e+02 4.239e+02 5.224e+02 8.019e+02, threshold=8.477e+02, percent-clipped=4.0 +2023-03-29 21:10:04,960 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-29 21:10:45,667 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84855.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:10:48,128 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7261, 3.1699, 3.5645, 2.9806, 3.8535, 3.7899, 4.4760, 4.9679], + device='cuda:0'), covar=tensor([0.0479, 0.1645, 0.1428, 0.2325, 0.1446, 0.1330, 0.0545, 0.0437], + device='cuda:0'), in_proj_covar=tensor([0.0265, 0.0248, 0.0276, 0.0265, 0.0311, 0.0268, 0.0242, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 21:11:00,358 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84861.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:11:35,522 INFO [train.py:892] (0/4) Epoch 46, batch 1400, loss[loss=0.1326, simple_loss=0.2083, pruned_loss=0.02847, over 19875.00 frames. ], tot_loss[loss=0.1478, simple_loss=0.2299, pruned_loss=0.03281, over 3946267.49 frames. 
], batch size: 125, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:11:46,514 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84881.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:11:46,777 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0790, 3.3574, 3.5424, 3.9657, 2.7465, 3.0462, 2.8240, 2.6865], + device='cuda:0'), covar=tensor([0.0577, 0.1824, 0.0956, 0.0510, 0.1975, 0.1169, 0.1236, 0.1534], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0333, 0.0259, 0.0216, 0.0252, 0.0220, 0.0227, 0.0223], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 21:11:55,573 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.9836, 2.5289, 2.8958, 3.1091, 3.6473, 3.9660, 3.8136, 3.8667], + device='cuda:0'), covar=tensor([0.1000, 0.1666, 0.1365, 0.0823, 0.0461, 0.0269, 0.0414, 0.0460], + device='cuda:0'), in_proj_covar=tensor([0.0167, 0.0171, 0.0183, 0.0159, 0.0145, 0.0139, 0.0135, 0.0124], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 21:12:08,947 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84892.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:12:20,215 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9161, 3.8334, 3.7536, 3.5040, 3.8963, 2.8069, 3.2261, 1.9434], + device='cuda:0'), covar=tensor([0.0212, 0.0245, 0.0177, 0.0231, 0.0166, 0.1182, 0.0603, 0.1667], + device='cuda:0'), in_proj_covar=tensor([0.0111, 0.0156, 0.0119, 0.0143, 0.0125, 0.0140, 0.0147, 0.0134], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-29 21:12:38,245 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84903.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:12:43,121 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2869, 2.4746, 4.6646, 4.0355, 4.4029, 4.5807, 4.3932, 4.2395], + device='cuda:0'), covar=tensor([0.0684, 0.1180, 0.0108, 0.0762, 0.0172, 0.0215, 0.0186, 0.0190], + device='cuda:0'), in_proj_covar=tensor([0.0105, 0.0107, 0.0092, 0.0153, 0.0091, 0.0105, 0.0094, 0.0091], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 21:13:32,191 INFO [train.py:892] (0/4) Epoch 46, batch 1450, loss[loss=0.1196, simple_loss=0.1926, pruned_loss=0.02326, over 19803.00 frames. ], tot_loss[loss=0.1471, simple_loss=0.2295, pruned_loss=0.03235, over 3947816.92 frames. ], batch size: 117, lr: 3.38e-03, grad_scale: 8.0 +2023-03-29 21:13:55,750 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.367e+02 3.403e+02 4.016e+02 4.651e+02 7.911e+02, threshold=8.032e+02, percent-clipped=0.0 +2023-03-29 21:14:07,994 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84940.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:14:36,186 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84951.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:15:44,106 INFO [train.py:892] (0/4) Epoch 46, batch 1500, loss[loss=0.1404, simple_loss=0.2292, pruned_loss=0.02581, over 19832.00 frames. ], tot_loss[loss=0.1477, simple_loss=0.2301, pruned_loss=0.03262, over 3948899.30 frames. 
], batch size: 52, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:16:15,224 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84989.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:16:22,570 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84992.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:17:16,682 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85012.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:17:49,528 INFO [train.py:892] (0/4) Epoch 46, batch 1550, loss[loss=0.1443, simple_loss=0.2184, pruned_loss=0.03516, over 19824.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.2313, pruned_loss=0.03321, over 3948051.19 frames. ], batch size: 127, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:17:58,177 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.0039, 4.6674, 4.7236, 4.4675, 5.0007, 3.2878, 4.0976, 2.5954], + device='cuda:0'), covar=tensor([0.0172, 0.0215, 0.0149, 0.0202, 0.0136, 0.0950, 0.0813, 0.1397], + device='cuda:0'), in_proj_covar=tensor([0.0111, 0.0156, 0.0119, 0.0143, 0.0125, 0.0140, 0.0147, 0.0134], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-29 21:18:12,464 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.101e+02 3.669e+02 4.198e+02 5.006e+02 8.856e+02, threshold=8.396e+02, percent-clipped=3.0 +2023-03-29 21:18:26,096 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85040.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:19:20,313 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5910, 3.4988, 3.9008, 3.5158, 3.4287, 3.8266, 3.6497, 3.9625], + device='cuda:0'), covar=tensor([0.1026, 0.0473, 0.0496, 0.0524, 0.1290, 0.0670, 0.0587, 0.0446], + device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0233, 0.0236, 0.0247, 0.0215, 0.0261, 0.0247, 0.0232], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 21:19:59,494 INFO [train.py:892] (0/4) Epoch 46, batch 1600, loss[loss=0.135, simple_loss=0.2199, pruned_loss=0.02503, over 19879.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2309, pruned_loss=0.03321, over 3949054.38 frames. ], batch size: 95, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:21:21,142 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85107.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:21:42,025 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85115.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:22:09,244 INFO [train.py:892] (0/4) Epoch 46, batch 1650, loss[loss=0.1368, simple_loss=0.2291, pruned_loss=0.02229, over 19696.00 frames. ], tot_loss[loss=0.1489, simple_loss=0.2311, pruned_loss=0.03337, over 3949324.78 frames. 
], batch size: 48, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:22:31,375 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.611e+02 3.617e+02 4.144e+02 5.075e+02 9.786e+02, threshold=8.288e+02, percent-clipped=2.0 +2023-03-29 21:23:27,032 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85156.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:23:56,942 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85168.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 21:24:15,557 INFO [train.py:892] (0/4) Epoch 46, batch 1700, loss[loss=0.1195, simple_loss=0.1911, pruned_loss=0.02392, over 19770.00 frames. ], tot_loss[loss=0.1483, simple_loss=0.2307, pruned_loss=0.03297, over 3950009.25 frames. ], batch size: 130, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:24:16,591 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85176.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:24:29,175 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85181.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:26:14,805 INFO [train.py:892] (0/4) Epoch 46, batch 1750, loss[loss=0.1446, simple_loss=0.2271, pruned_loss=0.03098, over 19661.00 frames. ], tot_loss[loss=0.1482, simple_loss=0.2305, pruned_loss=0.03289, over 3948889.02 frames. ], batch size: 43, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:26:22,219 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85229.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:26:35,386 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.122e+02 3.389e+02 3.888e+02 4.671e+02 1.140e+03, threshold=7.776e+02, percent-clipped=1.0 +2023-03-29 21:28:03,214 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5317, 2.6156, 1.7441, 2.8551, 2.6789, 2.7381, 2.8581, 2.3544], + device='cuda:0'), covar=tensor([0.0756, 0.0774, 0.1511, 0.0719, 0.0731, 0.0665, 0.0706, 0.0978], + device='cuda:0'), in_proj_covar=tensor([0.0151, 0.0152, 0.0148, 0.0162, 0.0140, 0.0147, 0.0156, 0.0154], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 21:28:04,553 INFO [train.py:892] (0/4) Epoch 46, batch 1800, loss[loss=0.1595, simple_loss=0.2384, pruned_loss=0.04031, over 19741.00 frames. ], tot_loss[loss=0.1488, simple_loss=0.2306, pruned_loss=0.03346, over 3949080.88 frames. ], batch size: 291, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:28:25,772 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0967, 3.0537, 4.7355, 3.4961, 3.7073, 3.4622, 2.6107, 2.7491], + device='cuda:0'), covar=tensor([0.0989, 0.3281, 0.0428, 0.1182, 0.1877, 0.1758, 0.2809, 0.2727], + device='cuda:0'), in_proj_covar=tensor([0.0361, 0.0407, 0.0358, 0.0301, 0.0383, 0.0405, 0.0393, 0.0370], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 21:28:30,713 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85289.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:29:00,271 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85307.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:29:37,867 INFO [train.py:892] (0/4) Epoch 46, batch 1850, loss[loss=0.1503, simple_loss=0.2365, pruned_loss=0.03205, over 19829.00 frames. 
], tot_loss[loss=0.1493, simple_loss=0.232, pruned_loss=0.03327, over 3948565.90 frames. ], batch size: 57, lr: 3.37e-03, grad_scale: 8.0 +2023-03-29 21:29:45,695 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-46.pt +2023-03-29 21:30:45,948 INFO [train.py:892] (0/4) Epoch 47, batch 0, loss[loss=0.1461, simple_loss=0.2248, pruned_loss=0.03368, over 19837.00 frames. ], tot_loss[loss=0.1461, simple_loss=0.2248, pruned_loss=0.03368, over 19837.00 frames. ], batch size: 239, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:30:45,949 INFO [train.py:917] (0/4) Computing validation loss +2023-03-29 21:31:18,119 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1274, 2.9478, 4.8571, 3.5560, 3.8321, 3.3597, 2.5464, 2.6947], + device='cuda:0'), covar=tensor([0.1025, 0.3624, 0.0402, 0.1080, 0.1957, 0.1767, 0.3166, 0.2962], + device='cuda:0'), in_proj_covar=tensor([0.0361, 0.0408, 0.0358, 0.0301, 0.0383, 0.0406, 0.0394, 0.0371], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 21:31:22,863 INFO [train.py:926] (0/4) Epoch 47, validation: loss=0.1894, simple_loss=0.2504, pruned_loss=0.06424, over 2883724.00 frames. +2023-03-29 21:31:22,864 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-29 21:31:31,552 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.328e+02 3.387e+02 4.010e+02 4.758e+02 1.602e+03, threshold=8.020e+02, percent-clipped=2.0 +2023-03-29 21:31:38,868 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85337.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:33:29,565 INFO [train.py:892] (0/4) Epoch 47, batch 50, loss[loss=0.1284, simple_loss=0.2108, pruned_loss=0.02301, over 19791.00 frames. ], tot_loss[loss=0.1409, simple_loss=0.2217, pruned_loss=0.03007, over 891492.23 frames. ], batch size: 173, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:35:26,190 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85428.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:35:31,476 INFO [train.py:892] (0/4) Epoch 47, batch 100, loss[loss=0.1152, simple_loss=0.1946, pruned_loss=0.01788, over 19798.00 frames. ], tot_loss[loss=0.1442, simple_loss=0.2253, pruned_loss=0.03157, over 1570819.23 frames. ], batch size: 107, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:35:42,067 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.536e+02 3.357e+02 4.311e+02 4.946e+02 7.786e+02, threshold=8.621e+02, percent-clipped=0.0 +2023-03-29 21:36:33,764 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85456.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:36:49,730 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85463.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 21:37:08,367 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85471.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:37:33,154 INFO [train.py:892] (0/4) Epoch 47, batch 150, loss[loss=0.1497, simple_loss=0.2351, pruned_loss=0.03213, over 19644.00 frames. ], tot_loss[loss=0.1437, simple_loss=0.2244, pruned_loss=0.03146, over 2099849.16 frames. 
], batch size: 72, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:37:37,155 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2520, 3.5202, 3.7375, 4.2765, 2.7817, 3.3110, 2.6125, 2.6453], + device='cuda:0'), covar=tensor([0.0511, 0.1868, 0.0884, 0.0427, 0.2080, 0.1021, 0.1381, 0.1632], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0331, 0.0258, 0.0216, 0.0253, 0.0220, 0.0227, 0.0223], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 21:37:39,880 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85483.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:37:55,765 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85489.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:38:35,445 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85504.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:38:40,571 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7723, 3.8619, 2.2863, 4.0023, 4.1554, 1.8683, 3.4526, 3.1544], + device='cuda:0'), covar=tensor([0.0822, 0.0811, 0.2860, 0.0850, 0.0604, 0.2795, 0.1140, 0.0977], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0272, 0.0241, 0.0293, 0.0272, 0.0210, 0.0249, 0.0213], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 21:39:40,453 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85530.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:39:42,113 INFO [train.py:892] (0/4) Epoch 47, batch 200, loss[loss=0.1458, simple_loss=0.2306, pruned_loss=0.03049, over 19738.00 frames. ], tot_loss[loss=0.146, simple_loss=0.2273, pruned_loss=0.03239, over 2510308.16 frames. ], batch size: 99, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:39:43,197 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.3750, 4.5613, 2.5990, 4.7788, 5.0447, 2.0900, 4.3411, 3.6767], + device='cuda:0'), covar=tensor([0.0675, 0.0684, 0.2702, 0.0664, 0.0410, 0.2839, 0.0884, 0.0895], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0273, 0.0242, 0.0295, 0.0274, 0.0212, 0.0251, 0.0214], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 21:39:50,775 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.307e+02 3.468e+02 4.248e+02 4.914e+02 6.825e+02, threshold=8.497e+02, percent-clipped=0.0 +2023-03-29 21:40:12,736 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85544.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:40:26,003 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. limit=2.0 +2023-03-29 21:40:39,159 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=2.39 vs. limit=5.0 +2023-03-29 21:41:45,825 INFO [train.py:892] (0/4) Epoch 47, batch 250, loss[loss=0.1324, simple_loss=0.21, pruned_loss=0.02737, over 19822.00 frames. ], tot_loss[loss=0.1455, simple_loss=0.227, pruned_loss=0.03198, over 2829719.30 frames. ], batch size: 187, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:42:00,712 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.53 vs. 
limit=5.0 +2023-03-29 21:42:12,283 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85591.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:42:54,653 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85607.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:43:40,243 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85625.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:43:54,573 INFO [train.py:892] (0/4) Epoch 47, batch 300, loss[loss=0.1355, simple_loss=0.215, pruned_loss=0.028, over 19888.00 frames. ], tot_loss[loss=0.1458, simple_loss=0.2275, pruned_loss=0.03211, over 3078544.51 frames. ], batch size: 63, lr: 3.33e-03, grad_scale: 8.0 +2023-03-29 21:44:06,338 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.290e+02 3.428e+02 3.993e+02 4.702e+02 7.624e+02, threshold=7.986e+02, percent-clipped=0.0 +2023-03-29 21:44:50,531 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85655.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:45:49,321 INFO [train.py:892] (0/4) Epoch 47, batch 350, loss[loss=0.1358, simple_loss=0.2192, pruned_loss=0.0262, over 19897.00 frames. ], tot_loss[loss=0.1464, simple_loss=0.2281, pruned_loss=0.03232, over 3272139.69 frames. ], batch size: 113, lr: 3.32e-03, grad_scale: 8.0 +2023-03-29 21:46:02,112 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85686.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 21:47:05,286 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2162, 3.5133, 3.6975, 4.2326, 2.7965, 3.2086, 2.8191, 2.6956], + device='cuda:0'), covar=tensor([0.0566, 0.1891, 0.0974, 0.0456, 0.2073, 0.1150, 0.1325, 0.1594], + device='cuda:0'), in_proj_covar=tensor([0.0259, 0.0333, 0.0260, 0.0218, 0.0255, 0.0221, 0.0228, 0.0224], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 21:47:46,168 INFO [train.py:892] (0/4) Epoch 47, batch 400, loss[loss=0.1351, simple_loss=0.2154, pruned_loss=0.02742, over 19858.00 frames. ], tot_loss[loss=0.147, simple_loss=0.2291, pruned_loss=0.03246, over 3420284.35 frames. ], batch size: 106, lr: 3.32e-03, grad_scale: 8.0 +2023-03-29 21:47:56,635 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.491e+02 3.390e+02 3.969e+02 4.785e+02 1.194e+03, threshold=7.938e+02, percent-clipped=3.0 +2023-03-29 21:48:51,531 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4550, 3.6746, 2.3688, 4.2962, 3.8270, 4.1861, 4.2612, 3.3994], + device='cuda:0'), covar=tensor([0.0683, 0.0591, 0.1430, 0.0564, 0.0583, 0.0406, 0.0555, 0.0803], + device='cuda:0'), in_proj_covar=tensor([0.0153, 0.0154, 0.0150, 0.0164, 0.0143, 0.0149, 0.0159, 0.0157], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0004], + device='cuda:0') +2023-03-29 21:49:02,454 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85763.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:49:20,494 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85771.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:49:46,487 INFO [train.py:892] (0/4) Epoch 47, batch 450, loss[loss=0.137, simple_loss=0.2118, pruned_loss=0.03109, over 19840.00 frames. ], tot_loss[loss=0.1472, simple_loss=0.2296, pruned_loss=0.0324, over 3538018.57 frames. 
], batch size: 143, lr: 3.32e-03, grad_scale: 8.0 +2023-03-29 21:49:53,646 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85784.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:50:56,081 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85811.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:51:16,844 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85819.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:51:43,848 INFO [train.py:892] (0/4) Epoch 47, batch 500, loss[loss=0.1512, simple_loss=0.2369, pruned_loss=0.03281, over 19645.00 frames. ], tot_loss[loss=0.1472, simple_loss=0.2295, pruned_loss=0.03248, over 3628700.77 frames. ], batch size: 299, lr: 3.32e-03, grad_scale: 8.0 +2023-03-29 21:51:52,517 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.359e+02 3.439e+02 4.112e+02 5.000e+02 9.207e+02, threshold=8.225e+02, percent-clipped=1.0 +2023-03-29 21:52:01,390 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85839.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:53:17,863 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.1906, 5.4601, 5.5225, 5.3848, 5.1986, 5.4795, 4.9412, 4.9649], + device='cuda:0'), covar=tensor([0.0476, 0.0508, 0.0442, 0.0413, 0.0570, 0.0483, 0.0669, 0.0960], + device='cuda:0'), in_proj_covar=tensor([0.0298, 0.0318, 0.0326, 0.0285, 0.0297, 0.0277, 0.0289, 0.0337], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 21:53:23,859 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85875.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:53:36,869 INFO [train.py:892] (0/4) Epoch 47, batch 550, loss[loss=0.132, simple_loss=0.2129, pruned_loss=0.02558, over 19786.00 frames. ], tot_loss[loss=0.1468, simple_loss=0.2288, pruned_loss=0.03239, over 3700695.47 frames. ], batch size: 83, lr: 3.32e-03, grad_scale: 8.0 +2023-03-29 21:53:50,045 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85886.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:55:33,519 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85930.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 21:55:34,548 INFO [train.py:892] (0/4) Epoch 47, batch 600, loss[loss=0.1504, simple_loss=0.2296, pruned_loss=0.0356, over 19744.00 frames. ], tot_loss[loss=0.1467, simple_loss=0.2282, pruned_loss=0.03261, over 3756673.02 frames. 
], batch size: 97, lr: 3.32e-03, grad_scale: 16.0 +2023-03-29 21:55:42,765 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.322e+02 3.636e+02 4.237e+02 5.117e+02 1.424e+03, threshold=8.474e+02, percent-clipped=3.0 +2023-03-29 21:55:45,748 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85936.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:56:37,099 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85957.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:56:49,196 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1771, 3.1335, 4.6972, 3.5317, 3.8126, 3.5632, 2.6269, 2.8937], + device='cuda:0'), covar=tensor([0.0965, 0.3165, 0.0456, 0.1143, 0.1764, 0.1566, 0.2814, 0.2562], + device='cuda:0'), in_proj_covar=tensor([0.0360, 0.0408, 0.0358, 0.0301, 0.0382, 0.0406, 0.0393, 0.0370], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 21:57:01,776 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.9343, 5.1133, 5.3635, 5.0903, 5.1902, 4.9702, 5.1182, 4.8670], + device='cuda:0'), covar=tensor([0.1543, 0.1529, 0.0862, 0.1370, 0.0756, 0.0858, 0.1705, 0.1990], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0359, 0.0389, 0.0320, 0.0296, 0.0301, 0.0380, 0.0412], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 21:57:31,390 INFO [train.py:892] (0/4) Epoch 47, batch 650, loss[loss=0.1571, simple_loss=0.2373, pruned_loss=0.03846, over 19700.00 frames. ], tot_loss[loss=0.1476, simple_loss=0.2289, pruned_loss=0.03308, over 3798864.45 frames. ], batch size: 265, lr: 3.32e-03, grad_scale: 16.0 +2023-03-29 21:57:32,201 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85981.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 21:57:58,243 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85991.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 21:58:06,868 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.8718, 4.6202, 5.2985, 4.7243, 4.2894, 4.9802, 4.9287, 5.4045], + device='cuda:0'), covar=tensor([0.0900, 0.0458, 0.0346, 0.0417, 0.0770, 0.0541, 0.0492, 0.0345], + device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0234, 0.0237, 0.0249, 0.0217, 0.0264, 0.0249, 0.0235], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 21:58:20,822 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-86000.pt +2023-03-29 21:58:30,295 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86002.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:58:47,830 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. 
limit=2.0 +2023-03-29 21:59:05,153 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86018.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 21:59:14,329 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1910, 2.1286, 2.2465, 2.1755, 2.2907, 2.2950, 2.2070, 2.2967], + device='cuda:0'), covar=tensor([0.0490, 0.0445, 0.0419, 0.0409, 0.0487, 0.0409, 0.0525, 0.0391], + device='cuda:0'), in_proj_covar=tensor([0.0100, 0.0093, 0.0096, 0.0091, 0.0103, 0.0096, 0.0112, 0.0084], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 21:59:35,616 INFO [train.py:892] (0/4) Epoch 47, batch 700, loss[loss=0.1417, simple_loss=0.2267, pruned_loss=0.02836, over 19855.00 frames. ], tot_loss[loss=0.1464, simple_loss=0.2281, pruned_loss=0.03236, over 3833547.05 frames. ], batch size: 85, lr: 3.32e-03, grad_scale: 16.0 +2023-03-29 21:59:44,261 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.350e+02 3.531e+02 4.077e+02 5.107e+02 6.974e+02, threshold=8.153e+02, percent-clipped=0.0 +2023-03-29 22:00:24,747 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86052.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:00:51,335 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86063.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:01:00,232 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.6898, 2.6903, 2.8666, 2.3464, 2.9065, 2.4415, 2.7956, 2.8034], + device='cuda:0'), covar=tensor([0.0628, 0.0643, 0.0552, 0.0886, 0.0499, 0.0579, 0.0556, 0.0421], + device='cuda:0'), in_proj_covar=tensor([0.0087, 0.0097, 0.0093, 0.0117, 0.0089, 0.0092, 0.0089, 0.0084], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 22:01:06,919 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86069.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:01:36,225 INFO [train.py:892] (0/4) Epoch 47, batch 750, loss[loss=0.1372, simple_loss=0.2141, pruned_loss=0.03017, over 19781.00 frames. ], tot_loss[loss=0.1462, simple_loss=0.2283, pruned_loss=0.03208, over 3858982.65 frames. 
], batch size: 131, lr: 3.32e-03, grad_scale: 16.0 +2023-03-29 22:01:43,023 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86084.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:01:49,021 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.1566, 4.0243, 3.9826, 3.7140, 4.1586, 2.8836, 3.4723, 1.9578], + device='cuda:0'), covar=tensor([0.0210, 0.0246, 0.0159, 0.0232, 0.0167, 0.1204, 0.0709, 0.1693], + device='cuda:0'), in_proj_covar=tensor([0.0111, 0.0157, 0.0120, 0.0143, 0.0126, 0.0141, 0.0148, 0.0134], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-29 22:02:29,443 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.8878, 2.8324, 4.7133, 3.3447, 3.5163, 3.3180, 2.5106, 2.7103], + device='cuda:0'), covar=tensor([0.1304, 0.3931, 0.0533, 0.1326, 0.2457, 0.1920, 0.3155, 0.2857], + device='cuda:0'), in_proj_covar=tensor([0.0363, 0.0411, 0.0361, 0.0303, 0.0385, 0.0409, 0.0396, 0.0372], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 22:02:47,816 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86113.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:03:24,178 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86130.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:03:25,074 INFO [train.py:892] (0/4) Epoch 47, batch 800, loss[loss=0.1337, simple_loss=0.2111, pruned_loss=0.02822, over 19819.00 frames. ], tot_loss[loss=0.1472, simple_loss=0.2293, pruned_loss=0.0326, over 3878783.79 frames. ], batch size: 103, lr: 3.32e-03, grad_scale: 16.0 +2023-03-29 22:03:27,721 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86132.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:03:35,177 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.984e+02 3.596e+02 4.313e+02 5.169e+02 1.005e+03, threshold=8.627e+02, percent-clipped=4.0 +2023-03-29 22:03:46,483 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86139.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:05:09,912 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.1421, 2.0820, 2.2002, 2.2204, 2.2055, 2.2670, 2.1830, 2.2242], + device='cuda:0'), covar=tensor([0.0437, 0.0438, 0.0419, 0.0368, 0.0522, 0.0354, 0.0522, 0.0396], + device='cuda:0'), in_proj_covar=tensor([0.0100, 0.0094, 0.0097, 0.0091, 0.0104, 0.0097, 0.0112, 0.0085], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 22:05:24,302 INFO [train.py:892] (0/4) Epoch 47, batch 850, loss[loss=0.133, simple_loss=0.2143, pruned_loss=0.02584, over 19719.00 frames. ], tot_loss[loss=0.1468, simple_loss=0.2288, pruned_loss=0.0324, over 3894636.46 frames. ], batch size: 54, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:05:37,615 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86186.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:05:41,001 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86187.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:07:21,645 INFO [train.py:892] (0/4) Epoch 47, batch 900, loss[loss=0.1389, simple_loss=0.2141, pruned_loss=0.03186, over 19783.00 frames. 
], tot_loss[loss=0.1463, simple_loss=0.2281, pruned_loss=0.03221, over 3907413.93 frames. ], batch size: 45, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:07:22,439 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86231.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:07:28,925 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86234.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:07:30,577 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.206e+02 3.419e+02 4.069e+02 4.713e+02 1.195e+03, threshold=8.139e+02, percent-clipped=3.0 +2023-03-29 22:07:56,322 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86246.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:09:15,425 INFO [train.py:892] (0/4) Epoch 47, batch 950, loss[loss=0.1239, simple_loss=0.2065, pruned_loss=0.02068, over 19854.00 frames. ], tot_loss[loss=0.1461, simple_loss=0.2282, pruned_loss=0.03201, over 3915289.53 frames. ], batch size: 85, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:09:16,240 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86281.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:09:16,483 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3374, 2.5429, 3.8494, 2.9913, 3.1061, 2.9036, 2.2160, 2.3840], + device='cuda:0'), covar=tensor([0.1368, 0.3374, 0.0658, 0.1221, 0.2101, 0.1792, 0.2840, 0.3027], + device='cuda:0'), in_proj_covar=tensor([0.0361, 0.0410, 0.0360, 0.0301, 0.0383, 0.0407, 0.0395, 0.0371], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 22:09:24,318 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.4818, 4.7431, 5.2029, 4.6489, 4.2904, 5.0290, 4.9129, 5.4083], + device='cuda:0'), covar=tensor([0.1246, 0.0385, 0.0497, 0.0458, 0.0818, 0.0521, 0.0512, 0.0348], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0233, 0.0235, 0.0248, 0.0216, 0.0264, 0.0249, 0.0235], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 22:09:26,928 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. 
limit=2.0 +2023-03-29 22:09:28,414 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86286.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 22:10:16,751 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86307.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:10:18,927 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.5570, 2.5943, 1.7764, 2.8234, 2.6509, 2.7469, 2.8539, 2.3480], + device='cuda:0'), covar=tensor([0.0764, 0.0811, 0.1570, 0.0723, 0.0715, 0.0644, 0.0651, 0.0986], + device='cuda:0'), in_proj_covar=tensor([0.0152, 0.0152, 0.0148, 0.0162, 0.0141, 0.0148, 0.0157, 0.0155], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:0') +2023-03-29 22:10:29,216 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86313.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:10:43,975 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.4409, 2.6622, 4.0800, 3.1776, 3.2736, 3.0268, 2.3136, 2.4967], + device='cuda:0'), covar=tensor([0.1493, 0.3579, 0.0622, 0.1227, 0.2075, 0.1901, 0.2990, 0.3038], + device='cuda:0'), in_proj_covar=tensor([0.0362, 0.0411, 0.0360, 0.0302, 0.0384, 0.0408, 0.0396, 0.0372], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 22:11:06,181 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-03-29 22:11:07,304 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86329.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:11:10,601 INFO [train.py:892] (0/4) Epoch 47, batch 1000, loss[loss=0.1676, simple_loss=0.2638, pruned_loss=0.03571, over 19815.00 frames. ], tot_loss[loss=0.1462, simple_loss=0.2285, pruned_loss=0.0319, over 3923064.56 frames. ], batch size: 57, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:11:20,547 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.209e+02 3.279e+02 4.003e+02 4.723e+02 7.348e+02, threshold=8.007e+02, percent-clipped=0.0 +2023-03-29 22:12:16,400 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86358.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:13:07,767 INFO [train.py:892] (0/4) Epoch 47, batch 1050, loss[loss=0.1266, simple_loss=0.2006, pruned_loss=0.02626, over 19763.00 frames. ], tot_loss[loss=0.1464, simple_loss=0.2287, pruned_loss=0.0321, over 3930582.30 frames. 
], batch size: 182, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:14:11,593 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86408.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:14:51,434 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86425.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:14:58,725 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.2612, 3.0107, 3.3600, 2.8543, 3.4676, 3.4222, 4.0745, 4.4477], + device='cuda:0'), covar=tensor([0.0556, 0.1702, 0.1533, 0.2310, 0.1719, 0.1515, 0.0665, 0.0636], + device='cuda:0'), in_proj_covar=tensor([0.0267, 0.0250, 0.0280, 0.0267, 0.0314, 0.0269, 0.0244, 0.0277], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 22:15:05,565 INFO [train.py:892] (0/4) Epoch 47, batch 1100, loss[loss=0.1964, simple_loss=0.313, pruned_loss=0.03995, over 17968.00 frames. ], tot_loss[loss=0.1478, simple_loss=0.2304, pruned_loss=0.0326, over 3931013.24 frames. ], batch size: 633, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:15:16,005 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.572e+02 3.379e+02 4.084e+02 4.897e+02 1.462e+03, threshold=8.168e+02, percent-clipped=5.0 +2023-03-29 22:15:27,600 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86440.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:15:40,247 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86445.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:17:00,652 INFO [train.py:892] (0/4) Epoch 47, batch 1150, loss[loss=0.1529, simple_loss=0.2443, pruned_loss=0.03069, over 19875.00 frames. ], tot_loss[loss=0.1475, simple_loss=0.23, pruned_loss=0.03253, over 3936547.73 frames. ], batch size: 53, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:17:19,386 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.9341, 2.9445, 4.6151, 3.4762, 3.6688, 3.4087, 2.5293, 2.6677], + device='cuda:0'), covar=tensor([0.1080, 0.3136, 0.0421, 0.1102, 0.1836, 0.1499, 0.2678, 0.2602], + device='cuda:0'), in_proj_covar=tensor([0.0360, 0.0408, 0.0357, 0.0300, 0.0382, 0.0405, 0.0393, 0.0370], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 22:17:46,806 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86501.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:17:59,627 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86506.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 22:18:58,202 INFO [train.py:892] (0/4) Epoch 47, batch 1200, loss[loss=0.1269, simple_loss=0.2103, pruned_loss=0.02179, over 19713.00 frames. ], tot_loss[loss=0.1478, simple_loss=0.2304, pruned_loss=0.03257, over 3939708.30 frames. ], batch size: 104, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:18:59,128 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86531.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:19:07,246 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.118e+02 3.301e+02 3.835e+02 4.475e+02 8.063e+02, threshold=7.670e+02, percent-clipped=0.0 +2023-03-29 22:19:40,093 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. 
limit=2.0 +2023-03-29 22:20:44,475 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86579.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:20:47,263 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86580.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:20:49,866 INFO [train.py:892] (0/4) Epoch 47, batch 1250, loss[loss=0.1598, simple_loss=0.2384, pruned_loss=0.0406, over 19752.00 frames. ], tot_loss[loss=0.1474, simple_loss=0.2298, pruned_loss=0.03252, over 3941981.26 frames. ], batch size: 256, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:21:02,340 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86586.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 22:21:41,832 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86602.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:22:06,118 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86613.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:22:49,114 INFO [train.py:892] (0/4) Epoch 47, batch 1300, loss[loss=0.1588, simple_loss=0.2423, pruned_loss=0.03769, over 19894.00 frames. ], tot_loss[loss=0.1478, simple_loss=0.2305, pruned_loss=0.0325, over 3942302.97 frames. ], batch size: 94, lr: 3.31e-03, grad_scale: 16.0 +2023-03-29 22:22:57,914 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86634.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 22:22:59,161 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.242e+02 3.251e+02 3.887e+02 4.565e+02 9.819e+02, threshold=7.775e+02, percent-clipped=3.0 +2023-03-29 22:23:12,345 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86641.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:23:21,549 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([2.3457, 2.6076, 2.3834, 1.8927, 2.4255, 2.5647, 2.5617, 2.5672], + device='cuda:0'), covar=tensor([0.0484, 0.0340, 0.0385, 0.0584, 0.0407, 0.0421, 0.0328, 0.0303], + device='cuda:0'), in_proj_covar=tensor([0.0119, 0.0112, 0.0112, 0.0112, 0.0116, 0.0102, 0.0104, 0.0102], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 22:23:53,601 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86658.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:24:00,032 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86661.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:24:43,563 INFO [train.py:892] (0/4) Epoch 47, batch 1350, loss[loss=0.1643, simple_loss=0.2488, pruned_loss=0.03987, over 19691.00 frames. ], tot_loss[loss=0.1484, simple_loss=0.2309, pruned_loss=0.0329, over 3944154.53 frames. ], batch size: 337, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:25:44,625 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86706.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:25:48,878 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86708.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:26:27,061 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86725.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:26:40,899 INFO [train.py:892] (0/4) Epoch 47, batch 1400, loss[loss=0.1459, simple_loss=0.2269, pruned_loss=0.03239, over 19714.00 frames. 
], tot_loss[loss=0.1481, simple_loss=0.2304, pruned_loss=0.03292, over 3944824.19 frames. ], batch size: 85, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:26:49,073 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.330e+02 3.187e+02 3.920e+02 5.050e+02 7.981e+02, threshold=7.841e+02, percent-clipped=2.0 +2023-03-29 22:27:34,484 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86756.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:28:14,195 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86773.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:28:29,154 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-03-29 22:28:33,041 INFO [train.py:892] (0/4) Epoch 47, batch 1450, loss[loss=0.1681, simple_loss=0.2483, pruned_loss=0.04391, over 19800.00 frames. ], tot_loss[loss=0.1478, simple_loss=0.2299, pruned_loss=0.03284, over 3947203.60 frames. ], batch size: 68, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:29:06,617 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86796.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:29:20,738 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86801.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 22:29:49,555 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86814.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:30:28,323 INFO [train.py:892] (0/4) Epoch 47, batch 1500, loss[loss=0.1486, simple_loss=0.233, pruned_loss=0.03207, over 19773.00 frames. ], tot_loss[loss=0.1477, simple_loss=0.2303, pruned_loss=0.0326, over 3946125.06 frames. ], batch size: 241, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:30:37,009 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.591e+02 3.342e+02 4.163e+02 4.864e+02 6.569e+02, threshold=8.326e+02, percent-clipped=0.0 +2023-03-29 22:31:04,595 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.2113, 3.1088, 3.4429, 3.1561, 3.0211, 3.4233, 3.3056, 3.5005], + device='cuda:0'), covar=tensor([0.0831, 0.0418, 0.0414, 0.0460, 0.1783, 0.0607, 0.0493, 0.0415], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0234, 0.0236, 0.0246, 0.0215, 0.0263, 0.0248, 0.0235], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 22:31:22,741 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([5.4772, 5.8904, 6.0358, 5.7502, 5.6599, 5.7315, 5.7383, 5.4972], + device='cuda:0'), covar=tensor([0.1389, 0.1240, 0.0766, 0.1172, 0.0670, 0.0707, 0.1583, 0.1883], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0360, 0.0390, 0.0322, 0.0297, 0.0304, 0.0382, 0.0412], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:0') +2023-03-29 22:32:11,737 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86875.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:32:18,103 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86878.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:32:23,358 INFO [train.py:892] (0/4) Epoch 47, batch 1550, loss[loss=0.1684, simple_loss=0.246, pruned_loss=0.04542, over 19837.00 frames. ], tot_loss[loss=0.1472, simple_loss=0.2296, pruned_loss=0.03237, over 3947692.07 frames. 
], batch size: 239, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:33:10,770 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86902.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:33:57,148 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-03-29 22:34:19,996 INFO [train.py:892] (0/4) Epoch 47, batch 1600, loss[loss=0.1349, simple_loss=0.207, pruned_loss=0.0314, over 19783.00 frames. ], tot_loss[loss=0.1478, simple_loss=0.2308, pruned_loss=0.03241, over 3945816.86 frames. ], batch size: 163, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:34:27,989 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.263e+02 3.386e+02 4.039e+02 4.750e+02 1.112e+03, threshold=8.078e+02, percent-clipped=1.0 +2023-03-29 22:34:30,876 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86936.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:34:37,100 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86939.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:35:03,403 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86950.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:36:13,791 INFO [train.py:892] (0/4) Epoch 47, batch 1650, loss[loss=0.1283, simple_loss=0.203, pruned_loss=0.02675, over 19867.00 frames. ], tot_loss[loss=0.1472, simple_loss=0.23, pruned_loss=0.03218, over 3946847.64 frames. ], batch size: 158, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:37:07,447 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87002.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 22:38:05,802 INFO [train.py:892] (0/4) Epoch 47, batch 1700, loss[loss=0.1396, simple_loss=0.2134, pruned_loss=0.03293, over 19781.00 frames. ], tot_loss[loss=0.1462, simple_loss=0.2285, pruned_loss=0.03189, over 3949210.16 frames. ], batch size: 131, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:38:14,357 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 1.986e+02 3.322e+02 4.069e+02 4.865e+02 7.645e+02, threshold=8.138e+02, percent-clipped=0.0 +2023-03-29 22:38:47,048 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-29 22:39:20,563 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87063.0, num_to_drop=1, layers_to_drop={3} +2023-03-29 22:39:56,684 INFO [train.py:892] (0/4) Epoch 47, batch 1750, loss[loss=0.1338, simple_loss=0.2107, pruned_loss=0.02844, over 19819.00 frames. ], tot_loss[loss=0.1472, simple_loss=0.2294, pruned_loss=0.03255, over 3947197.27 frames. ], batch size: 127, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:40:26,901 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87096.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:40:36,101 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87101.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 22:41:33,780 INFO [train.py:892] (0/4) Epoch 47, batch 1800, loss[loss=0.1446, simple_loss=0.2171, pruned_loss=0.03609, over 19813.00 frames. ], tot_loss[loss=0.1465, simple_loss=0.2286, pruned_loss=0.03221, over 3948086.59 frames. 
], batch size: 148, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:41:41,045 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.449e+02 3.587e+02 4.161e+02 4.922e+02 1.323e+03, threshold=8.323e+02, percent-clipped=3.0 +2023-03-29 22:41:59,480 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87144.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:42:08,169 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87149.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:42:12,202 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3875, 3.2812, 3.5761, 2.7959, 3.6691, 3.1450, 3.4102, 3.6727], + device='cuda:0'), covar=tensor([0.0788, 0.0514, 0.0638, 0.0840, 0.0466, 0.0477, 0.0428, 0.0332], + device='cuda:0'), in_proj_covar=tensor([0.0087, 0.0097, 0.0093, 0.0117, 0.0089, 0.0092, 0.0088, 0.0084], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 22:42:35,949 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.7577, 3.2063, 3.6179, 3.2009, 3.8454, 3.8344, 4.5065, 5.0383], + device='cuda:0'), covar=tensor([0.0473, 0.1666, 0.1473, 0.2162, 0.1658, 0.1424, 0.0585, 0.0371], + device='cuda:0'), in_proj_covar=tensor([0.0268, 0.0251, 0.0281, 0.0267, 0.0315, 0.0271, 0.0244, 0.0278], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 22:42:47,059 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87170.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:43:06,741 INFO [train.py:892] (0/4) Epoch 47, batch 1850, loss[loss=0.154, simple_loss=0.247, pruned_loss=0.03054, over 19819.00 frames. ], tot_loss[loss=0.1471, simple_loss=0.2305, pruned_loss=0.03182, over 3945012.01 frames. ], batch size: 57, lr: 3.30e-03, grad_scale: 16.0 +2023-03-29 22:43:14,339 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/epoch-47.pt +2023-03-29 22:44:09,128 INFO [train.py:892] (0/4) Epoch 48, batch 0, loss[loss=0.1445, simple_loss=0.2249, pruned_loss=0.03202, over 19844.00 frames. ], tot_loss[loss=0.1445, simple_loss=0.2249, pruned_loss=0.03202, over 19844.00 frames. ], batch size: 85, lr: 3.26e-03, grad_scale: 16.0 +2023-03-29 22:44:09,129 INFO [train.py:917] (0/4) Computing validation loss +2023-03-29 22:44:44,075 INFO [train.py:926] (0/4) Epoch 48, validation: loss=0.1901, simple_loss=0.2508, pruned_loss=0.06469, over 2883724.00 frames. +2023-03-29 22:44:44,076 INFO [train.py:927] (0/4) Maximum memory allocated so far is 22793MB +2023-03-29 22:46:30,414 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87230.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:46:40,125 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87234.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:46:41,364 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.137e+02 3.310e+02 3.743e+02 4.381e+02 8.999e+02, threshold=7.487e+02, percent-clipped=2.0 +2023-03-29 22:46:45,732 INFO [train.py:892] (0/4) Epoch 48, batch 50, loss[loss=0.1374, simple_loss=0.2213, pruned_loss=0.0267, over 19750.00 frames. ], tot_loss[loss=0.145, simple_loss=0.2265, pruned_loss=0.03177, over 890335.81 frames. 
], batch size: 276, lr: 3.26e-03, grad_scale: 16.0 +2023-03-29 22:46:46,450 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87236.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:48:04,645 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.1318, 2.5235, 3.0708, 3.2361, 3.7655, 4.2221, 4.0717, 4.1779], + device='cuda:0'), covar=tensor([0.1020, 0.1836, 0.1352, 0.0780, 0.0477, 0.0288, 0.0368, 0.0395], + device='cuda:0'), in_proj_covar=tensor([0.0169, 0.0173, 0.0184, 0.0161, 0.0147, 0.0142, 0.0137, 0.0126], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 22:48:04,725 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5276, 3.7607, 3.9075, 4.5773, 2.9886, 3.4155, 2.9102, 2.7913], + device='cuda:0'), covar=tensor([0.0526, 0.2084, 0.0970, 0.0444, 0.2126, 0.1242, 0.1366, 0.1690], + device='cuda:0'), in_proj_covar=tensor([0.0255, 0.0331, 0.0258, 0.0217, 0.0253, 0.0218, 0.0227, 0.0223], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 22:48:38,627 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87284.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:48:42,205 INFO [train.py:892] (0/4) Epoch 48, batch 100, loss[loss=0.1292, simple_loss=0.2082, pruned_loss=0.02509, over 19751.00 frames. ], tot_loss[loss=0.1432, simple_loss=0.2259, pruned_loss=0.03023, over 1568310.80 frames. ], batch size: 179, lr: 3.26e-03, grad_scale: 16.0 +2023-03-29 22:48:53,648 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87291.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:50:36,653 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.455e+02 3.250e+02 3.994e+02 4.855e+02 8.208e+02, threshold=7.988e+02, percent-clipped=2.0 +2023-03-29 22:50:39,117 INFO [train.py:892] (0/4) Epoch 48, batch 150, loss[loss=0.1381, simple_loss=0.22, pruned_loss=0.02805, over 19780.00 frames. ], tot_loss[loss=0.1443, simple_loss=0.2265, pruned_loss=0.03103, over 2095655.65 frames. ], batch size: 263, lr: 3.26e-03, grad_scale: 16.0 +2023-03-29 22:51:32,746 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87358.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 22:51:41,563 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87362.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:52:27,199 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-29 22:52:33,955 INFO [train.py:892] (0/4) Epoch 48, batch 200, loss[loss=0.1298, simple_loss=0.208, pruned_loss=0.02576, over 19795.00 frames. ], tot_loss[loss=0.1483, simple_loss=0.2306, pruned_loss=0.03297, over 2506682.23 frames. ], batch size: 185, lr: 3.26e-03, grad_scale: 16.0 +2023-03-29 22:53:13,692 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
limit=2.0 +2023-03-29 22:54:00,823 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87423.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:54:24,399 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.319e+02 3.416e+02 4.017e+02 4.841e+02 7.070e+02, threshold=8.034e+02, percent-clipped=0.0 +2023-03-29 22:54:26,337 INFO [train.py:892] (0/4) Epoch 48, batch 250, loss[loss=0.126, simple_loss=0.2064, pruned_loss=0.02276, over 19702.00 frames. ], tot_loss[loss=0.1466, simple_loss=0.2289, pruned_loss=0.03218, over 2826903.22 frames. ], batch size: 81, lr: 3.26e-03, grad_scale: 16.0 +2023-03-29 22:55:23,108 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87461.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:55:38,574 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.5760, 2.5705, 4.9870, 4.0922, 4.6598, 4.8766, 4.6576, 4.5670], + device='cuda:0'), covar=tensor([0.0660, 0.1202, 0.0112, 0.0905, 0.0153, 0.0210, 0.0163, 0.0170], + device='cuda:0'), in_proj_covar=tensor([0.0105, 0.0107, 0.0093, 0.0154, 0.0092, 0.0105, 0.0094, 0.0091], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 22:55:44,967 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87470.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:56:19,967 INFO [train.py:892] (0/4) Epoch 48, batch 300, loss[loss=0.1298, simple_loss=0.2123, pruned_loss=0.02364, over 19851.00 frames. ], tot_loss[loss=0.1459, simple_loss=0.2282, pruned_loss=0.03178, over 3076997.05 frames. ], batch size: 190, lr: 3.25e-03, grad_scale: 16.0 +2023-03-29 22:56:49,812 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.7945, 2.7446, 3.0172, 2.6429, 3.1135, 3.0855, 3.5780, 3.9220], + device='cuda:0'), covar=tensor([0.0648, 0.1849, 0.1665, 0.2256, 0.1722, 0.1652, 0.0772, 0.0781], + device='cuda:0'), in_proj_covar=tensor([0.0267, 0.0250, 0.0280, 0.0267, 0.0314, 0.0270, 0.0244, 0.0278], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 22:57:07,874 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87505.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:57:33,927 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-03-29 22:57:37,239 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87518.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:57:45,584 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87522.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:57:54,516 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-03-29 22:58:14,308 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87534.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 22:58:15,390 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.562e+02 3.502e+02 4.019e+02 4.844e+02 7.845e+02, threshold=8.037e+02, percent-clipped=0.0 +2023-03-29 22:58:18,649 INFO [train.py:892] (0/4) Epoch 48, batch 350, loss[loss=0.1512, simple_loss=0.237, pruned_loss=0.03274, over 19837.00 frames. ], tot_loss[loss=0.1458, simple_loss=0.2283, pruned_loss=0.03167, over 3270371.36 frames. 
], batch size: 239, lr: 3.25e-03, grad_scale: 16.0 +2023-03-29 22:58:49,440 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.5020, 3.8290, 3.9202, 4.5146, 2.9170, 3.3510, 2.7800, 2.8043], + device='cuda:0'), covar=tensor([0.0505, 0.1668, 0.0890, 0.0405, 0.2029, 0.1086, 0.1387, 0.1557], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0330, 0.0258, 0.0217, 0.0254, 0.0218, 0.0227, 0.0223], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003], + device='cuda:0') +2023-03-29 22:59:29,642 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87566.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 23:00:10,271 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87582.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:00:14,659 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87584.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:00:15,119 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=3.38 vs. limit=5.0 +2023-03-29 23:00:17,793 INFO [train.py:892] (0/4) Epoch 48, batch 400, loss[loss=0.1354, simple_loss=0.2174, pruned_loss=0.02677, over 19753.00 frames. ], tot_loss[loss=0.1452, simple_loss=0.2275, pruned_loss=0.03146, over 3422095.02 frames. ], batch size: 97, lr: 3.25e-03, grad_scale: 16.0 +2023-03-29 23:00:18,773 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87586.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:00:25,645 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87589.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:01:03,117 INFO [zipformer.py:625] (0/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87605.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:02:10,863 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.523e+02 3.389e+02 3.932e+02 4.658e+02 8.787e+02, threshold=7.864e+02, percent-clipped=1.0 +2023-03-29 23:02:13,017 INFO [train.py:892] (0/4) Epoch 48, batch 450, loss[loss=0.144, simple_loss=0.23, pruned_loss=0.02903, over 19697.00 frames. ], tot_loss[loss=0.1455, simple_loss=0.2284, pruned_loss=0.03125, over 3537764.03 frames. ], batch size: 82, lr: 3.25e-03, grad_scale: 16.0 +2023-03-29 23:02:36,799 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87645.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 23:02:48,760 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87650.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:03:06,994 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87658.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 23:03:24,670 INFO [zipformer.py:625] (0/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87666.0, num_to_drop=1, layers_to_drop={2} +2023-03-29 23:04:11,533 INFO [train.py:892] (0/4) Epoch 48, batch 500, loss[loss=0.1516, simple_loss=0.2418, pruned_loss=0.03075, over 19853.00 frames. ], tot_loss[loss=0.1469, simple_loss=0.2296, pruned_loss=0.03213, over 3629170.54 frames. 
], batch size: 58, lr: 3.25e-03, grad_scale: 16.0 +2023-03-29 23:04:58,620 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87706.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 23:05:28,006 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87718.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:06:06,500 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.147e+02 3.363e+02 3.924e+02 4.897e+02 8.586e+02, threshold=7.848e+02, percent-clipped=2.0 +2023-03-29 23:06:09,102 INFO [train.py:892] (0/4) Epoch 48, batch 550, loss[loss=0.142, simple_loss=0.2166, pruned_loss=0.0337, over 19866.00 frames. ], tot_loss[loss=0.1475, simple_loss=0.23, pruned_loss=0.03253, over 3699564.65 frames. ], batch size: 122, lr: 3.25e-03, grad_scale: 16.0 +2023-03-29 23:07:06,272 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.0226, 3.0784, 4.7448, 3.4422, 3.7329, 3.4954, 2.6537, 2.7616], + device='cuda:0'), covar=tensor([0.1127, 0.3245, 0.0446, 0.1238, 0.1840, 0.1711, 0.2787, 0.2802], + device='cuda:0'), in_proj_covar=tensor([0.0364, 0.0410, 0.0359, 0.0304, 0.0384, 0.0409, 0.0395, 0.0373], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 23:08:07,530 INFO [train.py:892] (0/4) Epoch 48, batch 600, loss[loss=0.1386, simple_loss=0.2208, pruned_loss=0.02817, over 19735.00 frames. ], tot_loss[loss=0.1482, simple_loss=0.2307, pruned_loss=0.03288, over 3755215.72 frames. ], batch size: 99, lr: 3.25e-03, grad_scale: 16.0 +2023-03-29 23:08:15,347 INFO [scaling.py:679] (0/4) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-03-29 23:09:18,908 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87817.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:09:27,667 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.71 vs. limit=5.0 +2023-03-29 23:09:59,446 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.332e+02 3.329e+02 4.109e+02 5.019e+02 8.243e+02, threshold=8.218e+02, percent-clipped=1.0 +2023-03-29 23:10:01,377 INFO [train.py:892] (0/4) Epoch 48, batch 650, loss[loss=0.1674, simple_loss=0.2769, pruned_loss=0.02898, over 18968.00 frames. ], tot_loss[loss=0.1469, simple_loss=0.2294, pruned_loss=0.03224, over 3798281.19 frames. ], batch size: 514, lr: 3.25e-03, grad_scale: 16.0 +2023-03-29 23:10:07,261 INFO [scaling.py:679] (0/4) Whitening: num_groups=1, num_channels=384, metric=4.73 vs. limit=5.0 +2023-03-29 23:10:59,778 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87861.0, num_to_drop=1, layers_to_drop={1} +2023-03-29 23:11:56,104 INFO [train.py:892] (0/4) Epoch 48, batch 700, loss[loss=0.1524, simple_loss=0.2318, pruned_loss=0.03649, over 19800.00 frames. ], tot_loss[loss=0.1471, simple_loss=0.2297, pruned_loss=0.0323, over 3831857.01 frames. 
], batch size: 200, lr: 3.25e-03, grad_scale: 16.0 +2023-03-29 23:11:57,089 INFO [zipformer.py:625] (0/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87886.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:12:56,726 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([4.6315, 4.8381, 4.8757, 4.7521, 4.5981, 4.8592, 4.4313, 4.3782], + device='cuda:0'), covar=tensor([0.0486, 0.0487, 0.0494, 0.0456, 0.0589, 0.0475, 0.0666, 0.0990], + device='cuda:0'), in_proj_covar=tensor([0.0300, 0.0316, 0.0328, 0.0286, 0.0296, 0.0279, 0.0287, 0.0337], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 23:13:45,542 INFO [zipformer.py:625] (0/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87934.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:13:48,371 INFO [optim.py:368] (0/4) Clipping_scale=2.0, grad-norm quartiles 2.639e+02 3.556e+02 4.149e+02 4.938e+02 1.091e+03, threshold=8.298e+02, percent-clipped=2.0 +2023-03-29 23:13:50,830 INFO [train.py:892] (0/4) Epoch 48, batch 750, loss[loss=0.1356, simple_loss=0.2151, pruned_loss=0.02802, over 19798.00 frames. ], tot_loss[loss=0.1468, simple_loss=0.2291, pruned_loss=0.03226, over 3858454.70 frames. ], batch size: 211, lr: 3.25e-03, grad_scale: 32.0 +2023-03-29 23:14:38,869 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87940.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 23:14:50,630 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87945.0, num_to_drop=0, layers_to_drop=set() +2023-03-29 23:15:26,336 INFO [zipformer.py:625] (0/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87961.0, num_to_drop=1, layers_to_drop={0} +2023-03-29 23:17:04,555 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.0794, 2.5886, 3.0172, 3.2085, 3.7038, 4.1359, 3.8927, 4.0077], + device='cuda:0'), covar=tensor([0.0969, 0.1611, 0.1292, 0.0732, 0.0464, 0.0286, 0.0393, 0.0419], + device='cuda:0'), in_proj_covar=tensor([0.0170, 0.0174, 0.0186, 0.0162, 0.0148, 0.0143, 0.0138, 0.0126], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:0') +2023-03-29 23:17:10,149 INFO [train.py:892] (0/4) Epoch 48, batch 800, loss[loss=0.1279, simple_loss=0.2127, pruned_loss=0.02155, over 19786.00 frames. ], tot_loss[loss=0.146, simple_loss=0.2284, pruned_loss=0.03177, over 3879794.48 frames. ], batch size: 94, lr: 3.25e-03, grad_scale: 32.0 +2023-03-29 23:17:40,201 INFO [zipformer.py:1454] (0/4) attn_weights_entropy = tensor([3.3216, 2.5906, 3.7032, 2.9881, 3.1092, 2.9153, 2.2678, 2.3711], + device='cuda:0'), covar=tensor([0.1300, 0.3021, 0.0651, 0.1181, 0.1858, 0.1680, 0.2785, 0.2769], + device='cuda:0'), in_proj_covar=tensor([0.0364, 0.0411, 0.0359, 0.0304, 0.0385, 0.0410, 0.0397, 0.0374], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-03-29 23:17:41,335 INFO [checkpoint.py:75] (0/4) Saving checkpoint to pruned_transducer_stateless7_bbpe/exp/checkpoint-88000.pt
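A minimal sketch (not part of the log) of two relations the logged numbers above appear to satisfy. First, the optim.py lines report the min/25%/median/75%/max of a window of recent gradient norms, and the clipping threshold seems to be clipping_scale (2.0 here) times the median, e.g. 2.0 * 3.887e+02 = 7.775e+02 on the first such line after epoch 47, batch 1300. Second, each per-batch loss decomposes as loss = 0.5 * simple_loss + pruned_loss (at batch 1250, 0.5 * 0.2298 + 0.03252 = 0.1474 for the running totals). The helper names below (combined_loss, clip_threshold) are illustrative, not icefall APIs, and percent-clipped is read here, tentatively, as the share of recent updates whose norm exceeded the threshold.

import torch

def combined_loss(simple_loss, pruned_loss, simple_loss_scale=0.5):
    # Total training loss as it appears in the log lines above: a scaled
    # "simple" (linear) transducer loss plus the pruned RNN-T loss.
    # simple_loss_scale=0.5 is inferred from the logged numbers.
    return simple_loss_scale * simple_loss + pruned_loss

def clip_threshold(recent_grad_norms, clipping_scale=2.0):
    # Quartiles of a window of recent gradient norms, plus the resulting
    # clipping threshold: clipping_scale times the median (middle quartile).
    quartiles = torch.quantile(
        recent_grad_norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0])
    )
    return quartiles, clipping_scale * quartiles[2]

if __name__ == "__main__":
    # Epoch 47, batch 1250 running totals: tot_loss=0.1474 with
    # simple_loss=0.2298 and pruned_loss=0.03252.
    print(round(combined_loss(0.2298, 0.03252), 4))  # -> 0.1474
    # Toy norm history whose quartiles match the first optim.py line
    # after batch 1300 (the log shows the threshold rounded to 7.775e+02).
    norms = torch.tensor([224.2, 325.1, 388.7, 456.5, 981.9])
    quartiles, threshold = clip_threshold(norms)
    print(quartiles.tolist(), round(float(threshold), 1))  # -> 777.4

Both checks hold throughout the section (e.g. 2.0 * 3.920e+02 = 7.840e+02 vs. the logged 7.841e+02 at batch 1400), which is why the threshold drifts batch to batch as the norm window slides.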