diff --git "a/exp/log/log-train-2023-03-08-13-46-28-1" "b/exp/log/log-train-2023-03-08-13-46-28-1" new file mode 100644--- /dev/null +++ "b/exp/log/log-train-2023-03-08-13-46-28-1" @@ -0,0 +1,15058 @@ +2023-03-08 13:46:28,357 INFO [train.py:970] (1/4) Training started +2023-03-08 13:46:28,357 INFO [train.py:980] (1/4) Device: cuda:1 +2023-03-08 13:46:28,366 INFO [train.py:989] (1/4) {'frame_shift_ms': 10.0, 'allowed_excess_duration_ratio': 0.1, 'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.22', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': '96c9a2aece2a3a7633da07740e24fa3d96f5498c', 'k2-git-date': 'Thu Nov 10 08:14:02 2022', 'lhotse-version': '1.13.0.dev+git.527d964.clean', 'torch-version': '1.12.1', 'torch-cuda-available': True, 'torch-cuda-version': '11.6', 'python-version': '3.8', 'icefall-git-branch': 'random_padding', 'icefall-git-sha1': '4cf2472-dirty', 'icefall-git-date': 'Wed Mar 1 23:53:23 2023', 'icefall-path': '/ceph-data4/yangxiaoyu/softwares/icefall_development/icefall_random_padding', 'k2-path': '/ceph-data4/yangxiaoyu/softwares/anaconda3/envs/k2_latest/lib/python3.8/site-packages/k2/__init__.py', 'lhotse-path': '/ceph-data4/yangxiaoyu/softwares/lhotse_development/lhotse_random_padding_left/lhotse/__init__.py', 'hostname': 'de-74279-k2-train-9-0208143539-7dcb6bfd79-b6fdq', 'IP address': '10.177.13.150'}, 'world_size': 4, 'master_port': 18180, 'tensorboard': True, 'num_epochs': 30, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('pruned_transducer_stateless7/exp_960h_no_paddingidx_ngpu4'), 'bpe_model': 'data/lang_bpe_500/bpe.model', 'base_lr': 0.05, 'lr_batches': 5000, 'lr_epochs': 3.5, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 2000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,4,3,2,4', 'feedforward_dims': '1024,1024,2048,2048,1024', 'nhead': '8,8,8,8,8', 'encoder_dims': '384,384,384,384,384', 'attention_dims': '192,192,192,192,192', 'encoder_unmasked_dims': '256,256,256,256,256', 'zipformer_downsampling_factors': '1,2,4,8,2', 'cnn_module_kernels': '31,31,31,31,31', 'decoder_dim': 512, 'joiner_dim': 512, 'full_libri': True, 'manifest_dir': PosixPath('data/fbank'), 'max_duration': 750, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'random_left_padding': False, 'num_left_padding': 8, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': True, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2023-03-08 13:46:28,367 INFO [train.py:991] (1/4) About to create model +2023-03-08 13:46:29,243 INFO [zipformer.py:178] (1/4) At encoder stack 4, which has downsampling_factor=2, we will combine the outputs of layers 1 and 3, with downsampling_factors=2 and 8. 
+2023-03-08 13:46:29,267 INFO [train.py:995] (1/4) Number of model parameters: 70369391 +2023-03-08 13:46:32,459 INFO [train.py:1010] (1/4) Using DDP +2023-03-08 13:46:32,864 INFO [asr_datamodule.py:439] (1/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2023-03-08 13:46:32,867 INFO [asr_datamodule.py:244] (1/4) Enable MUSAN +2023-03-08 13:46:32,867 INFO [asr_datamodule.py:245] (1/4) About to get Musan cuts +2023-03-08 13:46:35,147 INFO [asr_datamodule.py:269] (1/4) Enable SpecAugment +2023-03-08 13:46:35,147 INFO [asr_datamodule.py:270] (1/4) Time warp factor: 80 +2023-03-08 13:46:35,148 INFO [asr_datamodule.py:280] (1/4) Num frame mask: 10 +2023-03-08 13:46:35,148 INFO [asr_datamodule.py:293] (1/4) About to create train dataset +2023-03-08 13:46:35,148 INFO [asr_datamodule.py:320] (1/4) Using DynamicBucketingSampler. +2023-03-08 13:46:42,099 INFO [asr_datamodule.py:335] (1/4) About to create train dataloader +2023-03-08 13:46:42,100 INFO [asr_datamodule.py:449] (1/4) About to get dev-clean cuts +2023-03-08 13:46:42,101 INFO [asr_datamodule.py:456] (1/4) About to get dev-other cuts +2023-03-08 13:46:42,102 INFO [asr_datamodule.py:366] (1/4) About to create dev dataset +2023-03-08 13:46:42,434 INFO [asr_datamodule.py:383] (1/4) About to create dev dataloader +2023-03-08 13:47:06,858 INFO [train.py:898] (1/4) Epoch 1, batch 0, loss[loss=7.464, simple_loss=6.759, pruned_loss=7.035, over 18566.00 frames. ], tot_loss[loss=7.464, simple_loss=6.759, pruned_loss=7.035, over 18566.00 frames. ], batch size: 54, lr: 2.50e-02, grad_scale: 2.0 +2023-03-08 13:47:06,858 INFO [train.py:923] (1/4) Computing validation loss +2023-03-08 13:47:18,763 INFO [train.py:932] (1/4) Epoch 1, validation: loss=6.911, simple_loss=6.237, pruned_loss=6.721, over 944034.00 frames. +2023-03-08 13:47:18,764 INFO [train.py:933] (1/4) Maximum memory allocated so far is 14689MB +2023-03-08 13:47:22,967 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5.0, num_to_drop=2, layers_to_drop={0, 3} +2023-03-08 13:47:42,154 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 13:47:48,443 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.4236, 4.4220, 4.3997, 4.3461, 4.4175, 4.3901, 4.4106, 4.4204], + device='cuda:1'), covar=tensor([0.0007, 0.0008, 0.0010, 0.0008, 0.0012, 0.0012, 0.0009, 0.0007], + device='cuda:1'), in_proj_covar=tensor([0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009], + device='cuda:1'), out_proj_covar=tensor([8.9347e-06, 8.8633e-06, 9.1289e-06, 8.8904e-06, 9.1215e-06, 8.9558e-06, + 9.0463e-06, 9.0285e-06], device='cuda:1') +2023-03-08 13:48:01,462 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.8058, 5.7403, 5.7168, 5.7705, 5.7839, 5.8014, 5.7886, 5.7434], + device='cuda:1'), covar=tensor([0.0007, 0.0006, 0.0011, 0.0011, 0.0007, 0.0011, 0.0010, 0.0008], + device='cuda:1'), in_proj_covar=tensor([0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009], + device='cuda:1'), out_proj_covar=tensor([9.1599e-06, 9.2839e-06, 9.1385e-06, 9.3348e-06, 9.1633e-06, 9.2085e-06, + 9.2734e-06, 9.2691e-06], device='cuda:1') +2023-03-08 13:48:05,138 INFO [train.py:898] (1/4) Epoch 1, batch 50, loss[loss=1.405, simple_loss=1.244, pruned_loss=1.442, over 17710.00 frames. ], tot_loss[loss=2.153, simple_loss=1.947, pruned_loss=1.973, over 814977.38 frames. 
], batch size: 70, lr: 2.75e-02, grad_scale: 1.0 +2023-03-08 13:48:08,639 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=49.51 vs. limit=5.0 +2023-03-08 13:48:18,872 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=6.08 vs. limit=2.0 +2023-03-08 13:48:33,931 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 13:48:49,256 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=8.96 vs. limit=2.0 +2023-03-08 13:48:51,383 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=19.98 vs. limit=2.0 +2023-03-08 13:48:51,697 WARNING [train.py:888] (1/4) Grad scale is small: 0.0009765625 +2023-03-08 13:48:51,698 INFO [train.py:898] (1/4) Epoch 1, batch 100, loss[loss=1.019, simple_loss=0.8722, pruned_loss=1.159, over 18247.00 frames. ], tot_loss[loss=1.623, simple_loss=1.444, pruned_loss=1.611, over 1439341.76 frames. ], batch size: 45, lr: 3.00e-02, grad_scale: 0.001953125 +2023-03-08 13:49:00,742 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 6.526e+01 1.420e+02 2.842e+02 1.227e+03 3.323e+06, threshold=5.685e+02, percent-clipped=0.0 +2023-03-08 13:49:18,541 WARNING [optim.py:389] (1/4) Scaling gradients by 0.03670352324843407, model_norm_threshold=568.4981689453125 +2023-03-08 13:49:18,735 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.51, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.227e+08, grad_sumsq = 3.226e+09, orig_rms_sq=3.802e-02 +2023-03-08 13:49:29,074 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=144.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-08 13:49:34,264 INFO [train.py:898] (1/4) Epoch 1, batch 150, loss[loss=1.116, simple_loss=0.9413, pruned_loss=1.255, over 18493.00 frames. ], tot_loss[loss=1.407, simple_loss=1.231, pruned_loss=1.472, over 1906700.73 frames. ], batch size: 53, lr: 3.25e-02, grad_scale: 0.001953125 +2023-03-08 13:49:39,269 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=3.12 vs. limit=2.0 +2023-03-08 13:49:44,095 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=69.07 vs. limit=5.0 +2023-03-08 13:50:05,703 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([6.5193, 6.5401, 6.5431, 6.5435, 6.5434, 6.5402, 6.5377, 6.5235], + device='cuda:1'), covar=tensor([0.0072, 0.0044, 0.0033, 0.0046, 0.0017, 0.0038, 0.0019, 0.0038], + device='cuda:1'), in_proj_covar=tensor([0.0009, 0.0009, 0.0009, 0.0009, 0.0008, 0.0008, 0.0009, 0.0008], + device='cuda:1'), out_proj_covar=tensor([8.8102e-06, 8.7896e-06, 8.6602e-06, 8.5332e-06, 8.5679e-06, 8.3902e-06, + 8.5187e-06, 8.4851e-06], device='cuda:1') +2023-03-08 13:50:16,349 WARNING [train.py:888] (1/4) Grad scale is small: 0.001953125 +2023-03-08 13:50:16,349 INFO [train.py:898] (1/4) Epoch 1, batch 200, loss[loss=1.029, simple_loss=0.8645, pruned_loss=1.092, over 15923.00 frames. ], tot_loss[loss=1.265, simple_loss=1.094, pruned_loss=1.344, over 2272615.87 frames. ], batch size: 95, lr: 3.50e-02, grad_scale: 0.00390625 +2023-03-08 13:50:32,161 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 7.044e+01 1.307e+02 2.204e+02 4.914e+02 1.549e+04, threshold=4.408e+02, percent-clipped=23.0 +2023-03-08 13:51:05,134 INFO [train.py:898] (1/4) Epoch 1, batch 250, loss[loss=0.9626, simple_loss=0.8041, pruned_loss=0.9813, over 18299.00 frames. 
], tot_loss[loss=1.176, simple_loss=1.009, pruned_loss=1.244, over 2557155.68 frames. ], batch size: 57, lr: 3.75e-02, grad_scale: 0.00390625 +2023-03-08 13:51:15,576 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([6.0804, 6.0774, 6.0779, 6.0771, 6.0803, 6.0802, 6.0487, 6.0796], + device='cuda:1'), covar=tensor([0.0012, 0.0011, 0.0009, 0.0012, 0.0010, 0.0011, 0.0010, 0.0017], + device='cuda:1'), in_proj_covar=tensor([0.0009, 0.0008, 0.0009, 0.0008, 0.0009, 0.0008, 0.0008, 0.0008], + device='cuda:1'), out_proj_covar=tensor([8.6542e-06, 8.7583e-06, 8.6064e-06, 8.3132e-06, 8.5118e-06, 8.5975e-06, + 8.4249e-06, 8.6711e-06], device='cuda:1') +2023-03-08 13:51:37,157 WARNING [optim.py:389] (1/4) Scaling gradients by 0.0006386953755281866, model_norm_threshold=440.7669677734375 +2023-03-08 13:51:37,397 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.skip_modules.4.weight1 with proportion 0.43, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=2.042e+11, grad_sumsq = 2.042e+11, orig_rms_sq=1.000e+00 +2023-03-08 13:51:43,351 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=296.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 13:51:46,143 WARNING [optim.py:389] (1/4) Scaling gradients by 0.04052559658885002, model_norm_threshold=440.7669677734375 +2023-03-08 13:51:46,311 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.77, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=9.126e+07, grad_sumsq = 1.809e+09, orig_rms_sq=5.045e-02 +2023-03-08 13:51:46,608 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=300.0, num_to_drop=2, layers_to_drop={0, 2} +2023-03-08 13:51:47,142 WARNING [train.py:888] (1/4) Grad scale is small: 0.00390625 +2023-03-08 13:51:47,142 INFO [train.py:898] (1/4) Epoch 1, batch 300, loss[loss=1.014, simple_loss=0.8466, pruned_loss=0.9823, over 15990.00 frames. ], tot_loss[loss=1.114, simple_loss=0.9486, pruned_loss=1.163, over 2782418.90 frames. ], batch size: 94, lr: 4.00e-02, grad_scale: 0.0078125 +2023-03-08 13:51:55,816 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 8.726e+01 1.552e+02 2.190e+02 3.865e+02 6.901e+05, threshold=4.380e+02, percent-clipped=20.0 +2023-03-08 13:52:05,161 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=8.81 vs. limit=2.0 +2023-03-08 13:52:10,066 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=52.86 vs. 
limit=5.0 +2023-03-08 13:52:21,627 WARNING [optim.py:389] (1/4) Scaling gradients by 0.00015154901484493166, model_norm_threshold=438.01873779296875 +2023-03-08 13:52:21,830 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.70, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=5.875e+12, grad_sumsq = 1.517e+14, orig_rms_sq=3.874e-02 +2023-03-08 13:52:23,427 WARNING [optim.py:389] (1/4) Scaling gradients by 0.01597026363015175, model_norm_threshold=438.01873779296875 +2023-03-08 13:52:23,696 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.80, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=6.028e+08, grad_sumsq = 1.556e+10, orig_rms_sq=3.874e-02 +2023-03-08 13:52:24,510 WARNING [optim.py:389] (1/4) Scaling gradients by 0.022203104570508003, model_norm_threshold=438.01873779296875 +2023-03-08 13:52:24,671 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.86, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=3.351e+08, grad_sumsq = 8.650e+09, orig_rms_sq=3.874e-02 +2023-03-08 13:52:26,189 WARNING [optim.py:389] (1/4) Scaling gradients by 0.008352968841791153, model_norm_threshold=438.01873779296875 +2023-03-08 13:52:26,399 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.77, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=2.124e+09, grad_sumsq = 5.347e+10, orig_rms_sq=3.973e-02 +2023-03-08 13:52:30,359 INFO [train.py:898] (1/4) Epoch 1, batch 350, loss[loss=0.8776, simple_loss=0.7153, pruned_loss=0.8725, over 18232.00 frames. ], tot_loss[loss=1.075, simple_loss=0.9076, pruned_loss=1.108, over 2964075.05 frames. 
], batch size: 45, lr: 4.25e-02, grad_scale: 0.00390625 +2023-03-08 13:52:31,226 WARNING [optim.py:389] (1/4) Scaling gradients by 0.00011210949014639482, model_norm_threshold=438.01873779296875 +2023-03-08 13:52:31,459 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.85, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.296e+13, grad_sumsq = 3.160e+14, orig_rms_sq=4.100e-02 +2023-03-08 13:52:34,664 WARNING [optim.py:389] (1/4) Scaling gradients by 0.06386774033308029, model_norm_threshold=438.01873779296875 +2023-03-08 13:52:34,839 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.58, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=2.733e+07, grad_sumsq = 6.508e+08, orig_rms_sq=4.200e-02 +2023-03-08 13:52:35,610 WARNING [optim.py:389] (1/4) Scaling gradients by 0.0002155240799766034, model_norm_threshold=438.01873779296875 +2023-03-08 13:52:35,827 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.85, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=3.507e+12, grad_sumsq = 8.280e+13, orig_rms_sq=4.236e-02 +2023-03-08 13:52:36,176 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=357.0, num_to_drop=2, layers_to_drop={0, 2} +2023-03-08 13:52:43,649 WARNING [optim.py:389] (1/4) Scaling gradients by 0.08033499121665955, model_norm_threshold=438.01873779296875 +2023-03-08 13:52:43,805 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.60, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.796e+07, grad_sumsq = 4.255e+08, orig_rms_sq=4.221e-02 +2023-03-08 13:52:58,804 WARNING [optim.py:389] (1/4) Scaling gradients by 0.00024505704641342163, model_norm_threshold=438.01873779296875 +2023-03-08 13:52:58,961 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.67, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=2.127e+12, grad_sumsq = 5.153e+13, orig_rms_sq=4.128e-02 +2023-03-08 13:53:00,792 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=387.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 13:53:01,256 WARNING [optim.py:389] (1/4) Scaling gradients by 0.0035240945871919394, model_norm_threshold=438.01873779296875 +2023-03-08 13:53:01,445 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.51, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=7.954e+09, grad_sumsq = 1.927e+11, orig_rms_sq=4.128e-02 +2023-03-08 13:53:02,956 WARNING [optim.py:389] (1/4) Scaling gradients by 0.00012842776777688414, model_norm_threshold=438.01873779296875 +2023-03-08 13:53:03,123 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.65, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=7.588e+12, grad_sumsq = 1.863e+14, orig_rms_sq=4.072e-02 +2023-03-08 13:53:10,042 WARNING [optim.py:389] (1/4) Scaling gradients by 0.007196913007646799, model_norm_threshold=438.01873779296875 +2023-03-08 13:53:10,286 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoders.4.encoder.layers.0.norm_final.eps with proportion 0.38, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.420e+09, grad_sumsq = 1.420e+09, orig_rms_sq=1.000e+00 +2023-03-08 13:53:13,987 WARNING [train.py:888] (1/4) Grad scale is small: 
0.00390625 +2023-03-08 13:53:13,988 INFO [train.py:898] (1/4) Epoch 1, batch 400, loss[loss=0.8709, simple_loss=0.7007, pruned_loss=0.8573, over 18248.00 frames. ], tot_loss[loss=1.043, simple_loss=0.8735, pruned_loss=1.062, over 3104129.04 frames. ], batch size: 45, lr: 4.50e-02, grad_scale: 0.0078125 +2023-03-08 13:53:23,506 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 2.188e+02 3.037e+02 6.402e+02 3.907e+06, threshold=6.074e+02, percent-clipped=33.0 +2023-03-08 13:53:40,377 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=5.51 vs. limit=2.0 +2023-03-08 13:53:44,555 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=439.0, num_to_drop=2, layers_to_drop={0, 2} +2023-03-08 13:53:51,231 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=11.98 vs. limit=2.0 +2023-03-08 13:53:52,108 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=10.86 vs. limit=2.0 +2023-03-08 13:53:53,275 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=448.0, num_to_drop=2, layers_to_drop={1, 3} +2023-03-08 13:53:55,318 INFO [train.py:898] (1/4) Epoch 1, batch 450, loss[loss=0.9947, simple_loss=0.8103, pruned_loss=0.9102, over 12426.00 frames. ], tot_loss[loss=1.021, simple_loss=0.8471, pruned_loss=1.027, over 3210121.68 frames. ], batch size: 130, lr: 4.75e-02, grad_scale: 0.0078125 +2023-03-08 13:53:59,211 WARNING [optim.py:389] (1/4) Scaling gradients by 0.001993334386497736, model_norm_threshold=607.3988037109375 +2023-03-08 13:53:59,373 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoders.4.encoder.layers.1.norm_final.eps with proportion 0.46, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=4.249e+10, grad_sumsq = 4.249e+10, orig_rms_sq=1.000e+00 +2023-03-08 13:54:00,201 WARNING [optim.py:389] (1/4) Scaling gradients by 0.009787621907889843, model_norm_threshold=607.3988037109375 +2023-03-08 13:54:00,370 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoders.4.encoder.layers.0.norm_final.eps with proportion 0.37, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.407e+09, grad_sumsq = 1.407e+09, orig_rms_sq=1.000e+00 +2023-03-08 13:54:07,275 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=11.15 vs. 
limit=2.0 +2023-03-08 13:54:09,177 WARNING [optim.py:389] (1/4) Scaling gradients by 0.07029950618743896, model_norm_threshold=607.3988037109375 +2023-03-08 13:54:09,374 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.83, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=6.195e+07, grad_sumsq = 1.377e+09, orig_rms_sq=4.498e-02 +2023-03-08 13:54:20,900 WARNING [optim.py:389] (1/4) Scaling gradients by 0.008813662454485893, model_norm_threshold=607.3988037109375 +2023-03-08 13:54:21,061 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.85, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=4.028e+09, grad_sumsq = 8.824e+10, orig_rms_sq=4.564e-02 +2023-03-08 13:54:27,988 WARNING [optim.py:389] (1/4) Scaling gradients by 0.024284733459353447, model_norm_threshold=607.3988037109375 +2023-03-08 13:54:28,151 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.83, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=5.221e+08, grad_sumsq = 1.167e+10, orig_rms_sq=4.473e-02 +2023-03-08 13:54:38,287 WARNING [optim.py:389] (1/4) Scaling gradients by 0.0006707996362820268, model_norm_threshold=607.3988037109375 +2023-03-08 13:54:38,507 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoders.4.encoder.layers.1.norm_final.eps with proportion 0.69, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=5.647e+11, grad_sumsq = 5.647e+11, orig_rms_sq=1.000e+00 +2023-03-08 13:54:38,547 WARNING [train.py:888] (1/4) Grad scale is small: 0.0078125 +2023-03-08 13:54:38,547 INFO [train.py:898] (1/4) Epoch 1, batch 500, loss[loss=0.9905, simple_loss=0.7944, pruned_loss=0.9081, over 12731.00 frames. ], tot_loss[loss=1.007, simple_loss=0.8265, pruned_loss=1.003, over 3293178.24 frames. 
], batch size: 130, lr: 4.99e-02, grad_scale: 0.015625 +2023-03-08 13:54:42,458 WARNING [optim.py:389] (1/4) Scaling gradients by 0.006503281649202108, model_norm_threshold=607.3988037109375 +2023-03-08 13:54:42,621 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoders.1.out_combiner.weight1 with proportion 0.48, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=4.173e+09, grad_sumsq = 4.173e+09, orig_rms_sq=1.000e+00 +2023-03-08 13:54:48,229 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.357e+02 2.649e+02 4.541e+02 8.074e+02 9.055e+05, threshold=9.081e+02, percent-clipped=35.0 +2023-03-08 13:54:48,229 WARNING [optim.py:389] (1/4) Scaling gradients by 0.07500762492418289, model_norm_threshold=908.1141357421875 +2023-03-08 13:54:48,413 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoders.1.out_combiner.weight1 with proportion 0.80, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.178e+08, grad_sumsq = 1.178e+08, orig_rms_sq=1.000e+00 +2023-03-08 13:54:53,816 WARNING [optim.py:389] (1/4) Scaling gradients by 0.00848373118788004, model_norm_threshold=908.1141357421875 +2023-03-08 13:54:54,045 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoders.2.out_combiner.weight1 with proportion 0.43, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=4.921e+09, grad_sumsq = 4.921e+09, orig_rms_sq=1.000e+00 +2023-03-08 13:54:54,844 WARNING [optim.py:389] (1/4) Scaling gradients by 0.0037236642092466354, model_norm_threshold=908.1141357421875 +2023-03-08 13:54:55,008 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.skip_modules.4.weight1 with proportion 0.69, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=4.127e+10, grad_sumsq = 4.127e+10, orig_rms_sq=1.000e+00 +2023-03-08 13:54:58,787 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=5.77 vs. 
limit=2.0 +2023-03-08 13:55:00,565 WARNING [optim.py:389] (1/4) Scaling gradients by 0.0036443807184696198, model_norm_threshold=908.1141357421875 +2023-03-08 13:55:00,757 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.88, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=5.460e+10, grad_sumsq = 1.061e+12, orig_rms_sq=5.145e-02 +2023-03-08 13:55:01,575 WARNING [optim.py:389] (1/4) Scaling gradients by 0.004900030791759491, model_norm_threshold=908.1141357421875 +2023-03-08 13:55:01,750 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.35, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.186e+10, grad_sumsq = 2.280e+11, orig_rms_sq=5.204e-02 +2023-03-08 13:55:02,552 WARNING [optim.py:389] (1/4) Scaling gradients by 0.07598941773176193, model_norm_threshold=908.1141357421875 +2023-03-08 13:55:02,710 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.93, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.329e+08, grad_sumsq = 2.553e+09, orig_rms_sq=5.204e-02 +2023-03-08 13:55:05,074 WARNING [optim.py:389] (1/4) Scaling gradients by 0.028503550216555595, model_norm_threshold=908.1141357421875 +2023-03-08 13:55:05,241 INFO [optim.py:451] (1/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.78, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=7.871e+08, grad_sumsq = 1.620e+10, orig_rms_sq=4.859e-02 +2023-03-08 13:55:06,417 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=75.83 vs. limit=5.0 +2023-03-08 13:55:21,072 INFO [train.py:898] (1/4) Epoch 1, batch 550, loss[loss=0.9938, simple_loss=0.7775, pruned_loss=0.9293, over 18271.00 frames. ], tot_loss[loss=1.001, simple_loss=0.8135, pruned_loss=0.9843, over 3346910.97 frames. ], batch size: 57, lr: 4.98e-02, grad_scale: 0.015625 +2023-03-08 13:55:22,953 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4332, 5.4990, 4.8798, 5.2684, 5.1911, 5.4182, 5.4629, 4.9583], + device='cuda:1'), covar=tensor([0.0841, 0.1747, 0.0774, 0.1340, 0.1121, 0.0646, 0.1710, 0.1118], + device='cuda:1'), in_proj_covar=tensor([0.0015, 0.0015, 0.0016, 0.0016, 0.0014, 0.0015, 0.0014, 0.0016], + device='cuda:1'), out_proj_covar=tensor([1.4137e-05, 1.4739e-05, 1.4501e-05, 1.5390e-05, 1.3984e-05, 1.4078e-05, + 1.3902e-05, 1.5452e-05], device='cuda:1') +2023-03-08 13:55:29,948 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=562.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 13:55:34,761 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=568.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 13:55:44,971 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=106.81 vs. limit=5.0 +2023-03-08 13:55:51,851 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=590.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 13:55:52,816 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=6.52 vs. limit=2.0 +2023-03-08 13:56:00,917 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=600.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-08 13:56:01,484 INFO [train.py:898] (1/4) Epoch 1, batch 600, loss[loss=0.9595, simple_loss=0.7378, pruned_loss=0.8978, over 18355.00 frames. 
], tot_loss[loss=0.9939, simple_loss=0.7986, pruned_loss=0.9677, over 3416628.39 frames. ], batch size: 46, lr: 4.98e-02, grad_scale: 0.03125 +2023-03-08 13:56:03,897 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=4.79 vs. limit=2.0 +2023-03-08 13:56:11,703 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.187e+02 4.468e+02 7.848e+02 1.243e+03 2.492e+05, threshold=1.570e+03, percent-clipped=35.0 +2023-03-08 13:56:19,728 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=623.0, num_to_drop=2, layers_to_drop={1, 3} +2023-03-08 13:56:24,206 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=629.0, num_to_drop=2, layers_to_drop={0, 3} +2023-03-08 13:56:39,119 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=648.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 13:56:41,345 INFO [train.py:898] (1/4) Epoch 1, batch 650, loss[loss=1.058, simple_loss=0.8077, pruned_loss=0.9713, over 17994.00 frames. ], tot_loss[loss=0.9945, simple_loss=0.7909, pruned_loss=0.9567, over 3444305.78 frames. ], batch size: 65, lr: 4.98e-02, grad_scale: 0.03125 +2023-03-08 13:56:42,241 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=651.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-08 13:56:42,913 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=652.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-08 13:57:22,328 INFO [train.py:898] (1/4) Epoch 1, batch 700, loss[loss=1.073, simple_loss=0.8148, pruned_loss=0.9642, over 18315.00 frames. ], tot_loss[loss=0.9948, simple_loss=0.7831, pruned_loss=0.9451, over 3482289.80 frames. ], batch size: 54, lr: 4.98e-02, grad_scale: 0.0625 +2023-03-08 13:57:33,534 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.059e+02 3.958e+02 5.670e+02 9.058e+02 3.205e+03, threshold=1.134e+03, percent-clipped=9.0 +2023-03-08 13:57:44,589 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=63.16 vs. limit=5.0 +2023-03-08 13:57:54,350 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=739.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-08 13:57:57,518 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=743.0, num_to_drop=2, layers_to_drop={2, 3} +2023-03-08 13:58:03,598 INFO [train.py:898] (1/4) Epoch 1, batch 750, loss[loss=1.088, simple_loss=0.8176, pruned_loss=0.9673, over 18299.00 frames. ], tot_loss[loss=0.997, simple_loss=0.7779, pruned_loss=0.9342, over 3488051.98 frames. ], batch size: 54, lr: 4.97e-02, grad_scale: 0.0625 +2023-03-08 13:58:13,120 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=6.77 vs. limit=2.0 +2023-03-08 13:58:34,296 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=787.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 13:58:41,744 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=3.39 vs. limit=2.0 +2023-03-08 13:58:45,699 INFO [train.py:898] (1/4) Epoch 1, batch 800, loss[loss=1.068, simple_loss=0.802, pruned_loss=0.9257, over 18253.00 frames. ], tot_loss[loss=1.002, simple_loss=0.7744, pruned_loss=0.9262, over 3511405.65 frames. 
], batch size: 60, lr: 4.97e-02, grad_scale: 0.125 +2023-03-08 13:58:55,630 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.968e+02 3.286e+02 5.870e+02 9.737e+02 2.138e+03, threshold=1.174e+03, percent-clipped=19.0 +2023-03-08 13:59:21,742 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=4.97 vs. limit=2.0 +2023-03-08 13:59:23,820 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=847.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 13:59:26,831 INFO [train.py:898] (1/4) Epoch 1, batch 850, loss[loss=1.014, simple_loss=0.7442, pruned_loss=0.8863, over 18391.00 frames. ], tot_loss[loss=1.001, simple_loss=0.7673, pruned_loss=0.9136, over 3517147.09 frames. ], batch size: 50, lr: 4.96e-02, grad_scale: 0.125 +2023-03-08 13:59:58,353 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=3.33 vs. limit=2.0 +2023-03-08 14:00:03,923 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=3.13 vs. limit=2.0 +2023-03-08 14:00:09,700 INFO [train.py:898] (1/4) Epoch 1, batch 900, loss[loss=1.051, simple_loss=0.7883, pruned_loss=0.8705, over 12989.00 frames. ], tot_loss[loss=1.005, simple_loss=0.7638, pruned_loss=0.9034, over 3528550.04 frames. ], batch size: 129, lr: 4.96e-02, grad_scale: 0.25 +2023-03-08 14:00:10,832 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.9448, 5.8002, 5.8549, 5.6934, 5.6870, 5.7620, 5.6976, 5.9377], + device='cuda:1'), covar=tensor([0.0302, 0.0695, 0.0294, 0.0988, 0.0743, 0.0636, 0.0840, 0.0275], + device='cuda:1'), in_proj_covar=tensor([0.0011, 0.0012, 0.0011, 0.0011, 0.0012, 0.0012, 0.0011, 0.0011], + device='cuda:1'), out_proj_covar=tensor([1.0925e-05, 1.1139e-05, 1.1108e-05, 1.0879e-05, 1.1367e-05, 1.1430e-05, + 1.0674e-05, 1.0927e-05], device='cuda:1') +2023-03-08 14:00:15,587 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=908.0, num_to_drop=2, layers_to_drop={0, 3} +2023-03-08 14:00:19,231 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.803e+02 2.708e+02 4.690e+02 7.118e+02 2.600e+03, threshold=9.379e+02, percent-clipped=5.0 +2023-03-08 14:00:24,221 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=918.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-08 14:00:28,872 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=924.0, num_to_drop=2, layers_to_drop={2, 3} +2023-03-08 14:00:47,941 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=946.0, num_to_drop=2, layers_to_drop={1, 3} +2023-03-08 14:00:51,747 INFO [train.py:898] (1/4) Epoch 1, batch 950, loss[loss=1.043, simple_loss=0.7693, pruned_loss=0.8656, over 18361.00 frames. ], tot_loss[loss=1.007, simple_loss=0.7602, pruned_loss=0.8926, over 3540864.73 frames. 
], batch size: 55, lr: 4.96e-02, grad_scale: 0.25 +2023-03-08 14:00:52,781 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=952.0, num_to_drop=2, layers_to_drop={0, 2} +2023-03-08 14:00:53,468 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([6.5644, 6.5679, 6.5491, 6.5635, 6.5537, 6.5671, 6.5667, 6.5664], + device='cuda:1'), covar=tensor([0.0622, 0.0296, 0.0646, 0.0280, 0.0505, 0.0331, 0.0337, 0.0285], + device='cuda:1'), in_proj_covar=tensor([0.0017, 0.0016, 0.0016, 0.0015, 0.0016, 0.0015, 0.0016, 0.0016], + device='cuda:1'), out_proj_covar=tensor([1.7466e-05, 1.6638e-05, 1.6732e-05, 1.5965e-05, 1.7144e-05, 1.6531e-05, + 1.6045e-05, 1.6333e-05], device='cuda:1') +2023-03-08 14:01:30,375 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9470, 4.8513, 5.0226, 4.6642, 4.7801, 4.8492, 4.8847, 5.0492], + device='cuda:1'), covar=tensor([0.0557, 0.0721, 0.0402, 0.0772, 0.1307, 0.0616, 0.0526, 0.0446], + device='cuda:1'), in_proj_covar=tensor([0.0012, 0.0011, 0.0012, 0.0011, 0.0012, 0.0011, 0.0011, 0.0012], + device='cuda:1'), out_proj_covar=tensor([1.1496e-05, 1.1593e-05, 1.1542e-05, 1.1860e-05, 1.1400e-05, 1.1238e-05, + 1.0801e-05, 1.1411e-05], device='cuda:1') +2023-03-08 14:01:33,196 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1000.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:01:33,774 INFO [train.py:898] (1/4) Epoch 1, batch 1000, loss[loss=1.07, simple_loss=0.7846, pruned_loss=0.8748, over 18336.00 frames. ], tot_loss[loss=1.012, simple_loss=0.759, pruned_loss=0.8824, over 3555183.58 frames. ], batch size: 56, lr: 4.95e-02, grad_scale: 0.5 +2023-03-08 14:01:43,703 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.541e+02 3.491e+02 4.812e+02 7.825e+02 1.437e+03, threshold=9.623e+02, percent-clipped=15.0 +2023-03-08 14:01:59,652 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.9078, 5.8386, 5.7690, 5.8776, 5.6517, 5.8106, 5.7306, 6.0044], + device='cuda:1'), covar=tensor([0.0597, 0.0666, 0.0539, 0.0627, 0.0788, 0.0623, 0.1006, 0.0362], + device='cuda:1'), in_proj_covar=tensor([0.0013, 0.0014, 0.0013, 0.0013, 0.0014, 0.0014, 0.0014, 0.0013], + device='cuda:1'), out_proj_covar=tensor([1.2881e-05, 1.3154e-05, 1.3095e-05, 1.2878e-05, 1.3420e-05, 1.3558e-05, + 1.2681e-05, 1.2896e-05], device='cuda:1') +2023-03-08 14:02:05,209 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=3.05 vs. limit=2.0 +2023-03-08 14:02:10,268 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1043.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-08 14:02:13,491 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=4.71 vs. limit=2.0 +2023-03-08 14:02:17,218 INFO [train.py:898] (1/4) Epoch 1, batch 1050, loss[loss=0.8781, simple_loss=0.6619, pruned_loss=0.681, over 18240.00 frames. ], tot_loss[loss=1.012, simple_loss=0.7569, pruned_loss=0.8657, over 3540754.43 frames. 
], batch size: 45, lr: 4.95e-02, grad_scale: 0.5 +2023-03-08 14:02:20,204 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.8516, 5.5607, 5.7127, 5.4870, 5.5100, 5.9465, 5.6490, 5.9330], + device='cuda:1'), covar=tensor([0.3930, 0.6072, 0.4660, 0.7445, 0.9825, 0.2980, 0.7754, 0.2681], + device='cuda:1'), in_proj_covar=tensor([0.0053, 0.0048, 0.0051, 0.0049, 0.0058, 0.0052, 0.0056, 0.0049], + device='cuda:1'), out_proj_covar=tensor([5.0201e-05, 4.6560e-05, 5.0902e-05, 4.8875e-05, 5.5052e-05, 5.1912e-05, + 4.9607e-05, 4.4859e-05], device='cuda:1') +2023-03-08 14:02:21,193 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=6.85 vs. limit=2.0 +2023-03-08 14:02:23,590 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=2.64 vs. limit=2.0 +2023-03-08 14:02:26,108 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=2.96 vs. limit=2.0 +2023-03-08 14:02:40,987 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=2.45 vs. limit=2.0 +2023-03-08 14:02:43,560 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=10.54 vs. limit=5.0 +2023-03-08 14:02:52,429 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1091.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:02:54,365 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=3.06 vs. limit=2.0 +2023-03-08 14:02:57,788 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=2.15 vs. limit=2.0 +2023-03-08 14:03:01,214 INFO [train.py:898] (1/4) Epoch 1, batch 1100, loss[loss=0.9769, simple_loss=0.7469, pruned_loss=0.7314, over 18301.00 frames. ], tot_loss[loss=1.004, simple_loss=0.7526, pruned_loss=0.8405, over 3550149.01 frames. ], batch size: 49, lr: 4.94e-02, grad_scale: 1.0 +2023-03-08 14:03:10,796 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.010e+02 4.676e+02 7.290e+02 9.507e+02 1.781e+03, threshold=1.458e+03, percent-clipped=22.0 +2023-03-08 14:03:22,078 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=5.95 vs. limit=5.0 +2023-03-08 14:03:44,992 INFO [train.py:898] (1/4) Epoch 1, batch 1150, loss[loss=0.8948, simple_loss=0.7059, pruned_loss=0.6345, over 18348.00 frames. ], tot_loss[loss=0.9868, simple_loss=0.7443, pruned_loss=0.8043, over 3552417.20 frames. ], batch size: 55, lr: 4.94e-02, grad_scale: 1.0 +2023-03-08 14:03:46,945 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1153.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:04:11,048 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=6.00 vs. limit=5.0 +2023-03-08 14:04:28,516 INFO [train.py:898] (1/4) Epoch 1, batch 1200, loss[loss=0.8327, simple_loss=0.67, pruned_loss=0.5682, over 18492.00 frames. ], tot_loss[loss=0.9606, simple_loss=0.7318, pruned_loss=0.7607, over 3570353.80 frames. 
], batch size: 51, lr: 4.93e-02, grad_scale: 2.0 +2023-03-08 14:04:30,937 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1203.0, num_to_drop=2, layers_to_drop={1, 3} +2023-03-08 14:04:38,787 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.547e+02 6.963e+02 9.525e+02 1.418e+03 3.358e+03, threshold=1.905e+03, percent-clipped=24.0 +2023-03-08 14:04:39,913 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1214.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-08 14:04:40,999 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=5.88 vs. limit=5.0 +2023-03-08 14:04:43,199 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1218.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-08 14:04:47,898 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1224.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 14:05:04,151 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.96 vs. limit=2.0 +2023-03-08 14:05:06,346 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1246.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:05:11,324 INFO [train.py:898] (1/4) Epoch 1, batch 1250, loss[loss=0.8326, simple_loss=0.6806, pruned_loss=0.5504, over 18128.00 frames. ], tot_loss[loss=0.9271, simple_loss=0.7156, pruned_loss=0.7119, over 3584062.54 frames. ], batch size: 62, lr: 4.92e-02, grad_scale: 2.0 +2023-03-08 14:05:24,173 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1266.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:05:28,877 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1272.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:05:48,371 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1294.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:05:49,658 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.42 vs. limit=2.0 +2023-03-08 14:05:53,955 INFO [train.py:898] (1/4) Epoch 1, batch 1300, loss[loss=0.7602, simple_loss=0.6411, pruned_loss=0.478, over 18365.00 frames. ], tot_loss[loss=0.8908, simple_loss=0.6969, pruned_loss=0.6637, over 3583847.43 frames. ], batch size: 55, lr: 4.92e-02, grad_scale: 2.0 +2023-03-08 14:06:04,912 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+02 7.521e+02 1.026e+03 1.273e+03 2.275e+03, threshold=2.053e+03, percent-clipped=5.0 +2023-03-08 14:06:15,755 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1225, 3.8583, 4.0023, 4.1055, 3.5953, 3.9694, 3.7627, 4.0216], + device='cuda:1'), covar=tensor([0.1441, 0.2819, 0.1761, 0.1270, 0.2317, 0.1817, 0.2683, 0.1793], + device='cuda:1'), in_proj_covar=tensor([0.0034, 0.0033, 0.0035, 0.0032, 0.0034, 0.0033, 0.0035, 0.0036], + device='cuda:1'), out_proj_covar=tensor([3.3580e-05, 3.3876e-05, 3.3528e-05, 3.4066e-05, 3.4561e-05, 3.2754e-05, + 3.3666e-05, 3.5172e-05], device='cuda:1') +2023-03-08 14:06:38,459 INFO [train.py:898] (1/4) Epoch 1, batch 1350, loss[loss=0.6264, simple_loss=0.5383, pruned_loss=0.3813, over 18165.00 frames. ], tot_loss[loss=0.8521, simple_loss=0.6764, pruned_loss=0.6158, over 3585296.24 frames. 
], batch size: 44, lr: 4.91e-02, grad_scale: 2.0 +2023-03-08 14:06:57,881 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4728, 3.4920, 3.8046, 3.6473, 3.7653, 3.7322, 3.6068, 3.7065], + device='cuda:1'), covar=tensor([0.2314, 0.3506, 0.1598, 0.2179, 0.2445, 0.2049, 0.2310, 0.2050], + device='cuda:1'), in_proj_covar=tensor([0.0039, 0.0052, 0.0035, 0.0044, 0.0048, 0.0043, 0.0045, 0.0043], + device='cuda:1'), out_proj_covar=tensor([3.6997e-05, 4.3903e-05, 3.2964e-05, 4.0421e-05, 4.1896e-05, 3.9347e-05, + 4.4286e-05, 3.6473e-05], device='cuda:1') +2023-03-08 14:07:24,100 INFO [train.py:898] (1/4) Epoch 1, batch 1400, loss[loss=0.6988, simple_loss=0.5999, pruned_loss=0.4231, over 18324.00 frames. ], tot_loss[loss=0.8154, simple_loss=0.657, pruned_loss=0.5718, over 3581053.53 frames. ], batch size: 54, lr: 4.91e-02, grad_scale: 2.0 +2023-03-08 14:07:34,533 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.4502, 4.5206, 4.6097, 4.6497, 4.8037, 4.8579, 4.6853, 4.5867], + device='cuda:1'), covar=tensor([0.2885, 0.4501, 0.2764, 0.3112, 0.3537, 0.2507, 0.3264, 0.3010], + device='cuda:1'), in_proj_covar=tensor([0.0042, 0.0056, 0.0038, 0.0047, 0.0052, 0.0046, 0.0049, 0.0046], + device='cuda:1'), out_proj_covar=tensor([4.0119e-05, 4.7585e-05, 3.5073e-05, 4.2591e-05, 4.5657e-05, 4.2664e-05, + 4.7777e-05, 3.8955e-05], device='cuda:1') +2023-03-08 14:07:35,944 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.189e+02 7.494e+02 9.042e+02 1.119e+03 2.290e+03, threshold=1.808e+03, percent-clipped=1.0 +2023-03-08 14:07:43,780 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0954, 5.0056, 4.7601, 4.9597, 5.1418, 4.8705, 4.8865, 4.4873], + device='cuda:1'), covar=tensor([0.2812, 0.2611, 0.5204, 0.4059, 0.3414, 0.4720, 0.4285, 0.6465], + device='cuda:1'), in_proj_covar=tensor([0.0077, 0.0075, 0.0089, 0.0077, 0.0081, 0.0098, 0.0095, 0.0102], + device='cuda:1'), out_proj_covar=tensor([7.6926e-05, 7.4147e-05, 8.3588e-05, 8.0032e-05, 7.8473e-05, 9.2262e-05, + 8.7625e-05, 9.4617e-05], device='cuda:1') +2023-03-08 14:07:55,147 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1434.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:08:09,504 INFO [train.py:898] (1/4) Epoch 1, batch 1450, loss[loss=0.7495, simple_loss=0.6333, pruned_loss=0.4594, over 18574.00 frames. ], tot_loss[loss=0.7788, simple_loss=0.6373, pruned_loss=0.5302, over 3590134.69 frames. ], batch size: 54, lr: 4.90e-02, grad_scale: 2.0 +2023-03-08 14:08:50,629 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1495.0, num_to_drop=2, layers_to_drop={0, 2} +2023-03-08 14:08:55,492 INFO [train.py:898] (1/4) Epoch 1, batch 1500, loss[loss=0.5353, simple_loss=0.4816, pruned_loss=0.3024, over 18408.00 frames. ], tot_loss[loss=0.7473, simple_loss=0.6207, pruned_loss=0.4944, over 3583857.95 frames. 
], batch size: 42, lr: 4.89e-02, grad_scale: 2.0 +2023-03-08 14:08:57,720 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1503.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:09:03,802 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1509.0, num_to_drop=2, layers_to_drop={1, 3} +2023-03-08 14:09:07,634 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 6.843e+02 9.252e+02 1.183e+03 2.667e+03, threshold=1.850e+03, percent-clipped=7.0 +2023-03-08 14:09:43,238 INFO [train.py:898] (1/4) Epoch 1, batch 1550, loss[loss=0.6692, simple_loss=0.5828, pruned_loss=0.3915, over 12877.00 frames. ], tot_loss[loss=0.7161, simple_loss=0.6042, pruned_loss=0.4605, over 3593608.44 frames. ], batch size: 130, lr: 4.89e-02, grad_scale: 2.0 +2023-03-08 14:09:43,422 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1551.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:09:57,953 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1566.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:10:30,642 INFO [train.py:898] (1/4) Epoch 1, batch 1600, loss[loss=0.5798, simple_loss=0.5354, pruned_loss=0.3152, over 18394.00 frames. ], tot_loss[loss=0.6858, simple_loss=0.5873, pruned_loss=0.4295, over 3596724.63 frames. ], batch size: 50, lr: 4.88e-02, grad_scale: 4.0 +2023-03-08 14:10:33,646 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.4304, 2.2090, 2.6496, 2.4138, 2.5744, 2.5367, 2.8088, 2.5638], + device='cuda:1'), covar=tensor([0.4532, 0.5653, 0.3867, 0.4798, 0.5084, 0.4892, 0.3903, 0.4573], + device='cuda:1'), in_proj_covar=tensor([0.0059, 0.0063, 0.0056, 0.0058, 0.0058, 0.0063, 0.0059, 0.0053], + device='cuda:1'), out_proj_covar=tensor([5.2742e-05, 5.9208e-05, 5.1826e-05, 5.0213e-05, 5.1360e-05, 5.7813e-05, + 5.2357e-05, 4.7991e-05], device='cuda:1') +2023-03-08 14:10:39,886 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1611.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:10:41,385 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.398e+02 7.144e+02 8.708e+02 1.119e+03 2.244e+03, threshold=1.742e+03, percent-clipped=2.0 +2023-03-08 14:10:55,251 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1627.0, num_to_drop=2, layers_to_drop={1, 3} +2023-03-08 14:11:16,711 INFO [train.py:898] (1/4) Epoch 1, batch 1650, loss[loss=0.5823, simple_loss=0.551, pruned_loss=0.3069, over 18320.00 frames. ], tot_loss[loss=0.6606, simple_loss=0.5738, pruned_loss=0.4035, over 3594468.95 frames. ], batch size: 57, lr: 4.87e-02, grad_scale: 4.0 +2023-03-08 14:11:36,876 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1672.0, num_to_drop=2, layers_to_drop={0, 2} +2023-03-08 14:12:04,574 INFO [train.py:898] (1/4) Epoch 1, batch 1700, loss[loss=0.5273, simple_loss=0.4924, pruned_loss=0.2822, over 18419.00 frames. ], tot_loss[loss=0.6347, simple_loss=0.5595, pruned_loss=0.3784, over 3589916.88 frames. ], batch size: 48, lr: 4.86e-02, grad_scale: 4.0 +2023-03-08 14:12:15,867 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.590e+02 6.591e+02 8.765e+02 1.033e+03 1.987e+03, threshold=1.753e+03, percent-clipped=3.0 +2023-03-08 14:12:45,358 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. 
limit=2.0 +2023-03-08 14:12:53,418 INFO [train.py:898] (1/4) Epoch 1, batch 1750, loss[loss=0.5359, simple_loss=0.5077, pruned_loss=0.2821, over 18529.00 frames. ], tot_loss[loss=0.6099, simple_loss=0.546, pruned_loss=0.355, over 3604382.38 frames. ], batch size: 49, lr: 4.86e-02, grad_scale: 4.0 +2023-03-08 14:13:30,717 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1790.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 14:13:41,373 INFO [train.py:898] (1/4) Epoch 1, batch 1800, loss[loss=0.5355, simple_loss=0.5116, pruned_loss=0.2792, over 18564.00 frames. ], tot_loss[loss=0.5928, simple_loss=0.5364, pruned_loss=0.3387, over 3603145.72 frames. ], batch size: 54, lr: 4.85e-02, grad_scale: 4.0 +2023-03-08 14:13:48,915 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1809.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 14:13:52,153 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.336e+02 7.100e+02 8.751e+02 1.042e+03 2.911e+03, threshold=1.750e+03, percent-clipped=4.0 +2023-03-08 14:14:17,014 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1838.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:14:28,756 INFO [train.py:898] (1/4) Epoch 1, batch 1850, loss[loss=0.4929, simple_loss=0.484, pruned_loss=0.2496, over 18575.00 frames. ], tot_loss[loss=0.5731, simple_loss=0.5256, pruned_loss=0.3211, over 3593927.00 frames. ], batch size: 54, lr: 4.84e-02, grad_scale: 4.0 +2023-03-08 14:14:35,109 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1857.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:14:35,258 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1857.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:14:47,411 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1870.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:15:15,868 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1899.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-08 14:15:17,454 INFO [train.py:898] (1/4) Epoch 1, batch 1900, loss[loss=0.4648, simple_loss=0.4649, pruned_loss=0.2311, over 18367.00 frames. ], tot_loss[loss=0.5558, simple_loss=0.5158, pruned_loss=0.3062, over 3586401.08 frames. 
], batch size: 46, lr: 4.83e-02, grad_scale: 4.0 +2023-03-08 14:15:29,151 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.595e+02 6.983e+02 8.615e+02 1.111e+03 2.145e+03, threshold=1.723e+03, percent-clipped=1.0 +2023-03-08 14:15:34,092 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1918.0, num_to_drop=2, layers_to_drop={0, 3} +2023-03-08 14:15:37,684 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1922.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 14:15:46,101 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3671, 5.2977, 4.9133, 5.2869, 5.1547, 5.5074, 5.0239, 4.9822], + device='cuda:1'), covar=tensor([0.0503, 0.0232, 0.0497, 0.0335, 0.0392, 0.0229, 0.0639, 0.0413], + device='cuda:1'), in_proj_covar=tensor([0.0030, 0.0027, 0.0031, 0.0028, 0.0033, 0.0027, 0.0032, 0.0031], + device='cuda:1'), out_proj_covar=tensor([2.7793e-05, 2.4605e-05, 2.8800e-05, 2.5811e-05, 2.9102e-05, 2.4619e-05, + 3.0304e-05, 2.7110e-05], device='cuda:1') +2023-03-08 14:15:46,131 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1931.0, num_to_drop=2, layers_to_drop={0, 2} +2023-03-08 14:16:06,107 INFO [train.py:898] (1/4) Epoch 1, batch 1950, loss[loss=0.5362, simple_loss=0.5073, pruned_loss=0.2826, over 12904.00 frames. ], tot_loss[loss=0.5395, simple_loss=0.507, pruned_loss=0.2923, over 3588621.26 frames. ], batch size: 131, lr: 4.83e-02, grad_scale: 4.0 +2023-03-08 14:16:18,246 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2489, 4.2020, 3.6241, 4.0037, 4.0286, 3.7999, 3.9191, 3.8058], + device='cuda:1'), covar=tensor([0.0534, 0.0607, 0.2596, 0.0998, 0.0669, 0.1320, 0.1100, 0.1011], + device='cuda:1'), in_proj_covar=tensor([0.0051, 0.0048, 0.0059, 0.0051, 0.0050, 0.0055, 0.0055, 0.0061], + device='cuda:1'), out_proj_covar=tensor([5.1313e-05, 4.6098e-05, 5.8114e-05, 4.9472e-05, 5.1985e-05, 5.3562e-05, + 5.2581e-05, 5.9878e-05], device='cuda:1') +2023-03-08 14:16:21,751 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1967.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:16:39,287 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.6745, 5.5468, 5.4862, 5.3530, 5.5639, 5.8113, 5.3973, 5.3628], + device='cuda:1'), covar=tensor([0.0628, 0.0660, 0.0675, 0.0771, 0.0680, 0.0488, 0.0708, 0.0917], + device='cuda:1'), in_proj_covar=tensor([0.0108, 0.0100, 0.0092, 0.0098, 0.0102, 0.0080, 0.0096, 0.0108], + device='cuda:1'), out_proj_covar=tensor([9.6746e-05, 9.8985e-05, 9.0323e-05, 8.9831e-05, 1.0209e-04, 7.5401e-05, + 8.7711e-05, 9.5783e-05], device='cuda:1') +2023-03-08 14:17:00,035 INFO [train.py:898] (1/4) Epoch 1, batch 2000, loss[loss=0.4105, simple_loss=0.4314, pruned_loss=0.1948, over 18491.00 frames. ], tot_loss[loss=0.5267, simple_loss=0.5, pruned_loss=0.2816, over 3568108.18 frames. 
], batch size: 47, lr: 4.82e-02, grad_scale: 8.0 +2023-03-08 14:17:12,318 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.598e+02 6.794e+02 8.524e+02 1.041e+03 1.894e+03, threshold=1.705e+03, percent-clipped=5.0 +2023-03-08 14:17:39,948 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5455, 3.5137, 3.0407, 3.4808, 3.9645, 3.7392, 3.3320, 3.4105], + device='cuda:1'), covar=tensor([0.1159, 0.0823, 0.1336, 0.0894, 0.0425, 0.0836, 0.1218, 0.1053], + device='cuda:1'), in_proj_covar=tensor([0.0039, 0.0038, 0.0040, 0.0040, 0.0033, 0.0037, 0.0041, 0.0040], + device='cuda:1'), out_proj_covar=tensor([3.3683e-05, 3.1838e-05, 3.5836e-05, 3.3933e-05, 2.7234e-05, 3.1483e-05, + 3.5543e-05, 3.4730e-05], device='cuda:1') +2023-03-08 14:17:53,238 INFO [train.py:898] (1/4) Epoch 1, batch 2050, loss[loss=0.3974, simple_loss=0.424, pruned_loss=0.1854, over 18499.00 frames. ], tot_loss[loss=0.5075, simple_loss=0.4893, pruned_loss=0.2666, over 3572283.69 frames. ], batch size: 47, lr: 4.81e-02, grad_scale: 8.0 +2023-03-08 14:18:17,662 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8069, 4.6598, 4.5481, 4.5193, 4.6657, 4.3324, 4.9793, 4.9666], + device='cuda:1'), covar=tensor([0.0581, 0.1131, 0.0977, 0.0823, 0.0758, 0.0944, 0.0681, 0.0657], + device='cuda:1'), in_proj_covar=tensor([0.0053, 0.0051, 0.0056, 0.0050, 0.0057, 0.0059, 0.0055, 0.0051], + device='cuda:1'), out_proj_covar=tensor([4.3250e-05, 4.2856e-05, 4.7788e-05, 4.0834e-05, 4.8593e-05, 5.3185e-05, + 4.5200e-05, 4.2557e-05], device='cuda:1') +2023-03-08 14:18:34,030 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2090.0, num_to_drop=2, layers_to_drop={0, 2} +2023-03-08 14:18:45,450 INFO [train.py:898] (1/4) Epoch 1, batch 2100, loss[loss=0.4539, simple_loss=0.4655, pruned_loss=0.2212, over 17703.00 frames. ], tot_loss[loss=0.49, simple_loss=0.4794, pruned_loss=0.2532, over 3585527.22 frames. ], batch size: 70, lr: 4.80e-02, grad_scale: 8.0 +2023-03-08 14:18:58,624 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.519e+02 5.789e+02 7.175e+02 9.165e+02 1.305e+03, threshold=1.435e+03, percent-clipped=0.0 +2023-03-08 14:19:24,332 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2138.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:19:38,122 INFO [train.py:898] (1/4) Epoch 1, batch 2150, loss[loss=0.4689, simple_loss=0.4822, pruned_loss=0.2278, over 18137.00 frames. ], tot_loss[loss=0.4796, simple_loss=0.4743, pruned_loss=0.2448, over 3578138.69 frames. ], batch size: 62, lr: 4.79e-02, grad_scale: 8.0 +2023-03-08 14:20:23,712 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2194.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:20:31,428 INFO [train.py:898] (1/4) Epoch 1, batch 2200, loss[loss=0.4981, simple_loss=0.4931, pruned_loss=0.2516, over 13086.00 frames. ], tot_loss[loss=0.4681, simple_loss=0.4685, pruned_loss=0.2356, over 3583282.81 frames. 
+2023-03-08 14:20:44,693 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.109e+02 6.450e+02 8.005e+02 9.321e+02 1.802e+03, threshold=1.601e+03, percent-clipped=2.0
+2023-03-08 14:20:44,919 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2213.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:20:54,173 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2222.0, num_to_drop=2, layers_to_drop={1, 2}
+2023-03-08 14:20:58,251 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2226.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:21:12,430 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2480, 5.3788, 5.4613, 5.3543, 5.6612, 5.5401, 5.4067, 5.3865],
+ device='cuda:1'), covar=tensor([0.0126, 0.0073, 0.0056, 0.0054, 0.0030, 0.0053, 0.0046, 0.0089],
+ device='cuda:1'), in_proj_covar=tensor([0.0019, 0.0017, 0.0018, 0.0020, 0.0015, 0.0017, 0.0016, 0.0019],
+ device='cuda:1'), out_proj_covar=tensor([1.0052e-05, 9.7371e-06, 9.6521e-06, 1.1214e-05, 6.5940e-06, 9.0478e-06,
+ 7.9282e-06, 9.9418e-06], device='cuda:1')
+2023-03-08 14:21:24,257 INFO [train.py:898] (1/4) Epoch 1, batch 2250, loss[loss=0.3978, simple_loss=0.4333, pruned_loss=0.1811, over 18482.00 frames. ], tot_loss[loss=0.457, simple_loss=0.4624, pruned_loss=0.2272, over 3590088.84 frames. ], batch size: 51, lr: 4.77e-02, grad_scale: 8.0
+2023-03-08 14:21:42,719 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2267.0, num_to_drop=1, layers_to_drop={2}
+2023-03-08 14:21:45,782 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2270.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:22:17,966 INFO [train.py:898] (1/4) Epoch 1, batch 2300, loss[loss=0.4422, simple_loss=0.4616, pruned_loss=0.2114, over 16159.00 frames. ], tot_loss[loss=0.4488, simple_loss=0.4577, pruned_loss=0.2211, over 3593325.24 frames. ], batch size: 94, lr: 4.77e-02, grad_scale: 8.0
+2023-03-08 14:22:28,162 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.9206, 5.8559, 5.6051, 5.6015, 5.8739, 6.2585, 5.5283, 5.5789],
+ device='cuda:1'), covar=tensor([0.0514, 0.0561, 0.0672, 0.0538, 0.0772, 0.0354, 0.0656, 0.1304],
+ device='cuda:1'), in_proj_covar=tensor([0.0121, 0.0126, 0.0101, 0.0104, 0.0119, 0.0097, 0.0103, 0.0133],
+ device='cuda:1'), out_proj_covar=tensor([1.1012e-04, 1.3005e-04, 1.0418e-04, 9.8385e-05, 1.2095e-04, 9.4304e-05,
+ 9.8066e-05, 1.2957e-04], device='cuda:1')
+2023-03-08 14:22:31,081 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.655e+02 6.513e+02 7.644e+02 9.367e+02 1.783e+03, threshold=1.529e+03, percent-clipped=1.0
+2023-03-08 14:22:33,366 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2315.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:23:00,842 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2340.0, num_to_drop=1, layers_to_drop={1}
+2023-03-08 14:23:11,910 INFO [train.py:898] (1/4) Epoch 1, batch 2350, loss[loss=0.3664, simple_loss=0.3963, pruned_loss=0.1682, over 18408.00 frames. ], tot_loss[loss=0.4382, simple_loss=0.4509, pruned_loss=0.2136, over 3593203.46 frames. ], batch size: 42, lr: 4.76e-02, grad_scale: 16.0
+2023-03-08 14:23:49,670 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1905, 3.9838, 4.2095, 4.5100, 3.6101, 4.2865, 4.4228, 3.8788],
+ device='cuda:1'), covar=tensor([0.0302, 0.0357, 0.0296, 0.0141, 0.0498, 0.0359, 0.0199, 0.0498],
+ device='cuda:1'), in_proj_covar=tensor([0.0029, 0.0025, 0.0030, 0.0028, 0.0025, 0.0028, 0.0027, 0.0027],
+ device='cuda:1'), out_proj_covar=tensor([1.9414e-05, 1.6860e-05, 2.0968e-05, 1.9666e-05, 1.7024e-05, 1.9162e-05,
+ 1.7137e-05, 1.9319e-05], device='cuda:1')
+2023-03-08 14:24:05,195 INFO [train.py:898] (1/4) Epoch 1, batch 2400, loss[loss=0.3984, simple_loss=0.4344, pruned_loss=0.1812, over 18138.00 frames. ], tot_loss[loss=0.4303, simple_loss=0.4461, pruned_loss=0.2079, over 3584311.65 frames. ], batch size: 62, lr: 4.75e-02, grad_scale: 16.0
+2023-03-08 14:24:05,559 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2401.0, num_to_drop=2, layers_to_drop={2, 3}
+2023-03-08 14:24:17,275 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0432, 5.2436, 4.9695, 5.3009, 5.1175, 4.8693, 4.9411, 4.7629],
+ device='cuda:1'), covar=tensor([0.0076, 0.0076, 0.0142, 0.0044, 0.0079, 0.0208, 0.0164, 0.0166],
+ device='cuda:1'), in_proj_covar=tensor([0.0019, 0.0015, 0.0019, 0.0015, 0.0015, 0.0020, 0.0020, 0.0019],
+ device='cuda:1'), out_proj_covar=tensor([1.4343e-05, 1.1040e-05, 1.5480e-05, 1.0744e-05, 1.1751e-05, 1.6467e-05,
+ 1.6887e-05, 1.4768e-05], device='cuda:1')
+2023-03-08 14:24:17,838 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.759e+02 6.046e+02 7.526e+02 8.987e+02 1.408e+03, threshold=1.505e+03, percent-clipped=0.0
+2023-03-08 14:24:31,708 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3984, 5.1101, 5.0634, 5.3644, 5.1783, 4.7867, 5.1053, 4.6479],
+ device='cuda:1'), covar=tensor([0.0180, 0.0272, 0.0677, 0.0289, 0.0270, 0.0433, 0.0294, 0.0419],
+ device='cuda:1'), in_proj_covar=tensor([0.0080, 0.0094, 0.0102, 0.0090, 0.0091, 0.0111, 0.0108, 0.0109],
+ device='cuda:1'), out_proj_covar=tensor([8.1667e-05, 1.0196e-04, 1.1492e-04, 9.5185e-05, 9.1870e-05, 1.2520e-04,
+ 1.2067e-04, 1.1869e-04], device='cuda:1')
+2023-03-08 14:24:58,108 INFO [train.py:898] (1/4) Epoch 1, batch 2450, loss[loss=0.4476, simple_loss=0.4565, pruned_loss=0.2193, over 15834.00 frames. ], tot_loss[loss=0.4232, simple_loss=0.442, pruned_loss=0.2027, over 3581222.96 frames. ], batch size: 94, lr: 4.74e-02, grad_scale: 16.0
+2023-03-08 14:25:03,395 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.58 vs. limit=5.0
+2023-03-08 14:25:45,564 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2494.0, num_to_drop=1, layers_to_drop={2}
+2023-03-08 14:25:53,140 INFO [train.py:898] (1/4) Epoch 1, batch 2500, loss[loss=0.3765, simple_loss=0.419, pruned_loss=0.167, over 18314.00 frames. ], tot_loss[loss=0.4168, simple_loss=0.4385, pruned_loss=0.198, over 3591758.66 frames. ], batch size: 54, lr: 4.73e-02, grad_scale: 16.0
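The three numbers in each loss[...] record are related: once past the warm-up phase, the reported loss equals 0.5 * simple_loss + pruned_loss (e.g. 0.5 * 0.4344 + 0.1812 = 0.3984 at batch 2400 above, and 0.5 * 0.4314 + 0.1948 = 0.4105 at batch 2000). A sketch of the weighting schedule used in pruned-transducer recipes; the exact ramp constants below are assumptions that happen to be consistent with this log:

```python
def combine_losses(simple_loss, pruned_loss, batch_idx_train,
                   simple_loss_scale=0.5, warm_step=2000):
    # Early on, rely mostly on the cheap "simple" (unpruned) transducer
    # loss and phase the pruned loss in; after warm_step the weights are
    # fixed, matching loss = 0.5 * simple_loss + pruned_loss in the log.
    if batch_idx_train >= warm_step:
        s_scale, p_scale = simple_loss_scale, 1.0
    else:
        frac = batch_idx_train / warm_step
        s_scale = 1.0 - frac * (1.0 - simple_loss_scale)
        p_scale = 0.1 + 0.9 * frac
    return s_scale * simple_loss + p_scale * pruned_loss
```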
+2023-03-08 14:26:05,165 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.211e+02 5.891e+02 7.187e+02 8.781e+02 1.767e+03, threshold=1.437e+03, percent-clipped=2.0
+2023-03-08 14:26:05,530 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2513.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:26:19,732 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2526.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:26:34,674 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2854, 5.7573, 5.7845, 5.6120, 5.2627, 5.6994, 5.8530, 5.5270],
+ device='cuda:1'), covar=tensor([0.0603, 0.0839, 0.0364, 0.0404, 0.0627, 0.0441, 0.0526, 0.0586],
+ device='cuda:1'), in_proj_covar=tensor([0.0141, 0.0135, 0.0102, 0.0117, 0.0129, 0.0108, 0.0108, 0.0113],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:1')
+2023-03-08 14:26:37,716 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2542.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:26:41,540 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.14 vs. limit=2.0
+2023-03-08 14:26:47,803 INFO [train.py:898] (1/4) Epoch 1, batch 2550, loss[loss=0.3967, simple_loss=0.4372, pruned_loss=0.1781, over 17788.00 frames. ], tot_loss[loss=0.4109, simple_loss=0.4352, pruned_loss=0.1936, over 3597818.60 frames. ], batch size: 70, lr: 4.72e-02, grad_scale: 16.0
+2023-03-08 14:26:58,311 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2561.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:27:12,135 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2574.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:27:42,177 INFO [train.py:898] (1/4) Epoch 1, batch 2600, loss[loss=0.3685, simple_loss=0.4091, pruned_loss=0.164, over 18544.00 frames. ], tot_loss[loss=0.406, simple_loss=0.4322, pruned_loss=0.1901, over 3597177.74 frames. ], batch size: 49, lr: 4.71e-02, grad_scale: 16.0
+2023-03-08 14:27:55,525 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.922e+02 6.237e+02 7.424e+02 9.705e+02 2.435e+03, threshold=1.485e+03, percent-clipped=1.0
+2023-03-08 14:28:36,989 INFO [train.py:898] (1/4) Epoch 1, batch 2650, loss[loss=0.4117, simple_loss=0.4472, pruned_loss=0.1881, over 18317.00 frames. ], tot_loss[loss=0.3995, simple_loss=0.4285, pruned_loss=0.1854, over 3596972.24 frames. ], batch size: 54, lr: 4.70e-02, grad_scale: 16.0
+2023-03-08 14:29:14,903 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2685.0, num_to_drop=1, layers_to_drop={0}
+2023-03-08 14:29:27,252 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2696.0, num_to_drop=1, layers_to_drop={1}
+2023-03-08 14:29:32,038 INFO [train.py:898] (1/4) Epoch 1, batch 2700, loss[loss=0.4714, simple_loss=0.4709, pruned_loss=0.236, over 12146.00 frames. ], tot_loss[loss=0.3948, simple_loss=0.4254, pruned_loss=0.1823, over 3587700.67 frames. ], batch size: 130, lr: 4.69e-02, grad_scale: 16.0
+2023-03-08 14:29:45,138 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.900e+02 5.817e+02 6.597e+02 8.414e+02 1.857e+03, threshold=1.319e+03, percent-clipped=1.0
+2023-03-08 14:30:21,402 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1640, 5.3573, 5.2756, 4.9665, 4.7886, 5.0597, 4.7373, 5.0530],
+ device='cuda:1'), covar=tensor([0.0619, 0.0210, 0.0243, 0.0249, 0.0463, 0.0300, 0.0503, 0.0267],
+ device='cuda:1'), in_proj_covar=tensor([0.0067, 0.0077, 0.0064, 0.0066, 0.0076, 0.0075, 0.0081, 0.0072],
+ device='cuda:1'), out_proj_covar=tensor([7.3056e-05, 7.8988e-05, 6.5168e-05, 7.9922e-05, 8.9262e-05, 8.2466e-05,
+ 9.8561e-05, 7.6479e-05], device='cuda:1')
+2023-03-08 14:30:22,461 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2746.0, num_to_drop=2, layers_to_drop={1, 2}
+2023-03-08 14:30:27,330 INFO [train.py:898] (1/4) Epoch 1, batch 2750, loss[loss=0.3181, simple_loss=0.3632, pruned_loss=0.1365, over 18398.00 frames. ], tot_loss[loss=0.3896, simple_loss=0.422, pruned_loss=0.1787, over 3584505.98 frames. ], batch size: 42, lr: 4.68e-02, grad_scale: 16.0
+2023-03-08 14:31:22,090 INFO [train.py:898] (1/4) Epoch 1, batch 2800, loss[loss=0.3661, simple_loss=0.4135, pruned_loss=0.1593, over 18374.00 frames. ], tot_loss[loss=0.3885, simple_loss=0.4214, pruned_loss=0.1779, over 3592565.93 frames. ], batch size: 50, lr: 4.67e-02, grad_scale: 16.0
+2023-03-08 14:31:35,366 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.828e+02 5.568e+02 7.020e+02 9.459e+02 2.422e+03, threshold=1.404e+03, percent-clipped=9.0
+2023-03-08 14:31:47,505 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2824.0, num_to_drop=1, layers_to_drop={1}
+2023-03-08 14:32:16,544 INFO [train.py:898] (1/4) Epoch 1, batch 2850, loss[loss=0.4118, simple_loss=0.4304, pruned_loss=0.1965, over 18297.00 frames. ], tot_loss[loss=0.3836, simple_loss=0.4179, pruned_loss=0.1748, over 3597549.12 frames. ], batch size: 49, lr: 4.66e-02, grad_scale: 16.0
+2023-03-08 14:32:53,646 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2885.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-08 14:33:10,881 INFO [train.py:898] (1/4) Epoch 1, batch 2900, loss[loss=0.3437, simple_loss=0.3971, pruned_loss=0.1451, over 18326.00 frames. ], tot_loss[loss=0.3803, simple_loss=0.4156, pruned_loss=0.1726, over 3582325.00 frames. ], batch size: 56, lr: 4.65e-02, grad_scale: 16.0
+2023-03-08 14:33:23,642 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.949e+02 5.658e+02 6.946e+02 8.980e+02 2.248e+03, threshold=1.389e+03, percent-clipped=2.0
+2023-03-08 14:33:50,382 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2937.0, num_to_drop=1, layers_to_drop={1}
+2023-03-08 14:34:05,721 INFO [train.py:898] (1/4) Epoch 1, batch 2950, loss[loss=0.3311, simple_loss=0.3784, pruned_loss=0.1419, over 18374.00 frames. ], tot_loss[loss=0.3765, simple_loss=0.4127, pruned_loss=0.1702, over 3575982.00 frames. ], batch size: 50, lr: 4.64e-02, grad_scale: 16.0
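grad_scale is the fp16 dynamic loss-scaling factor (this run trains with mixed precision): in the lines above it doubles from 4.0 through 16.0 while gradients stay finite, and it is halved again (back to 8.0 around batch 3000 below) when an overflow is hit. The generic mechanism, shown here with PyTorch's stock GradScaler; the recipe itself may manage the scale by hand, so treat this as the pattern rather than the project's code:

```python
import torch

scaler = torch.cuda.amp.GradScaler(init_scale=2.0, growth_factor=2.0,
                                   backoff_factor=0.5, growth_interval=1000)

def fp16_step(model, optimizer, batch, compute_loss):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = compute_loss(model, batch)
    scaler.scale(loss).backward()   # backward on the scaled loss
    scaler.step(optimizer)          # unscales; skips the step on inf/nan
    scaler.update()                 # doubles or halves the running scale
    return loss.detach(), scaler.get_scale()
```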
+2023-03-08 14:34:54,926 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2996.0, num_to_drop=1, layers_to_drop={2}
+2023-03-08 14:34:57,663 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2998.0, num_to_drop=2, layers_to_drop={1, 3}
+2023-03-08 14:35:00,656 INFO [train.py:898] (1/4) Epoch 1, batch 3000, loss[loss=0.3415, simple_loss=0.3906, pruned_loss=0.1463, over 18374.00 frames. ], tot_loss[loss=0.3753, simple_loss=0.412, pruned_loss=0.1693, over 3563274.15 frames. ], batch size: 56, lr: 4.63e-02, grad_scale: 8.0
+2023-03-08 14:35:00,657 INFO [train.py:923] (1/4) Computing validation loss
+2023-03-08 14:35:12,697 INFO [train.py:932] (1/4) Epoch 1, validation: loss=0.2954, simple_loss=0.387, pruned_loss=0.102, over 944034.00 frames.
+2023-03-08 14:35:12,698 INFO [train.py:933] (1/4) Maximum memory allocated so far is 17755MB
+2023-03-08 14:35:26,916 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.611e+02 5.968e+02 7.272e+02 9.035e+02 2.166e+03, threshold=1.454e+03, percent-clipped=4.0
+2023-03-08 14:35:32,486 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3019.0, num_to_drop=1, layers_to_drop={1}
+2023-03-08 14:35:57,666 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3041.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:36:00,948 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3044.0, num_to_drop=1, layers_to_drop={0}
+2023-03-08 14:36:08,352 INFO [train.py:898] (1/4) Epoch 1, batch 3050, loss[loss=0.3899, simple_loss=0.4281, pruned_loss=0.1758, over 17874.00 frames. ], tot_loss[loss=0.3715, simple_loss=0.4094, pruned_loss=0.1668, over 3574191.58 frames. ], batch size: 70, lr: 4.62e-02, grad_scale: 8.0
+2023-03-08 14:36:41,084 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3080.0, num_to_drop=2, layers_to_drop={0, 2}
+2023-03-08 14:37:00,326 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6000, 4.5231, 4.2407, 4.6728, 4.5396, 4.2745, 4.4813, 3.9896],
+ device='cuda:1'), covar=tensor([0.0272, 0.0273, 0.0869, 0.0354, 0.0221, 0.0345, 0.0309, 0.0482],
+ device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0116, 0.0144, 0.0112, 0.0107, 0.0134, 0.0127, 0.0134],
+ device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-08 14:37:04,076 INFO [train.py:898] (1/4) Epoch 1, batch 3100, loss[loss=0.3554, simple_loss=0.3822, pruned_loss=0.1643, over 18407.00 frames. ], tot_loss[loss=0.3677, simple_loss=0.4068, pruned_loss=0.1643, over 3576114.49 frames. ], batch size: 42, lr: 4.61e-02, grad_scale: 8.0
+2023-03-08 14:37:18,600 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.360e+02 5.789e+02 7.194e+02 8.721e+02 2.161e+03, threshold=1.439e+03, percent-clipped=3.0
+2023-03-08 14:37:59,560 INFO [train.py:898] (1/4) Epoch 1, batch 3150, loss[loss=0.3506, simple_loss=0.378, pruned_loss=0.1616, over 18375.00 frames. ], tot_loss[loss=0.364, simple_loss=0.404, pruned_loss=0.162, over 3574656.32 frames. ], batch size: 42, lr: 4.60e-02, grad_scale: 8.0
+2023-03-08 14:38:28,757 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6395, 4.8188, 4.7197, 4.8274, 4.1538, 5.0237, 4.7423, 4.9560],
+ device='cuda:1'), covar=tensor([0.0377, 0.0304, 0.0233, 0.0405, 0.2383, 0.0227, 0.0310, 0.0160],
+ device='cuda:1'), in_proj_covar=tensor([0.0038, 0.0040, 0.0031, 0.0040, 0.0063, 0.0031, 0.0036, 0.0035],
+ device='cuda:1'), out_proj_covar=tensor([2.0003e-05, 2.3782e-05, 1.4744e-05, 2.4550e-05, 4.4409e-05, 1.4000e-05,
+ 1.7376e-05, 1.7060e-05], device='cuda:1')
+2023-03-08 14:38:31,219 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3180.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:38:36,001 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0
+2023-03-08 14:38:38,083 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=2.03 vs. limit=2.0
+2023-03-08 14:38:54,791 INFO [train.py:898] (1/4) Epoch 1, batch 3200, loss[loss=0.3094, simple_loss=0.356, pruned_loss=0.1314, over 18437.00 frames. ], tot_loss[loss=0.3607, simple_loss=0.4015, pruned_loss=0.1599, over 3579078.37 frames. ], batch size: 42, lr: 4.59e-02, grad_scale: 8.0
+2023-03-08 14:39:04,479 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=7.02 vs. limit=5.0
+2023-03-08 14:39:08,066 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 6.227e+02 7.762e+02 9.563e+02 2.131e+03, threshold=1.552e+03, percent-clipped=3.0
+2023-03-08 14:39:20,766 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7200, 4.1941, 3.8672, 3.8443, 3.5724, 4.0284, 4.0795, 3.6512],
+ device='cuda:1'), covar=tensor([0.0152, 0.0114, 0.0147, 0.0162, 0.0286, 0.0106, 0.0077, 0.0245],
+ device='cuda:1'), in_proj_covar=tensor([0.0023, 0.0020, 0.0022, 0.0023, 0.0030, 0.0021, 0.0020, 0.0028],
+ device='cuda:1'), out_proj_covar=tensor([1.9503e-05, 1.4243e-05, 1.7159e-05, 1.9501e-05, 2.2809e-05, 1.4629e-05,
+ 1.4056e-05, 2.2185e-05], device='cuda:1')
+2023-03-08 14:39:34,394 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.17 vs. limit=5.0
+2023-03-08 14:39:49,501 INFO [train.py:898] (1/4) Epoch 1, batch 3250, loss[loss=0.3462, simple_loss=0.3961, pruned_loss=0.1481, over 18495.00 frames. ], tot_loss[loss=0.3614, simple_loss=0.402, pruned_loss=0.1604, over 3568308.51 frames. ], batch size: 53, lr: 4.58e-02, grad_scale: 8.0
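Each [zipformer.py:625] line comes from one encoder stack (the distinct warmup windows, 666.7-1333.3 up through 3333.3-4000, belong to different stacks) and reports how many of that stack's layers are randomly skipped for the current batch; once batch_count passes warmup_end the messages settle at num_to_drop=0, layers_to_drop=set(). A guess at the shape of that schedule, consistent with the messages above but not taken from the zipformer source:

```python
import random

def layers_to_drop(batch_count, warmup_begin, warmup_end,
                   num_layers, max_to_drop=2, rng=random):
    # Past the warm-up window nothing is dropped, matching the
    # num_to_drop=0 / layers_to_drop=set() lines in the log.
    if batch_count >= warmup_end:
        return set()
    # Inside (or before) the window, each of up to max_to_drop
    # candidates is dropped with a probability that decays to zero
    # as batch_count approaches warmup_end.
    p = min(1.0, (warmup_end - batch_count) / (warmup_end - warmup_begin))
    num_to_drop = sum(rng.random() < p for _ in range(max_to_drop))
    return set(rng.sample(range(num_layers), num_to_drop))
```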
+2023-03-08 14:40:15,228 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1715, 4.8264, 4.7587, 4.6108, 4.4218, 4.5563, 4.4267, 4.5912],
+ device='cuda:1'), covar=tensor([0.0319, 0.0202, 0.0200, 0.0221, 0.0344, 0.0225, 0.0366, 0.0251],
+ device='cuda:1'), in_proj_covar=tensor([0.0070, 0.0083, 0.0074, 0.0071, 0.0082, 0.0085, 0.0090, 0.0082],
+ device='cuda:1'), out_proj_covar=tensor([8.8391e-05, 9.6404e-05, 8.2174e-05, 9.8171e-05, 1.1376e-04, 1.0760e-04,
+ 1.2804e-04, 1.0448e-04], device='cuda:1')
+2023-03-08 14:40:30,581 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3288.0, num_to_drop=1, layers_to_drop={1}
+2023-03-08 14:40:35,789 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3293.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:40:40,874 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1946, 4.9173, 4.2504, 4.4671, 3.5794, 3.3117, 4.3505, 4.3325],
+ device='cuda:1'), covar=tensor([0.0148, 0.0068, 0.0150, 0.0077, 0.0380, 0.0484, 0.0122, 0.0057],
+ device='cuda:1'), in_proj_covar=tensor([0.0025, 0.0017, 0.0014, 0.0019, 0.0029, 0.0026, 0.0019, 0.0014],
+ device='cuda:1'), out_proj_covar=tensor([2.7051e-05, 1.7968e-05, 1.3130e-05, 2.0534e-05, 3.2376e-05, 2.9344e-05,
+ 1.9874e-05, 1.2415e-05], device='cuda:1')
+2023-03-08 14:40:45,134 INFO [train.py:898] (1/4) Epoch 1, batch 3300, loss[loss=0.4029, simple_loss=0.427, pruned_loss=0.1893, over 12668.00 frames. ], tot_loss[loss=0.3584, simple_loss=0.4003, pruned_loss=0.1582, over 3579091.75 frames. ], batch size: 131, lr: 4.57e-02, grad_scale: 8.0
+2023-03-08 14:40:58,791 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.218e+02 5.885e+02 6.635e+02 8.607e+02 2.408e+03, threshold=1.327e+03, percent-clipped=3.0
+2023-03-08 14:41:29,358 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3341.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:41:38,130 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3349.0, num_to_drop=2, layers_to_drop={0, 1}
+2023-03-08 14:41:40,512 INFO [train.py:898] (1/4) Epoch 1, batch 3350, loss[loss=0.4146, simple_loss=0.4289, pruned_loss=0.2002, over 12575.00 frames. ], tot_loss[loss=0.356, simple_loss=0.399, pruned_loss=0.1565, over 3572861.04 frames. ], batch size: 131, lr: 4.56e-02, grad_scale: 8.0
+2023-03-08 14:42:05,776 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.2402, 3.0197, 2.7828, 2.9767, 2.9904, 3.1081, 2.8795, 3.1853],
+ device='cuda:1'), covar=tensor([0.0592, 0.0710, 0.0512, 0.0403, 0.1362, 0.0821, 0.0410, 0.0255],
+ device='cuda:1'), in_proj_covar=tensor([0.0042, 0.0041, 0.0056, 0.0049, 0.0049, 0.0042, 0.0040, 0.0047],
+ device='cuda:1'), out_proj_covar=tensor([4.8814e-05, 5.1525e-05, 5.3662e-05, 4.3793e-05, 5.3121e-05, 4.4422e-05,
+ 3.9917e-05, 3.9356e-05], device='cuda:1')
+2023-03-08 14:42:06,530 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3375.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:42:22,283 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3389.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:42:36,012 INFO [train.py:898] (1/4) Epoch 1, batch 3400, loss[loss=0.3649, simple_loss=0.4128, pruned_loss=0.1585, over 18396.00 frames. ], tot_loss[loss=0.3552, simple_loss=0.3988, pruned_loss=0.1558, over 3567632.98 frames. ], batch size: 52, lr: 4.55e-02, grad_scale: 8.0
+2023-03-08 14:42:50,976 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.931e+02 5.703e+02 6.610e+02 8.336e+02 1.336e+03, threshold=1.322e+03, percent-clipped=1.0
+2023-03-08 14:43:31,443 INFO [train.py:898] (1/4) Epoch 1, batch 3450, loss[loss=0.3029, simple_loss=0.3607, pruned_loss=0.1226, over 18418.00 frames. ], tot_loss[loss=0.3539, simple_loss=0.398, pruned_loss=0.1549, over 3567852.24 frames. ], batch size: 48, lr: 4.54e-02, grad_scale: 8.0
+2023-03-08 14:44:04,070 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3480.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:44:24,707 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.34 vs. limit=5.0
+2023-03-08 14:44:27,334 INFO [train.py:898] (1/4) Epoch 1, batch 3500, loss[loss=0.3114, simple_loss=0.37, pruned_loss=0.1264, over 18265.00 frames. ], tot_loss[loss=0.3518, simple_loss=0.3967, pruned_loss=0.1535, over 3579505.65 frames. ], batch size: 47, lr: 4.53e-02, grad_scale: 8.0
+2023-03-08 14:44:41,378 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=5.17 vs. limit=5.0
+2023-03-08 14:44:42,688 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.394e+02 6.336e+02 7.785e+02 9.331e+02 2.014e+03, threshold=1.557e+03, percent-clipped=6.0
+2023-03-08 14:44:58,032 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3528.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:45:16,061 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3546.0, num_to_drop=1, layers_to_drop={1}
+2023-03-08 14:45:20,678 INFO [train.py:898] (1/4) Epoch 1, batch 3550, loss[loss=0.3398, simple_loss=0.401, pruned_loss=0.1393, over 18561.00 frames. ], tot_loss[loss=0.3519, simple_loss=0.3965, pruned_loss=0.1537, over 3582997.36 frames. ], batch size: 54, lr: 4.51e-02, grad_scale: 8.0
+2023-03-08 14:45:43,067 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=2.02 vs. limit=2.0
+2023-03-08 14:46:04,710 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3593.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:46:12,806 INFO [train.py:898] (1/4) Epoch 1, batch 3600, loss[loss=0.386, simple_loss=0.4216, pruned_loss=0.1752, over 17750.00 frames. ], tot_loss[loss=0.3496, simple_loss=0.3952, pruned_loss=0.1521, over 3594006.21 frames. ], batch size: 70, lr: 4.50e-02, grad_scale: 8.0
+2023-03-08 14:46:19,782 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3607.0, num_to_drop=2, layers_to_drop={1, 2}
+2023-03-08 14:46:26,694 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.777e+02 7.280e+02 9.972e+02 1.228e+03 2.916e+03, threshold=1.994e+03, percent-clipped=11.0
+2023-03-08 14:46:41,185 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0855, 2.5030, 1.9382, 2.8526, 2.7633, 3.2239, 2.0792, 2.7567],
+ device='cuda:1'), covar=tensor([0.0179, 0.0769, 0.1256, 0.0280, 0.0317, 0.0153, 0.1025, 0.0312],
+ device='cuda:1'), in_proj_covar=tensor([0.0030, 0.0048, 0.0050, 0.0028, 0.0035, 0.0030, 0.0046, 0.0036],
+ device='cuda:1'), out_proj_covar=tensor([2.3111e-05, 4.9475e-05, 5.2859e-05, 2.7653e-05, 2.9226e-05, 2.4568e-05,
+ 4.4564e-05, 3.0729e-05], device='cuda:1')
+2023-03-08 14:47:17,204 INFO [train.py:898] (1/4) Epoch 2, batch 0, loss[loss=0.3551, simple_loss=0.3992, pruned_loss=0.1555, over 18272.00 frames. ], tot_loss[loss=0.3551, simple_loss=0.3992, pruned_loss=0.1555, over 18272.00 frames. ], batch size: 49, lr: 4.41e-02, grad_scale: 8.0
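The [zipformer.py:1455] dumps scattered through the log are a periodic health check on the self-attention heads: each row is a per-head summary of the entropy of the attention weights (values collapsing toward zero would indicate heads attending near-deterministically to single frames), followed by covariance statistics of the projection weights. The core quantity, in an illustrative stand-alone form rather than the actual zipformer code:

```python
import torch

def attn_weights_entropy(attn):
    """attn: (batch, num_heads, tgt_len, src_len), each row summing
    to 1. Returns the mean attention entropy per head, in nats; an
    illustrative reconstruction of the logged statistic."""
    p = attn.clamp_min(1e-20)
    ent = -(p * p.log()).sum(dim=-1)   # (batch, num_heads, tgt_len)
    return ent.mean(dim=(0, 2))        # average over batch and positions
```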
+2023-03-08 14:47:17,204 INFO [train.py:923] (1/4) Computing validation loss
+2023-03-08 14:47:29,037 INFO [train.py:932] (1/4) Epoch 2, validation: loss=0.2643, simple_loss=0.3556, pruned_loss=0.08646, over 944034.00 frames.
+2023-03-08 14:47:29,038 INFO [train.py:933] (1/4) Maximum memory allocated so far is 18857MB
+2023-03-08 14:47:35,867 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3641.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:47:39,173 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3644.0, num_to_drop=1, layers_to_drop={2}
+2023-03-08 14:48:15,996 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3675.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:48:17,151 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3676.0, num_to_drop=1, layers_to_drop={0}
+2023-03-08 14:48:26,909 INFO [train.py:898] (1/4) Epoch 2, batch 50, loss[loss=0.3541, simple_loss=0.4101, pruned_loss=0.1491, over 18358.00 frames. ], tot_loss[loss=0.3352, simple_loss=0.3849, pruned_loss=0.1428, over 811669.61 frames. ], batch size: 55, lr: 4.40e-02, grad_scale: 8.0
+2023-03-08 14:48:29,731 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6158, 2.8490, 1.4883, 3.5585, 2.9056, 3.7636, 1.8828, 3.4566],
+ device='cuda:1'), covar=tensor([0.0128, 0.0983, 0.2068, 0.0305, 0.0478, 0.0116, 0.1396, 0.0238],
+ device='cuda:1'), in_proj_covar=tensor([0.0033, 0.0054, 0.0056, 0.0030, 0.0039, 0.0033, 0.0051, 0.0039],
+ device='cuda:1'), out_proj_covar=tensor([2.5841e-05, 5.5750e-05, 6.0093e-05, 3.0443e-05, 3.2675e-05, 2.6803e-05,
+ 5.0193e-05, 3.3872e-05], device='cuda:1')
+2023-03-08 14:49:00,565 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 4.344e+02 6.253e+02 7.943e+02 9.806e+02 1.695e+03, threshold=1.589e+03, percent-clipped=0.0
+2023-03-08 14:49:11,435 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3723.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:49:25,278 INFO [train.py:898] (1/4) Epoch 2, batch 100, loss[loss=0.3677, simple_loss=0.4111, pruned_loss=0.1622, over 16210.00 frames. ], tot_loss[loss=0.3328, simple_loss=0.3829, pruned_loss=0.1413, over 1432461.07 frames. ], batch size: 94, lr: 4.39e-02, grad_scale: 8.0
+2023-03-08 14:49:27,949 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3737.0, num_to_drop=2, layers_to_drop={2, 3}
+2023-03-08 14:50:22,856 INFO [train.py:898] (1/4) Epoch 2, batch 150, loss[loss=0.3381, simple_loss=0.3895, pruned_loss=0.1433, over 18487.00 frames. ], tot_loss[loss=0.3365, simple_loss=0.3852, pruned_loss=0.144, over 1906469.68 frames. ], batch size: 51, lr: 4.38e-02, grad_scale: 8.0
+2023-03-08 14:50:55,228 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.556e+02 6.308e+02 7.748e+02 1.009e+03 1.866e+03, threshold=1.550e+03, percent-clipped=3.0
+2023-03-08 14:51:21,235 INFO [train.py:898] (1/4) Epoch 2, batch 200, loss[loss=0.3225, simple_loss=0.3786, pruned_loss=0.1332, over 18563.00 frames. ], tot_loss[loss=0.3375, simple_loss=0.3863, pruned_loss=0.1443, over 2287416.12 frames. ], batch size: 54, lr: 4.37e-02, grad_scale: 8.0
+2023-03-08 14:52:20,108 INFO [train.py:898] (1/4) Epoch 2, batch 250, loss[loss=0.2621, simple_loss=0.3317, pruned_loss=0.09631, over 18364.00 frames. ], tot_loss[loss=0.3358, simple_loss=0.3844, pruned_loss=0.1436, over 2578190.61 frames. ], batch size: 46, lr: 4.36e-02, grad_scale: 8.0
+2023-03-08 14:52:39,007 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7275, 4.5778, 4.6690, 4.7598, 4.4936, 4.3988, 4.9462, 4.8401],
+ device='cuda:1'), covar=tensor([0.0113, 0.0201, 0.0171, 0.0114, 0.0166, 0.0169, 0.0158, 0.0146],
+ device='cuda:1'), in_proj_covar=tensor([0.0060, 0.0054, 0.0052, 0.0054, 0.0056, 0.0063, 0.0053, 0.0051],
+ device='cuda:1'), out_proj_covar=tensor([9.6627e-05, 7.4493e-05, 7.2855e-05, 7.1532e-05, 8.8162e-05, 1.0249e-04,
+ 7.5550e-05, 6.7648e-05], device='cuda:1')
+2023-03-08 14:52:39,972 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3902.0, num_to_drop=1, layers_to_drop={1}
+2023-03-08 14:52:47,838 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6781, 4.6009, 4.7011, 4.7551, 4.4952, 4.3608, 4.9819, 4.8328],
+ device='cuda:1'), covar=tensor([0.0121, 0.0164, 0.0215, 0.0116, 0.0182, 0.0173, 0.0132, 0.0154],
+ device='cuda:1'), in_proj_covar=tensor([0.0060, 0.0053, 0.0051, 0.0054, 0.0056, 0.0063, 0.0052, 0.0051],
+ device='cuda:1'), out_proj_covar=tensor([9.6600e-05, 7.4064e-05, 7.2892e-05, 7.1620e-05, 8.7935e-05, 1.0247e-04,
+ 7.4909e-05, 6.7723e-05], device='cuda:1')
+2023-03-08 14:52:53,090 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.269e+02 6.436e+02 8.265e+02 1.082e+03 2.310e+03, threshold=1.653e+03, percent-clipped=6.0
+2023-03-08 14:52:58,059 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1164, 4.1787, 3.8202, 3.5348, 3.8596, 3.7135, 3.5978, 3.3319],
+ device='cuda:1'), covar=tensor([0.0860, 0.0234, 0.0165, 0.0396, 0.0141, 0.0383, 0.0221, 0.0587],
+ device='cuda:1'), in_proj_covar=tensor([0.0028, 0.0022, 0.0020, 0.0022, 0.0014, 0.0025, 0.0017, 0.0026],
+ device='cuda:1'), out_proj_covar=tensor([1.7694e-05, 1.3486e-05, 1.1419e-05, 1.3337e-05, 7.4679e-06, 1.6427e-05,
+ 1.0088e-05, 1.6503e-05], device='cuda:1')
+2023-03-08 14:53:08,272 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3926.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:53:09,935 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.01 vs. limit=2.0
+2023-03-08 14:53:18,697 INFO [train.py:898] (1/4) Epoch 2, batch 300, loss[loss=0.3412, simple_loss=0.3928, pruned_loss=0.1448, over 17908.00 frames. ], tot_loss[loss=0.3342, simple_loss=0.3838, pruned_loss=0.1423, over 2802511.75 frames. ], batch size: 65, lr: 4.35e-02, grad_scale: 8.0
+2023-03-08 14:53:28,559 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9624, 5.0161, 3.6528, 4.3619, 4.8677, 3.4495, 4.6257, 4.1376],
+ device='cuda:1'), covar=tensor([0.0061, 0.0084, 0.1352, 0.0087, 0.0063, 0.0641, 0.0228, 0.0408],
+ device='cuda:1'), in_proj_covar=tensor([0.0044, 0.0041, 0.0110, 0.0044, 0.0037, 0.0076, 0.0068, 0.0064],
+ device='cuda:1'), out_proj_covar=tensor([3.3585e-05, 3.3035e-05, 9.9011e-05, 3.3399e-05, 2.8137e-05, 6.8321e-05,
+ 6.2155e-05, 5.9343e-05], device='cuda:1')
+2023-03-08 14:53:29,627 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3944.0, num_to_drop=1, layers_to_drop={0}
+2023-03-08 14:53:31,029 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.75 vs. limit=2.0
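The validation blocks above (train.py:923/932/933) interleave a full pass over the dev sets with training; the reported loss is frame-weighted over all 944034.00 dev frames, and peak CUDA memory is logged afterwards. Schematically, with illustrative helper names rather than the recipe's own:

```python
import torch

def run_validation(model, valid_loader, compute_loss, epoch):
    model.eval()
    tot = {"loss": 0.0, "frames": 0.0}
    with torch.no_grad():
        for batch in valid_loader:
            loss_sum, num_frames = compute_loss(model, batch)
            tot["loss"] += loss_sum.item()
            tot["frames"] += num_frames
    model.train()
    # Frame-weighted average, matching the "over 944034.00 frames" lines.
    print(f"Epoch {epoch}, validation: loss={tot['loss'] / tot['frames']:.4g}, "
          f"over {tot['frames']:.2f} frames.")
    print(f"Maximum memory allocated so far is "
          f"{torch.cuda.max_memory_allocated() // (1024 * 1024)}MB")
```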
+2023-03-08 14:53:38,789 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3352, 3.1760, 2.4624, 2.9063, 3.1476, 3.3398, 2.6852, 2.7311],
+ device='cuda:1'), covar=tensor([0.0415, 0.0336, 0.0678, 0.0371, 0.1146, 0.0484, 0.0415, 0.0397],
+ device='cuda:1'), in_proj_covar=tensor([0.0042, 0.0038, 0.0065, 0.0055, 0.0049, 0.0039, 0.0043, 0.0052],
+ device='cuda:1'), out_proj_covar=tensor([5.2821e-05, 5.1108e-05, 7.0420e-05, 5.5276e-05, 6.0691e-05, 4.6404e-05,
+ 4.6131e-05, 5.0919e-05], device='cuda:1')
+2023-03-08 14:53:46,584 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1275, 4.9152, 4.8640, 4.5532, 4.4729, 4.6520, 4.2699, 4.5941],
+ device='cuda:1'), covar=tensor([0.0302, 0.0263, 0.0197, 0.0225, 0.0453, 0.0256, 0.0646, 0.0298],
+ device='cuda:1'), in_proj_covar=tensor([0.0075, 0.0094, 0.0082, 0.0076, 0.0087, 0.0092, 0.0102, 0.0090],
+ device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-08 14:54:17,748 INFO [train.py:898] (1/4) Epoch 2, batch 350, loss[loss=0.2967, simple_loss=0.3574, pruned_loss=0.118, over 18399.00 frames. ], tot_loss[loss=0.3314, simple_loss=0.382, pruned_loss=0.1405, over 2983131.45 frames. ], batch size: 48, lr: 4.34e-02, grad_scale: 8.0
+2023-03-08 14:54:20,262 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.7462, 6.1721, 5.9145, 5.8653, 5.5538, 6.0236, 6.2549, 6.1025],
+ device='cuda:1'), covar=tensor([0.0604, 0.0532, 0.0302, 0.0460, 0.1220, 0.0337, 0.0429, 0.0447],
+ device='cuda:1'), in_proj_covar=tensor([0.0201, 0.0198, 0.0151, 0.0186, 0.0227, 0.0163, 0.0172, 0.0166],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-08 14:54:20,437 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3987.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:54:26,776 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3992.0, num_to_drop=1, layers_to_drop={0}
+2023-03-08 14:54:26,969 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2411, 3.1339, 1.5760, 3.0543, 2.6764, 4.3277, 2.0154, 3.6621],
+ device='cuda:1'), covar=tensor([0.0129, 0.0942, 0.2159, 0.0429, 0.0668, 0.0121, 0.1470, 0.0324],
+ device='cuda:1'), in_proj_covar=tensor([0.0039, 0.0064, 0.0067, 0.0034, 0.0051, 0.0038, 0.0063, 0.0050],
+ device='cuda:1'), out_proj_covar=tensor([3.0622e-05, 6.7860e-05, 7.4593e-05, 3.6296e-05, 4.5530e-05, 3.0598e-05,
+ 6.2817e-05, 4.4569e-05], device='cuda:1')
+2023-03-08 14:54:56,245 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.948e+02 5.340e+02 6.954e+02 8.950e+02 2.037e+03, threshold=1.391e+03, percent-clipped=3.0
+2023-03-08 14:55:18,857 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4032.0, num_to_drop=1, layers_to_drop={3}
+2023-03-08 14:55:22,115 INFO [train.py:898] (1/4) Epoch 2, batch 400, loss[loss=0.3047, simple_loss=0.3554, pruned_loss=0.127, over 18262.00 frames. ], tot_loss[loss=0.3301, simple_loss=0.3808, pruned_loss=0.1397, over 3105154.64 frames. ], batch size: 45, lr: 4.33e-02, grad_scale: 8.0
+2023-03-08 14:56:22,108 INFO [train.py:898] (1/4) Epoch 2, batch 450, loss[loss=0.3183, simple_loss=0.3568, pruned_loss=0.1399, over 18497.00 frames. ], tot_loss[loss=0.3292, simple_loss=0.3799, pruned_loss=0.1392, over 3219387.97 frames. ], batch size: 44, lr: 4.31e-02, grad_scale: 8.0
+2023-03-08 14:56:56,419 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.730e+02 6.143e+02 7.834e+02 1.006e+03 1.697e+03, threshold=1.567e+03, percent-clipped=3.0
+2023-03-08 14:56:57,887 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7077, 3.1577, 1.1940, 3.5657, 2.5712, 4.0730, 2.1104, 3.5781],
+ device='cuda:1'), covar=tensor([0.0127, 0.0621, 0.1928, 0.0203, 0.0548, 0.0060, 0.1079, 0.0228],
+ device='cuda:1'), in_proj_covar=tensor([0.0039, 0.0066, 0.0069, 0.0035, 0.0052, 0.0036, 0.0065, 0.0052],
+ device='cuda:1'), out_proj_covar=tensor([3.2407e-05, 7.0276e-05, 7.6117e-05, 3.9430e-05, 4.7058e-05, 3.0704e-05,
+ 6.3976e-05, 4.7543e-05], device='cuda:1')
+2023-03-08 14:57:21,301 INFO [train.py:898] (1/4) Epoch 2, batch 500, loss[loss=0.3341, simple_loss=0.3823, pruned_loss=0.1429, over 17659.00 frames. ], tot_loss[loss=0.3277, simple_loss=0.3786, pruned_loss=0.1384, over 3294951.38 frames. ], batch size: 70, lr: 4.30e-02, grad_scale: 8.0
+2023-03-08 14:57:28,801 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4141.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:58:19,880 INFO [train.py:898] (1/4) Epoch 2, batch 550, loss[loss=0.3464, simple_loss=0.3874, pruned_loss=0.1527, over 17311.00 frames. ], tot_loss[loss=0.3285, simple_loss=0.3794, pruned_loss=0.1388, over 3355694.39 frames. ], batch size: 78, lr: 4.29e-02, grad_scale: 8.0
+2023-03-08 14:58:42,180 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4202.0, num_to_drop=1, layers_to_drop={2}
+2023-03-08 14:58:42,296 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4202.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 14:58:56,103 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.937e+02 6.401e+02 7.607e+02 9.685e+02 1.732e+03, threshold=1.521e+03, percent-clipped=4.0
+2023-03-08 14:59:16,557 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.84 vs. limit=2.0
+2023-03-08 14:59:20,566 INFO [train.py:898] (1/4) Epoch 2, batch 600, loss[loss=0.3722, simple_loss=0.4154, pruned_loss=0.1645, over 18495.00 frames. ], tot_loss[loss=0.3248, simple_loss=0.3764, pruned_loss=0.1366, over 3413200.03 frames. ], batch size: 53, lr: 4.28e-02, grad_scale: 8.0
+2023-03-08 14:59:39,658 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4250.0, num_to_drop=1, layers_to_drop={0}
+2023-03-08 14:59:47,604 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=5.01 vs. limit=5.0
+2023-03-08 15:00:17,451 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4282.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 15:00:20,598 INFO [train.py:898] (1/4) Epoch 2, batch 650, loss[loss=0.3446, simple_loss=0.4006, pruned_loss=0.1443, over 18215.00 frames. ], tot_loss[loss=0.325, simple_loss=0.3767, pruned_loss=0.1366, over 3449268.75 frames. ], batch size: 60, lr: 4.27e-02, grad_scale: 8.0
+2023-03-08 15:00:24,340 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4288.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 15:00:34,838 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.33 vs. limit=5.0
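The [scaling.py:679] lines report a per-module "whitening" diagnostic: a metric that equals 1.0 when the module's output covariance (within each channel group) is proportional to the identity and grows as the activations become anisotropic, compared against a limit (2.0 or 5.0 above) beyond which a corrective constraint would kick in. One plausible way to compute such a metric; the exact formula used by scaling.py is an assumption here:

```python
import torch

def whitening_metric(x, num_groups):
    """x: (num_frames, num_channels). Returns E[lambda^2] / E[lambda]^2
    over the eigenvalues of each group's covariance, which is 1.0 for
    perfectly "white" features. Illustrative, not the icefall formula."""
    n, c = x.shape
    xg = x.reshape(n, num_groups, c // num_groups).transpose(0, 1)
    xg = xg - xg.mean(dim=1, keepdim=True)
    cov = xg.transpose(1, 2) @ xg / n                     # (groups, d, d)
    d = cov.shape[-1]
    mean_eig = cov.diagonal(dim1=-2, dim2=-1).sum(-1) / d  # trace(C) / d
    mean_eig_sq = (cov * cov).sum(dim=(-2, -1)) / d        # trace(C @ C) / d
    return (mean_eig_sq / mean_eig.clamp_min(1e-20).pow(2)).mean().item()
```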
+2023-03-08 15:00:55,826 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.352e+02 6.066e+02 7.398e+02 9.036e+02 1.375e+03, threshold=1.480e+03, percent-clipped=0.0
+2023-03-08 15:01:17,525 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4332.0, num_to_drop=1, layers_to_drop={0}
+2023-03-08 15:01:20,628 INFO [train.py:898] (1/4) Epoch 2, batch 700, loss[loss=0.334, simple_loss=0.3929, pruned_loss=0.1375, over 18259.00 frames. ], tot_loss[loss=0.325, simple_loss=0.3767, pruned_loss=0.1366, over 3468365.16 frames. ], batch size: 60, lr: 4.26e-02, grad_scale: 8.0
+2023-03-08 15:01:34,627 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. limit=2.0
+2023-03-08 15:01:37,716 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4349.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 15:02:14,427 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4380.0, num_to_drop=1, layers_to_drop={1}
+2023-03-08 15:02:19,786 INFO [train.py:898] (1/4) Epoch 2, batch 750, loss[loss=0.2727, simple_loss=0.3267, pruned_loss=0.1093, over 17696.00 frames. ], tot_loss[loss=0.3259, simple_loss=0.3776, pruned_loss=0.137, over 3501279.61 frames. ], batch size: 39, lr: 4.25e-02, grad_scale: 8.0
+2023-03-08 15:02:20,118 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6487, 3.7591, 3.7006, 3.5199, 3.3830, 2.8085, 3.0749, 2.9219],
+ device='cuda:1'), covar=tensor([0.0433, 0.0258, 0.0262, 0.0118, 0.0201, 0.0342, 0.0280, 0.0225],
+ device='cuda:1'), in_proj_covar=tensor([0.0028, 0.0026, 0.0027, 0.0025, 0.0033, 0.0026, 0.0029, 0.0030],
+ device='cuda:1'), out_proj_covar=tensor([6.1927e-05, 6.7178e-05, 5.1642e-05, 5.4722e-05, 7.3063e-05, 5.6971e-05,
+ 5.6194e-05, 6.3966e-05], device='cuda:1')
+2023-03-08 15:02:30,476 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9290, 3.9405, 4.3143, 3.7398, 2.7535, 3.9048, 4.3535, 2.6852],
+ device='cuda:1'), covar=tensor([0.0114, 0.0103, 0.0064, 0.0101, 0.0571, 0.0124, 0.0048, 0.0481],
+ device='cuda:1'), in_proj_covar=tensor([0.0036, 0.0033, 0.0032, 0.0031, 0.0059, 0.0033, 0.0027, 0.0054],
+ device='cuda:1'), out_proj_covar=tensor([3.4463e-05, 2.7962e-05, 2.8331e-05, 2.9950e-05, 5.2451e-05, 2.7043e-05,
+ 2.5311e-05, 4.9772e-05], device='cuda:1')
+2023-03-08 15:02:54,752 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.989e+02 6.069e+02 8.253e+02 1.039e+03 2.142e+03, threshold=1.651e+03, percent-clipped=4.0
+2023-03-08 15:03:19,466 INFO [train.py:898] (1/4) Epoch 2, batch 800, loss[loss=0.3091, simple_loss=0.3728, pruned_loss=0.1227, over 18067.00 frames. ], tot_loss[loss=0.3243, simple_loss=0.3759, pruned_loss=0.1364, over 3523905.07 frames. ], batch size: 65, lr: 4.24e-02, grad_scale: 8.0
+2023-03-08 15:03:23,086 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8880, 4.9409, 4.7747, 4.6171, 4.6975, 5.1601, 4.9253, 4.8067],
+ device='cuda:1'), covar=tensor([0.0555, 0.0710, 0.0653, 0.0559, 0.1290, 0.0717, 0.0512, 0.1420],
+ device='cuda:1'), in_proj_covar=tensor([0.0155, 0.0134, 0.0128, 0.0115, 0.0170, 0.0167, 0.0116, 0.0174],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002],
+ device='cuda:1')
+2023-03-08 15:03:57,063 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.6323, 5.9323, 5.7046, 5.6963, 5.1588, 5.8991, 6.0452, 5.8522],
+ device='cuda:1'), covar=tensor([0.0681, 0.0562, 0.0293, 0.0468, 0.1519, 0.0361, 0.0408, 0.0471],
+ device='cuda:1'), in_proj_covar=tensor([0.0201, 0.0200, 0.0148, 0.0194, 0.0245, 0.0177, 0.0183, 0.0168],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-08 15:04:19,436 INFO [train.py:898] (1/4) Epoch 2, batch 850, loss[loss=0.2657, simple_loss=0.3326, pruned_loss=0.09939, over 18559.00 frames. ], tot_loss[loss=0.3223, simple_loss=0.3747, pruned_loss=0.1349, over 3546703.16 frames. ], batch size: 45, lr: 4.23e-02, grad_scale: 8.0
+2023-03-08 15:04:33,210 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4497.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 15:04:53,483 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.476e+02 6.096e+02 7.477e+02 9.236e+02 1.546e+03, threshold=1.495e+03, percent-clipped=0.0
+2023-03-08 15:05:13,033 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3220, 5.1701, 4.4188, 5.3161, 5.1730, 4.7722, 5.1471, 4.6185],
+ device='cuda:1'), covar=tensor([0.0242, 0.0273, 0.1627, 0.0397, 0.0230, 0.0338, 0.0279, 0.0388],
+ device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0161, 0.0245, 0.0137, 0.0136, 0.0167, 0.0162, 0.0181],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-08 15:05:18,321 INFO [train.py:898] (1/4) Epoch 2, batch 900, loss[loss=0.2828, simple_loss=0.3389, pruned_loss=0.1133, over 18162.00 frames. ], tot_loss[loss=0.3219, simple_loss=0.3744, pruned_loss=0.1347, over 3536592.79 frames. ], batch size: 44, lr: 4.22e-02, grad_scale: 8.0
+2023-03-08 15:06:13,593 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6492, 4.5738, 4.7016, 4.7657, 4.7022, 4.4856, 5.1073, 4.9521],
+ device='cuda:1'), covar=tensor([0.0119, 0.0149, 0.0184, 0.0112, 0.0150, 0.0149, 0.0102, 0.0122],
+ device='cuda:1'), in_proj_covar=tensor([0.0060, 0.0050, 0.0049, 0.0053, 0.0055, 0.0061, 0.0054, 0.0049],
+ device='cuda:1'), out_proj_covar=tensor([1.1667e-04, 8.2354e-05, 8.2508e-05, 8.3177e-05, 1.0123e-04, 1.1459e-04,
+ 9.2532e-05, 7.9684e-05], device='cuda:1')
+2023-03-08 15:06:14,749 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4582.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 15:06:17,756 INFO [train.py:898] (1/4) Epoch 2, batch 950, loss[loss=0.2714, simple_loss=0.3358, pruned_loss=0.1035, over 18367.00 frames. ], tot_loss[loss=0.3221, simple_loss=0.3747, pruned_loss=0.1347, over 3541687.11 frames. ], batch size: 46, lr: 4.21e-02, grad_scale: 8.0
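The lr column decays smoothly (4.83e-02 at epoch 1, batch 1950 down to about 4.00e-02 by epoch 2, batch 1950) rather than in steps. The values match a schedule that decays polynomially in both the batch and the epoch count, in the style of icefall's Eden scheduler; with base_lr=0.05, lr_batches=5000 and lr_epochs=3.5 (the values this run appears to use, treated here as assumptions) the formula below reproduces the logged 4.82e-02 at epoch 1, batch 2000:

```python
def eden_lr(base_lr, batch, epoch, lr_batches=5000.0, lr_epochs=3.5):
    # epoch counts completed epochs (0 throughout the first epoch),
    # which is what makes the logged values line up.
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# eden_lr(0.05, 2000, 0) -> 0.0482, matching "lr: 4.82e-02" above.
```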
+2023-03-08 15:06:29,723 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.2756, 4.2351, 4.3116, 4.2084, 3.5849, 2.7598, 3.5294, 2.9984],
+ device='cuda:1'), covar=tensor([0.0228, 0.0213, 0.0160, 0.0080, 0.0223, 0.0335, 0.0171, 0.0297],
+ device='cuda:1'), in_proj_covar=tensor([0.0025, 0.0025, 0.0025, 0.0024, 0.0031, 0.0024, 0.0027, 0.0028],
+ device='cuda:1'), out_proj_covar=tensor([5.8961e-05, 6.9150e-05, 5.1306e-05, 5.4166e-05, 7.3393e-05, 5.4115e-05,
+ 5.4087e-05, 6.2502e-05], device='cuda:1')
+2023-03-08 15:06:52,448 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.369e+02 6.106e+02 7.676e+02 9.311e+02 1.838e+03, threshold=1.535e+03, percent-clipped=6.0
+2023-03-08 15:07:11,510 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.80 vs. limit=2.0
+2023-03-08 15:07:12,163 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4630.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 15:07:18,133 INFO [train.py:898] (1/4) Epoch 2, batch 1000, loss[loss=0.3122, simple_loss=0.3756, pruned_loss=0.1244, over 18325.00 frames. ], tot_loss[loss=0.3208, simple_loss=0.3742, pruned_loss=0.1337, over 3551387.15 frames. ], batch size: 57, lr: 4.20e-02, grad_scale: 8.0
+2023-03-08 15:07:28,742 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4644.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 15:08:17,970 INFO [train.py:898] (1/4) Epoch 2, batch 1050, loss[loss=0.3151, simple_loss=0.361, pruned_loss=0.1346, over 18503.00 frames. ], tot_loss[loss=0.3186, simple_loss=0.3726, pruned_loss=0.1323, over 3556242.96 frames. ], batch size: 47, lr: 4.19e-02, grad_scale: 8.0
+2023-03-08 15:08:48,246 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3333, 4.7423, 4.7607, 4.4778, 4.0835, 4.6253, 4.7312, 4.7424],
+ device='cuda:1'), covar=tensor([0.0817, 0.0519, 0.0487, 0.0544, 0.1429, 0.0469, 0.0487, 0.0439],
+ device='cuda:1'), in_proj_covar=tensor([0.0216, 0.0204, 0.0155, 0.0205, 0.0272, 0.0188, 0.0196, 0.0177],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-08 15:08:52,053 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.788e+02 5.479e+02 6.722e+02 8.127e+02 1.317e+03, threshold=1.344e+03, percent-clipped=0.0
+2023-03-08 15:09:02,375 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4121, 4.4080, 4.5226, 3.4924, 4.0820, 4.0766, 4.4304, 3.6227],
+ device='cuda:1'), covar=tensor([0.0544, 0.0209, 0.0113, 0.0330, 0.0130, 0.0416, 0.0188, 0.0519],
+ device='cuda:1'), in_proj_covar=tensor([0.0043, 0.0036, 0.0029, 0.0036, 0.0024, 0.0045, 0.0024, 0.0042],
+ device='cuda:1'), out_proj_covar=tensor([3.0477e-05, 2.5646e-05, 1.9128e-05, 2.5590e-05, 1.7852e-05, 3.3170e-05,
+ 1.7439e-05, 2.9376e-05], device='cuda:1')
+2023-03-08 15:09:10,737 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9758, 3.0122, 1.4839, 3.3678, 2.9327, 4.0837, 1.6534, 3.6871],
+ device='cuda:1'), covar=tensor([0.0124, 0.1020, 0.2241, 0.0331, 0.0650, 0.0128, 0.1921, 0.0293],
+ device='cuda:1'), in_proj_covar=tensor([0.0051, 0.0086, 0.0087, 0.0044, 0.0070, 0.0044, 0.0084, 0.0069],
+ device='cuda:1'), out_proj_covar=tensor([4.4264e-05, 9.1842e-05, 9.6659e-05, 5.3552e-05, 6.8543e-05, 3.9876e-05,
+ 8.4955e-05, 6.6813e-05], device='cuda:1')
+2023-03-08 15:09:17,325 INFO [train.py:898] (1/4) Epoch 2, batch 1100, loss[loss=0.3366, simple_loss=0.3906, pruned_loss=0.1413, over 16221.00 frames. ], tot_loss[loss=0.3152, simple_loss=0.3699, pruned_loss=0.1303, over 3567795.92 frames. ], batch size: 94, lr: 4.18e-02, grad_scale: 4.0
+2023-03-08 15:09:27,434 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1387, 4.1601, 4.2433, 4.1710, 4.1570, 4.0819, 4.5564, 4.3134],
+ device='cuda:1'), covar=tensor([0.0131, 0.0176, 0.0156, 0.0129, 0.0141, 0.0133, 0.0120, 0.0154],
+ device='cuda:1'), in_proj_covar=tensor([0.0060, 0.0051, 0.0049, 0.0054, 0.0054, 0.0061, 0.0053, 0.0050],
+ device='cuda:1'), out_proj_covar=tensor([1.1928e-04, 8.7146e-05, 8.5584e-05, 8.8354e-05, 1.0227e-04, 1.1852e-04,
+ 9.5134e-05, 8.5303e-05], device='cuda:1')
+2023-03-08 15:09:36,718 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3040, 5.9030, 5.5424, 5.5033, 5.1897, 5.6920, 5.8892, 5.8085],
+ device='cuda:1'), covar=tensor([0.0812, 0.0536, 0.0320, 0.0551, 0.1483, 0.0393, 0.0408, 0.0504],
+ device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0201, 0.0154, 0.0201, 0.0270, 0.0186, 0.0192, 0.0176],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-08 15:09:38,321 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.96 vs. limit=2.0
+2023-03-08 15:10:17,510 INFO [train.py:898] (1/4) Epoch 2, batch 1150, loss[loss=0.2896, simple_loss=0.3431, pruned_loss=0.1181, over 18246.00 frames. ], tot_loss[loss=0.3142, simple_loss=0.369, pruned_loss=0.1297, over 3573279.66 frames. ], batch size: 45, lr: 4.17e-02, grad_scale: 4.0
+2023-03-08 15:10:20,424 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.03 vs. limit=5.0
+2023-03-08 15:10:31,957 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4797.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 15:10:52,310 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.612e+02 6.045e+02 7.965e+02 9.549e+02 1.896e+03, threshold=1.593e+03, percent-clipped=3.0
+2023-03-08 15:11:16,912 INFO [train.py:898] (1/4) Epoch 2, batch 1200, loss[loss=0.3091, simple_loss=0.3665, pruned_loss=0.1259, over 18566.00 frames. ], tot_loss[loss=0.3155, simple_loss=0.3698, pruned_loss=0.1306, over 3572663.66 frames. ], batch size: 54, lr: 4.16e-02, grad_scale: 8.0
+2023-03-08 15:11:29,240 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4845.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 15:11:34,590 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.79 vs. limit=2.0
+2023-03-08 15:12:16,097 INFO [train.py:898] (1/4) Epoch 2, batch 1250, loss[loss=0.3354, simple_loss=0.3884, pruned_loss=0.1412, over 17789.00 frames. ], tot_loss[loss=0.3144, simple_loss=0.3689, pruned_loss=0.1299, over 3583961.41 frames. ], batch size: 70, lr: 4.15e-02, grad_scale: 8.0
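tot_loss is not a plain cumulative average: at the start of epoch 2 its frame count climbs (811669.61 at batch 50, 1432461.07 at 100, 1906469.68 at 150, ...) in ever smaller increments and then hovers around 3.6M, the signature of a frame-weighted average with exponential forgetting. A small model of that behaviour; the decay constant is a guess chosen to be consistent with the observed saturation:

```python
class RunningFrameLoss:
    """Frame-weighted loss average with exponential forgetting; the
    'over N frames' count saturates instead of growing without bound,
    as tot_loss does in this log. decay=0.995 is an assumption."""

    def __init__(self, decay=0.995):
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss_sum, batch_frames):
        self.loss_sum = self.decay * self.loss_sum + batch_loss_sum
        self.frames = self.decay * self.frames + batch_frames
        return self.loss_sum / self.frames
```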
+2023-03-08 15:12:29,731 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8739, 4.9067, 5.0912, 4.8264, 5.0259, 4.7238, 5.2157, 5.1243],
+ device='cuda:1'), covar=tensor([0.0111, 0.0143, 0.0141, 0.0110, 0.0099, 0.0122, 0.0144, 0.0103],
+ device='cuda:1'), in_proj_covar=tensor([0.0058, 0.0048, 0.0048, 0.0052, 0.0052, 0.0059, 0.0051, 0.0048],
+ device='cuda:1'), out_proj_covar=tensor([1.2025e-04, 8.7421e-05, 8.5305e-05, 8.6364e-05, 1.0036e-04, 1.1741e-04,
+ 9.4961e-05, 8.3960e-05], device='cuda:1')
+2023-03-08 15:12:51,971 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.909e+02 5.792e+02 6.888e+02 8.674e+02 1.521e+03, threshold=1.378e+03, percent-clipped=0.0
+2023-03-08 15:13:11,078 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5529, 3.8993, 4.0075, 3.7036, 2.3361, 4.1671, 3.6664, 2.8199],
+ device='cuda:1'), covar=tensor([0.0126, 0.0107, 0.0065, 0.0113, 0.0816, 0.0070, 0.0080, 0.0619],
+ device='cuda:1'), in_proj_covar=tensor([0.0043, 0.0038, 0.0038, 0.0037, 0.0072, 0.0038, 0.0030, 0.0069],
+ device='cuda:1'), out_proj_covar=tensor([4.1720e-05, 3.5112e-05, 3.5730e-05, 3.6996e-05, 6.6774e-05, 3.2704e-05,
+ 3.1674e-05, 6.6116e-05], device='cuda:1')
+2023-03-08 15:13:15,904 INFO [train.py:898] (1/4) Epoch 2, batch 1300, loss[loss=0.3445, simple_loss=0.3955, pruned_loss=0.1468, over 18572.00 frames. ], tot_loss[loss=0.315, simple_loss=0.3697, pruned_loss=0.1301, over 3590116.03 frames. ], batch size: 54, lr: 4.14e-02, grad_scale: 8.0
+2023-03-08 15:13:27,104 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4944.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 15:14:15,426 INFO [train.py:898] (1/4) Epoch 2, batch 1350, loss[loss=0.3203, simple_loss=0.3818, pruned_loss=0.1294, over 18207.00 frames. ], tot_loss[loss=0.3143, simple_loss=0.3694, pruned_loss=0.1296, over 3600290.89 frames. ], batch size: 60, lr: 4.13e-02, grad_scale: 8.0
+2023-03-08 15:14:24,035 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4992.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 15:14:27,440 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.45 vs. limit=5.0
+2023-03-08 15:14:51,638 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.370e+02 5.891e+02 6.846e+02 8.415e+02 1.418e+03, threshold=1.369e+03, percent-clipped=1.0
+2023-03-08 15:15:07,034 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5382, 4.6892, 3.5663, 4.4291, 4.3531, 4.6709, 4.3918, 3.1038],
+ device='cuda:1'), covar=tensor([0.0260, 0.0079, 0.0196, 0.0086, 0.0084, 0.0092, 0.0192, 0.0465],
+ device='cuda:1'), in_proj_covar=tensor([0.0037, 0.0022, 0.0032, 0.0024, 0.0028, 0.0025, 0.0028, 0.0042],
+ device='cuda:1'), out_proj_covar=tensor([8.9161e-05, 5.3072e-05, 8.0127e-05, 6.5085e-05, 6.0843e-05, 5.8088e-05,
+ 6.5489e-05, 9.3617e-05], device='cuda:1')
+2023-03-08 15:15:15,345 INFO [train.py:898] (1/4) Epoch 2, batch 1400, loss[loss=0.2916, simple_loss=0.3444, pruned_loss=0.1194, over 18364.00 frames. ], tot_loss[loss=0.3149, simple_loss=0.3701, pruned_loss=0.1298, over 3592567.29 frames. ], batch size: 46, lr: 4.12e-02, grad_scale: 8.0
+2023-03-08 15:15:19,774 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1363, 3.9085, 4.3329, 3.1330, 4.0241, 3.7636, 4.1226, 3.4554],
+ device='cuda:1'), covar=tensor([0.0490, 0.0223, 0.0073, 0.0300, 0.0092, 0.0307, 0.0160, 0.0402],
+ device='cuda:1'), in_proj_covar=tensor([0.0048, 0.0042, 0.0032, 0.0041, 0.0029, 0.0054, 0.0028, 0.0049],
+ device='cuda:1'), out_proj_covar=tensor([3.6419e-05, 3.1305e-05, 2.2257e-05, 3.0844e-05, 2.2375e-05, 4.1091e-05,
+ 2.2459e-05, 3.5481e-05], device='cuda:1')
+2023-03-08 15:16:12,495 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7690, 3.7489, 4.0031, 3.6760, 2.3977, 3.9273, 3.9375, 2.3696],
+ device='cuda:1'), covar=tensor([0.0124, 0.0144, 0.0068, 0.0141, 0.0803, 0.0128, 0.0095, 0.0730],
+ device='cuda:1'), in_proj_covar=tensor([0.0044, 0.0040, 0.0039, 0.0039, 0.0072, 0.0038, 0.0032, 0.0072],
+ device='cuda:1'), out_proj_covar=tensor([4.3450e-05, 3.6208e-05, 3.6913e-05, 3.8476e-05, 6.7646e-05, 3.3417e-05,
+ 3.3820e-05, 6.9066e-05], device='cuda:1')
+2023-03-08 15:16:13,608 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7246, 4.6596, 3.9379, 4.7978, 4.7919, 4.3660, 4.7063, 4.0956],
+ device='cuda:1'), covar=tensor([0.0369, 0.0423, 0.2007, 0.0436, 0.0289, 0.0386, 0.0337, 0.0578],
+ device='cuda:1'), in_proj_covar=tensor([0.0166, 0.0178, 0.0282, 0.0144, 0.0150, 0.0182, 0.0178, 0.0198],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-08 15:16:14,506 INFO [train.py:898] (1/4) Epoch 2, batch 1450, loss[loss=0.3207, simple_loss=0.3789, pruned_loss=0.1312, over 17153.00 frames. ], tot_loss[loss=0.3151, simple_loss=0.3703, pruned_loss=0.13, over 3597168.88 frames. ], batch size: 78, lr: 4.11e-02, grad_scale: 8.0
+2023-03-08 15:16:36,491 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5579, 5.2601, 5.5105, 5.5682, 5.5223, 6.1405, 5.6760, 5.6315],
+ device='cuda:1'), covar=tensor([0.0595, 0.0515, 0.0574, 0.0432, 0.0900, 0.0488, 0.0505, 0.0952],
+ device='cuda:1'), in_proj_covar=tensor([0.0169, 0.0146, 0.0140, 0.0127, 0.0182, 0.0184, 0.0132, 0.0181],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-08 15:16:50,485 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.583e+02 6.040e+02 7.064e+02 8.729e+02 1.298e+03, threshold=1.413e+03, percent-clipped=0.0
+2023-03-08 15:17:13,677 INFO [train.py:898] (1/4) Epoch 2, batch 1500, loss[loss=0.3294, simple_loss=0.3707, pruned_loss=0.144, over 18384.00 frames. ], tot_loss[loss=0.3144, simple_loss=0.3694, pruned_loss=0.1297, over 3600325.51 frames. ], batch size: 50, lr: 4.10e-02, grad_scale: 8.0
+2023-03-08 15:18:11,879 INFO [train.py:898] (1/4) Epoch 2, batch 1550, loss[loss=0.3262, simple_loss=0.3684, pruned_loss=0.142, over 18406.00 frames. ], tot_loss[loss=0.314, simple_loss=0.3688, pruned_loss=0.1296, over 3594625.26 frames. ], batch size: 48, lr: 4.08e-02, grad_scale: 8.0
+2023-03-08 15:18:48,961 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.611e+02 6.188e+02 7.756e+02 9.428e+02 1.707e+03, threshold=1.551e+03, percent-clipped=5.0
+2023-03-08 15:19:11,997 INFO [train.py:898] (1/4) Epoch 2, batch 1600, loss[loss=0.3288, simple_loss=0.3894, pruned_loss=0.1341, over 18013.00 frames. ], tot_loss[loss=0.3121, simple_loss=0.3672, pruned_loss=0.1285, over 3596002.92 frames. ], batch size: 65, lr: 4.07e-02, grad_scale: 8.0
], batch size: 65, lr: 4.07e-02, grad_scale: 8.0 +2023-03-08 15:20:11,433 INFO [train.py:898] (1/4) Epoch 2, batch 1650, loss[loss=0.3245, simple_loss=0.384, pruned_loss=0.1325, over 18303.00 frames. ], tot_loss[loss=0.3095, simple_loss=0.3654, pruned_loss=0.1268, over 3600783.33 frames. ], batch size: 57, lr: 4.06e-02, grad_scale: 8.0 +2023-03-08 15:20:47,171 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.457e+02 5.814e+02 6.706e+02 8.624e+02 1.400e+03, threshold=1.341e+03, percent-clipped=0.0 +2023-03-08 15:21:10,524 INFO [train.py:898] (1/4) Epoch 2, batch 1700, loss[loss=0.3137, simple_loss=0.3732, pruned_loss=0.1271, over 18511.00 frames. ], tot_loss[loss=0.3098, simple_loss=0.3662, pruned_loss=0.1267, over 3599058.85 frames. ], batch size: 51, lr: 4.05e-02, grad_scale: 8.0 +2023-03-08 15:21:22,042 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7633, 4.6351, 2.9255, 4.3223, 4.6605, 2.4575, 4.0677, 3.4295], + device='cuda:1'), covar=tensor([0.0058, 0.0122, 0.1687, 0.0166, 0.0061, 0.1312, 0.0414, 0.0804], + device='cuda:1'), in_proj_covar=tensor([0.0059, 0.0056, 0.0151, 0.0077, 0.0055, 0.0120, 0.0110, 0.0105], + device='cuda:1'), out_proj_covar=tensor([5.6614e-05, 6.4371e-05, 1.3751e-04, 7.0641e-05, 5.1318e-05, 1.1454e-04, + 1.0901e-04, 1.0972e-04], device='cuda:1') +2023-03-08 15:21:32,578 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5903, 6.0321, 5.6852, 5.7722, 5.3160, 5.9438, 6.1270, 5.9918], + device='cuda:1'), covar=tensor([0.0830, 0.0524, 0.0271, 0.0516, 0.1640, 0.0383, 0.0358, 0.0457], + device='cuda:1'), in_proj_covar=tensor([0.0244, 0.0217, 0.0168, 0.0220, 0.0304, 0.0211, 0.0208, 0.0191], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 15:22:10,168 INFO [train.py:898] (1/4) Epoch 2, batch 1750, loss[loss=0.3172, simple_loss=0.3716, pruned_loss=0.1313, over 18368.00 frames. ], tot_loss[loss=0.3088, simple_loss=0.3655, pruned_loss=0.126, over 3596963.04 frames. ], batch size: 50, lr: 4.04e-02, grad_scale: 8.0 +2023-03-08 15:22:46,219 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.642e+02 5.505e+02 6.727e+02 8.573e+02 1.772e+03, threshold=1.345e+03, percent-clipped=4.0 +2023-03-08 15:23:02,565 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1491, 5.6359, 5.5095, 5.3661, 4.8303, 5.5485, 5.7047, 5.5020], + device='cuda:1'), covar=tensor([0.0987, 0.0612, 0.0336, 0.0647, 0.2091, 0.0460, 0.0506, 0.0624], + device='cuda:1'), in_proj_covar=tensor([0.0244, 0.0215, 0.0167, 0.0224, 0.0305, 0.0213, 0.0213, 0.0194], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 15:23:10,302 INFO [train.py:898] (1/4) Epoch 2, batch 1800, loss[loss=0.2824, simple_loss=0.332, pruned_loss=0.1164, over 18419.00 frames. ], tot_loss[loss=0.3068, simple_loss=0.364, pruned_loss=0.1248, over 3607622.73 frames. 
], batch size: 43, lr: 4.03e-02, grad_scale: 8.0 +2023-03-08 15:23:10,707 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([1.6123, 4.9486, 4.9872, 4.0968, 2.3634, 2.2165, 4.4810, 4.6788], + device='cuda:1'), covar=tensor([0.0929, 0.0124, 0.0043, 0.0143, 0.0882, 0.1042, 0.0123, 0.0025], + device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0033, 0.0031, 0.0053, 0.0080, 0.0088, 0.0054, 0.0031], + device='cuda:1'), out_proj_covar=tensor([1.0076e-04, 5.6878e-05, 4.1936e-05, 7.1928e-05, 1.0379e-04, 1.1501e-04, + 7.1944e-05, 4.1377e-05], device='cuda:1') +2023-03-08 15:24:09,631 INFO [train.py:898] (1/4) Epoch 2, batch 1850, loss[loss=0.2735, simple_loss=0.3346, pruned_loss=0.1062, over 18503.00 frames. ], tot_loss[loss=0.305, simple_loss=0.3625, pruned_loss=0.1238, over 3613048.41 frames. ], batch size: 47, lr: 4.02e-02, grad_scale: 8.0 +2023-03-08 15:24:45,053 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 5.666e+02 6.741e+02 8.273e+02 2.014e+03, threshold=1.348e+03, percent-clipped=2.0 +2023-03-08 15:24:59,604 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.03 vs. limit=5.0 +2023-03-08 15:25:08,314 INFO [train.py:898] (1/4) Epoch 2, batch 1900, loss[loss=0.276, simple_loss=0.3435, pruned_loss=0.1042, over 18398.00 frames. ], tot_loss[loss=0.3051, simple_loss=0.3625, pruned_loss=0.1239, over 3613125.77 frames. ], batch size: 52, lr: 4.01e-02, grad_scale: 8.0 +2023-03-08 15:26:07,475 INFO [train.py:898] (1/4) Epoch 2, batch 1950, loss[loss=0.3101, simple_loss=0.3693, pruned_loss=0.1255, over 17793.00 frames. ], tot_loss[loss=0.3035, simple_loss=0.3611, pruned_loss=0.123, over 3601990.72 frames. ], batch size: 70, lr: 4.00e-02, grad_scale: 8.0 +2023-03-08 15:26:42,652 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.871e+02 5.827e+02 7.380e+02 9.024e+02 1.685e+03, threshold=1.476e+03, percent-clipped=2.0 +2023-03-08 15:26:50,175 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-08 15:27:06,899 INFO [train.py:898] (1/4) Epoch 2, batch 2000, loss[loss=0.3111, simple_loss=0.3787, pruned_loss=0.1217, over 18313.00 frames. ], tot_loss[loss=0.3023, simple_loss=0.3604, pruned_loss=0.1222, over 3604311.90 frames. 
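Across these train.py:898 entries the three reported quantities satisfy loss = 0.5 * simple_loss + pruned_loss to within rounding (batch 1850 above: 0.5 * 0.3625 + 0.1238 = 0.3051 vs. the logged 0.305), so at this point in training the objective is a fixed weighted sum of the simple and pruned transducer losses. A one-line sketch; the 0.5 weight is read off the log itself, and any warmup-dependent reweighting used in the very first batches is omitted:

    def combine_transducer_losses(simple_loss, pruned_loss,
                                  simple_loss_scale=0.5):
        # Reproduces the logged identity loss = 0.5 * simple + pruned,
        # e.g. 0.5 * 0.3625 + 0.1238 = 0.3051 for Epoch 2, batch 1850.
        # (Weight inferred from the log, not quoted from the recipe.)
        return simple_loss_scale * simple_loss + pruned_loss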
], batch size: 57, lr: 3.99e-02, grad_scale: 8.0 +2023-03-08 15:27:24,057 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5650.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 15:27:37,762 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5480, 5.4597, 4.7036, 5.4754, 5.4561, 4.9542, 5.3969, 4.7943], + device='cuda:1'), covar=tensor([0.0283, 0.0248, 0.2068, 0.0551, 0.0230, 0.0346, 0.0335, 0.0506], + device='cuda:1'), in_proj_covar=tensor([0.0171, 0.0182, 0.0299, 0.0146, 0.0148, 0.0182, 0.0182, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 15:27:44,421 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8562, 3.8284, 4.3242, 2.6500, 4.0800, 3.8784, 3.7889, 2.7249], + device='cuda:1'), covar=tensor([0.0586, 0.0237, 0.0086, 0.0447, 0.0133, 0.0434, 0.0262, 0.0688], + device='cuda:1'), in_proj_covar=tensor([0.0060, 0.0050, 0.0036, 0.0050, 0.0038, 0.0073, 0.0032, 0.0064], + device='cuda:1'), out_proj_covar=tensor([4.9155e-05, 4.0844e-05, 2.7572e-05, 4.1414e-05, 3.2439e-05, 5.9013e-05, + 2.9601e-05, 5.0450e-05], device='cuda:1') +2023-03-08 15:28:05,548 INFO [train.py:898] (1/4) Epoch 2, batch 2050, loss[loss=0.3167, simple_loss=0.3776, pruned_loss=0.1279, over 18502.00 frames. ], tot_loss[loss=0.3041, simple_loss=0.3619, pruned_loss=0.1232, over 3599577.16 frames. ], batch size: 51, lr: 3.98e-02, grad_scale: 8.0 +2023-03-08 15:28:35,631 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5711.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 15:28:42,464 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.372e+02 5.884e+02 6.946e+02 9.338e+02 2.315e+03, threshold=1.389e+03, percent-clipped=7.0 +2023-03-08 15:29:03,492 INFO [train.py:898] (1/4) Epoch 2, batch 2100, loss[loss=0.3719, simple_loss=0.4025, pruned_loss=0.1706, over 12346.00 frames. ], tot_loss[loss=0.3032, simple_loss=0.3612, pruned_loss=0.1226, over 3601353.20 frames. ], batch size: 130, lr: 3.97e-02, grad_scale: 2.0 +2023-03-08 15:29:27,872 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5755.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:30:02,847 INFO [train.py:898] (1/4) Epoch 2, batch 2150, loss[loss=0.3759, simple_loss=0.406, pruned_loss=0.1728, over 12573.00 frames. ], tot_loss[loss=0.3006, simple_loss=0.3588, pruned_loss=0.1212, over 3603727.16 frames. 
], batch size: 129, lr: 3.96e-02, grad_scale: 2.0 +2023-03-08 15:30:40,450 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5816.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:30:41,160 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.344e+02 5.674e+02 6.514e+02 8.678e+02 1.455e+03, threshold=1.303e+03, percent-clipped=1.0 +2023-03-08 15:30:58,858 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.4756, 4.3900, 2.6247, 4.3880, 4.2339, 2.2892, 4.0443, 3.4820], + device='cuda:1'), covar=tensor([0.0075, 0.0107, 0.1479, 0.0149, 0.0047, 0.1284, 0.0383, 0.0697], + device='cuda:1'), in_proj_covar=tensor([0.0064, 0.0058, 0.0153, 0.0081, 0.0056, 0.0125, 0.0118, 0.0109], + device='cuda:1'), out_proj_covar=tensor([6.4456e-05, 7.2405e-05, 1.4222e-04, 7.9325e-05, 5.5738e-05, 1.2154e-04, + 1.1984e-04, 1.1731e-04], device='cuda:1') +2023-03-08 15:31:01,977 INFO [train.py:898] (1/4) Epoch 2, batch 2200, loss[loss=0.2842, simple_loss=0.3508, pruned_loss=0.1088, over 18552.00 frames. ], tot_loss[loss=0.3014, simple_loss=0.3593, pruned_loss=0.1218, over 3597651.69 frames. ], batch size: 49, lr: 3.95e-02, grad_scale: 2.0 +2023-03-08 15:31:29,442 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5858.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:31:51,228 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.95 vs. limit=2.0 +2023-03-08 15:32:00,602 INFO [train.py:898] (1/4) Epoch 2, batch 2250, loss[loss=0.3012, simple_loss=0.366, pruned_loss=0.1182, over 18355.00 frames. ], tot_loss[loss=0.301, simple_loss=0.3591, pruned_loss=0.1214, over 3595968.48 frames. ], batch size: 55, lr: 3.95e-02, grad_scale: 2.0 +2023-03-08 15:32:05,980 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.27 vs. limit=5.0 +2023-03-08 15:32:38,782 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.697e+02 5.715e+02 7.226e+02 9.109e+02 2.021e+03, threshold=1.445e+03, percent-clipped=5.0 +2023-03-08 15:32:41,315 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5919.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:32:59,646 INFO [train.py:898] (1/4) Epoch 2, batch 2300, loss[loss=0.3953, simple_loss=0.4212, pruned_loss=0.1847, over 12650.00 frames. ], tot_loss[loss=0.3013, simple_loss=0.3595, pruned_loss=0.1215, over 3599447.85 frames. ], batch size: 129, lr: 3.94e-02, grad_scale: 2.0 +2023-03-08 15:33:09,338 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9603, 3.7000, 3.6475, 3.3842, 1.8394, 3.8897, 3.7748, 2.1967], + device='cuda:1'), covar=tensor([0.0136, 0.0183, 0.0126, 0.0183, 0.1224, 0.0140, 0.0102, 0.0949], + device='cuda:1'), in_proj_covar=tensor([0.0054, 0.0051, 0.0049, 0.0045, 0.0094, 0.0046, 0.0039, 0.0089], + device='cuda:1'), out_proj_covar=tensor([5.3906e-05, 4.8118e-05, 4.8173e-05, 4.6469e-05, 8.9463e-05, 4.2076e-05, + 4.3884e-05, 8.6518e-05], device='cuda:1') +2023-03-08 15:33:58,221 INFO [train.py:898] (1/4) Epoch 2, batch 2350, loss[loss=0.2917, simple_loss=0.3554, pruned_loss=0.114, over 18468.00 frames. ], tot_loss[loss=0.3025, simple_loss=0.3604, pruned_loss=0.1223, over 3587031.42 frames. 
], batch size: 59, lr: 3.93e-02, grad_scale: 2.0 +2023-03-08 15:34:04,337 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5990.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:34:27,117 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6006.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 15:34:39,271 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.355e+02 5.403e+02 7.312e+02 8.931e+02 1.402e+03, threshold=1.462e+03, percent-clipped=0.0 +2023-03-08 15:34:55,572 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4912, 5.1628, 5.4763, 5.3278, 5.3778, 5.9829, 5.5146, 5.3730], + device='cuda:1'), covar=tensor([0.0534, 0.0536, 0.0500, 0.0450, 0.0946, 0.0561, 0.0435, 0.1161], + device='cuda:1'), in_proj_covar=tensor([0.0175, 0.0137, 0.0141, 0.0129, 0.0181, 0.0191, 0.0130, 0.0195], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:1') +2023-03-08 15:35:00,406 INFO [train.py:898] (1/4) Epoch 2, batch 2400, loss[loss=0.283, simple_loss=0.3525, pruned_loss=0.1068, over 18627.00 frames. ], tot_loss[loss=0.3022, simple_loss=0.3601, pruned_loss=0.1222, over 3589503.25 frames. ], batch size: 52, lr: 3.92e-02, grad_scale: 4.0 +2023-03-08 15:35:10,124 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6043.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:35:19,661 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6051.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:35:22,647 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2186, 4.2724, 4.0910, 4.1324, 2.5325, 4.0416, 4.1342, 2.5531], + device='cuda:1'), covar=tensor([0.0080, 0.0059, 0.0069, 0.0082, 0.0856, 0.0085, 0.0104, 0.0737], + device='cuda:1'), in_proj_covar=tensor([0.0056, 0.0050, 0.0050, 0.0045, 0.0095, 0.0047, 0.0039, 0.0091], + device='cuda:1'), out_proj_covar=tensor([5.4652e-05, 4.7650e-05, 4.9219e-05, 4.6489e-05, 9.0197e-05, 4.3512e-05, + 4.4787e-05, 8.8123e-05], device='cuda:1') +2023-03-08 15:35:52,634 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.04 vs. limit=2.0 +2023-03-08 15:35:59,394 INFO [train.py:898] (1/4) Epoch 2, batch 2450, loss[loss=0.2711, simple_loss=0.3389, pruned_loss=0.1017, over 18398.00 frames. ], tot_loss[loss=0.3003, simple_loss=0.3584, pruned_loss=0.1211, over 3592793.78 frames. ], batch size: 48, lr: 3.91e-02, grad_scale: 4.0 +2023-03-08 15:36:11,978 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.07 vs. limit=2.0 +2023-03-08 15:36:23,083 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6104.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:36:30,673 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6111.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:36:37,241 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.501e+02 5.639e+02 7.028e+02 8.696e+02 2.308e+03, threshold=1.406e+03, percent-clipped=2.0 +2023-03-08 15:36:58,341 INFO [train.py:898] (1/4) Epoch 2, batch 2500, loss[loss=0.2943, simple_loss=0.3528, pruned_loss=0.1179, over 18362.00 frames. ], tot_loss[loss=0.2997, simple_loss=0.3577, pruned_loss=0.1208, over 3583395.72 frames. 
], batch size: 50, lr: 3.90e-02, grad_scale: 4.0 +2023-03-08 15:36:58,729 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6135.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:37:33,690 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.71 vs. limit=5.0 +2023-03-08 15:37:39,798 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6170.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:37:56,480 INFO [train.py:898] (1/4) Epoch 2, batch 2550, loss[loss=0.3201, simple_loss=0.3838, pruned_loss=0.1282, over 16294.00 frames. ], tot_loss[loss=0.3001, simple_loss=0.3583, pruned_loss=0.121, over 3588824.57 frames. ], batch size: 94, lr: 3.89e-02, grad_scale: 4.0 +2023-03-08 15:38:10,597 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6196.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:38:18,824 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6203.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:38:22,759 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.05 vs. limit=2.0 +2023-03-08 15:38:28,936 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6211.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:38:32,252 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6214.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:38:35,433 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 5.777e+02 7.159e+02 8.638e+02 1.810e+03, threshold=1.432e+03, percent-clipped=8.0 +2023-03-08 15:38:51,671 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6231.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:38:56,004 INFO [train.py:898] (1/4) Epoch 2, batch 2600, loss[loss=0.2399, simple_loss=0.3094, pruned_loss=0.08519, over 18244.00 frames. ], tot_loss[loss=0.3005, simple_loss=0.3591, pruned_loss=0.121, over 3586169.89 frames. ], batch size: 45, lr: 3.88e-02, grad_scale: 4.0 +2023-03-08 15:39:11,345 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-08 15:39:31,844 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6264.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:39:34,099 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6266.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:39:40,852 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6272.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:39:55,329 INFO [train.py:898] (1/4) Epoch 2, batch 2650, loss[loss=0.2619, simple_loss=0.3155, pruned_loss=0.1041, over 18396.00 frames. ], tot_loss[loss=0.3003, simple_loss=0.359, pruned_loss=0.1208, over 3585575.27 frames. 
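Each zipformer.py:625 entry records per-stack stochastic depth: for the encoder stack whose warmup window is [warmup_begin, warmup_end], the current batch either keeps all layers (num_to_drop=0) or drops a random subset (e.g. num_to_drop=1, layers_to_drop={1}). Drops are common early and become rare later, but still occur long after a stack's warmup_end (num_to_drop=1 at batch_count=5650.0 against warmup_end=3333.3 earlier in this log). A sketch of a schedule with that shape; the probabilities and the linear ramp are hypothetical, not the zipformer's exact rule:

    import random

    def choose_layers_to_drop(batch_count, warmup_begin, warmup_end,
                              num_layers, initial_p=0.2, final_p=0.025):
        # Drop probability ramps down across the stack's warmup window
        # and stays at a small constant afterwards, so occasional drops
        # persist for the whole run, as seen in the log.
        if batch_count <= warmup_begin:
            p = initial_p
        elif batch_count >= warmup_end:
            p = final_p
        else:
            frac = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
            p = initial_p + frac * (final_p - initial_p)
        layers = {i for i in range(num_layers) if random.random() < p}
        return layers   # num_to_drop == len(layers), layers_to_drop == layers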
], batch size: 43, lr: 3.87e-02, grad_scale: 4.0 +2023-03-08 15:40:20,568 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6306.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 15:40:33,297 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.735e+02 5.474e+02 6.730e+02 8.945e+02 2.130e+03, threshold=1.346e+03, percent-clipped=7.0 +2023-03-08 15:40:45,865 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6327.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:40:54,525 INFO [train.py:898] (1/4) Epoch 2, batch 2700, loss[loss=0.2633, simple_loss=0.321, pruned_loss=0.1029, over 18145.00 frames. ], tot_loss[loss=0.3008, simple_loss=0.3593, pruned_loss=0.1212, over 3572829.68 frames. ], batch size: 44, lr: 3.86e-02, grad_scale: 4.0 +2023-03-08 15:41:07,620 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6346.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:41:17,122 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6354.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 15:41:53,027 INFO [train.py:898] (1/4) Epoch 2, batch 2750, loss[loss=0.3006, simple_loss=0.3616, pruned_loss=0.1198, over 18644.00 frames. ], tot_loss[loss=0.2997, simple_loss=0.3581, pruned_loss=0.1206, over 3573676.86 frames. ], batch size: 52, lr: 3.85e-02, grad_scale: 4.0 +2023-03-08 15:42:09,990 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6399.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:42:24,454 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6411.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:42:31,298 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.682e+02 5.887e+02 7.195e+02 9.085e+02 1.925e+03, threshold=1.439e+03, percent-clipped=3.0 +2023-03-08 15:42:37,963 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=2.06 vs. limit=2.0 +2023-03-08 15:42:52,440 INFO [train.py:898] (1/4) Epoch 2, batch 2800, loss[loss=0.3363, simple_loss=0.3822, pruned_loss=0.1452, over 13198.00 frames. ], tot_loss[loss=0.2998, simple_loss=0.3584, pruned_loss=0.1206, over 3574904.31 frames. 
], batch size: 130, lr: 3.84e-02, grad_scale: 8.0 +2023-03-08 15:42:59,700 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4772, 4.5329, 2.7569, 4.3106, 4.2754, 4.5318, 4.3415, 2.6608], + device='cuda:1'), covar=tensor([0.0200, 0.0063, 0.0459, 0.0081, 0.0062, 0.0050, 0.0098, 0.0788], + device='cuda:1'), in_proj_covar=tensor([0.0042, 0.0026, 0.0048, 0.0029, 0.0034, 0.0028, 0.0035, 0.0061], + device='cuda:1'), out_proj_covar=tensor([1.3619e-04, 8.6497e-05, 1.4761e-04, 1.0909e-04, 9.7252e-05, 8.6568e-05, + 1.0740e-04, 1.6315e-04], device='cuda:1') +2023-03-08 15:43:21,409 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6459.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:43:25,083 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6462.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:43:28,249 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6971, 5.0037, 3.0150, 4.7691, 4.5804, 5.0891, 4.7975, 2.3208], + device='cuda:1'), covar=tensor([0.0225, 0.0063, 0.0498, 0.0080, 0.0107, 0.0062, 0.0120, 0.1154], + device='cuda:1'), in_proj_covar=tensor([0.0042, 0.0027, 0.0049, 0.0030, 0.0034, 0.0028, 0.0035, 0.0062], + device='cuda:1'), out_proj_covar=tensor([1.3871e-04, 8.9017e-05, 1.4985e-04, 1.1057e-04, 9.9810e-05, 8.8625e-05, + 1.0885e-04, 1.6773e-04], device='cuda:1') +2023-03-08 15:43:30,931 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6467.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:43:51,082 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7635, 3.2620, 4.4063, 2.7479, 3.3178, 4.0609, 4.2182, 4.1171], + device='cuda:1'), covar=tensor([0.0263, 0.0353, 0.0128, 0.0541, 0.1135, 0.0052, 0.0143, 0.0181], + device='cuda:1'), in_proj_covar=tensor([0.0075, 0.0080, 0.0051, 0.0082, 0.0149, 0.0058, 0.0068, 0.0072], + device='cuda:1'), out_proj_covar=tensor([5.5164e-05, 5.8882e-05, 3.9311e-05, 6.1263e-05, 1.1003e-04, 3.8029e-05, + 4.7898e-05, 5.2749e-05], device='cuda:1') +2023-03-08 15:43:51,684 INFO [train.py:898] (1/4) Epoch 2, batch 2850, loss[loss=0.3148, simple_loss=0.371, pruned_loss=0.1293, over 18563.00 frames. ], tot_loss[loss=0.2994, simple_loss=0.3582, pruned_loss=0.1203, over 3583010.76 frames. ], batch size: 54, lr: 3.83e-02, grad_scale: 8.0 +2023-03-08 15:43:58,850 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6491.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:44:26,144 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6514.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:44:29,249 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.462e+02 5.681e+02 6.833e+02 8.085e+02 2.080e+03, threshold=1.367e+03, percent-clipped=2.0 +2023-03-08 15:44:37,074 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6523.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 15:44:40,180 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6526.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:44:43,183 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6528.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:44:44,512 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.74 vs. limit=2.0 +2023-03-08 15:44:45,989 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.61 vs. 
limit=2.0 +2023-03-08 15:44:50,788 INFO [train.py:898] (1/4) Epoch 2, batch 2900, loss[loss=0.3343, simple_loss=0.3904, pruned_loss=0.1391, over 18281.00 frames. ], tot_loss[loss=0.3001, simple_loss=0.3587, pruned_loss=0.1208, over 3581832.37 frames. ], batch size: 57, lr: 3.82e-02, grad_scale: 8.0 +2023-03-08 15:45:19,408 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6559.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:45:22,841 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6562.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:45:28,537 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6567.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:45:49,913 INFO [train.py:898] (1/4) Epoch 2, batch 2950, loss[loss=0.2558, simple_loss=0.3168, pruned_loss=0.09742, over 17594.00 frames. ], tot_loss[loss=0.2974, simple_loss=0.3564, pruned_loss=0.1192, over 3594469.55 frames. ], batch size: 39, lr: 3.81e-02, grad_scale: 8.0 +2023-03-08 15:46:27,988 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.426e+02 5.603e+02 6.658e+02 8.574e+02 1.720e+03, threshold=1.332e+03, percent-clipped=3.0 +2023-03-08 15:46:33,937 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6622.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:46:42,608 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6629.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:46:49,589 INFO [train.py:898] (1/4) Epoch 2, batch 3000, loss[loss=0.2454, simple_loss=0.3132, pruned_loss=0.08878, over 18365.00 frames. ], tot_loss[loss=0.2941, simple_loss=0.3537, pruned_loss=0.1173, over 3601435.68 frames. ], batch size: 46, lr: 3.80e-02, grad_scale: 8.0 +2023-03-08 15:46:49,589 INFO [train.py:923] (1/4) Computing validation loss +2023-03-08 15:47:01,156 INFO [train.py:932] (1/4) Epoch 2, validation: loss=0.2202, simple_loss=0.3188, pruned_loss=0.06074, over 944034.00 frames. +2023-03-08 15:47:01,157 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19527MB +2023-03-08 15:47:10,984 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.79 vs. limit=2.0 +2023-03-08 15:47:14,430 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6646.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:47:59,747 INFO [train.py:898] (1/4) Epoch 2, batch 3050, loss[loss=0.2749, simple_loss=0.3546, pruned_loss=0.09765, over 18415.00 frames. ], tot_loss[loss=0.2957, simple_loss=0.3552, pruned_loss=0.1182, over 3603637.08 frames. 
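The scaling.py:679 entries ("Whitening: num_groups=..., num_channels=..., metric=M vs. limit=L") track how far each group of channels is from having a white covariance, with a constraint applied only once the metric exceeds the limit; metric=1.79 vs. limit=2.0 above is within bounds. One standard whiteness measure with the right behaviour is the eigenvalue dispersion E[lambda^2] / E[lambda]^2 of the channel covariance, which is >= 1.0 and equals 1.0 exactly when the covariance is a multiple of the identity. A sketch under that assumption (the per-group averaging and the naming are mine, not necessarily icefall's exact formula):

    import torch

    def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
        # x: (..., num_channels). Returns the mean over groups of
        # E[lambda^2] / E[lambda]^2 for the group's channel covariance,
        # computed via trace(C^2) and trace(C) instead of an eigendecomp.
        x = x.reshape(-1, x.shape[-1])
        num_channels = x.shape[1]
        assert num_channels % num_groups == 0
        c = num_channels // num_groups
        x = x.reshape(x.shape[0], num_groups, c).permute(1, 0, 2)
        covar = x.transpose(1, 2) @ x / x.shape[1]        # (groups, c, c)
        trace_c = covar.diagonal(dim1=1, dim2=2).sum(-1)            # sum lambda
        trace_c2 = (covar @ covar).diagonal(dim1=1, dim2=2).sum(-1) # sum lambda^2
        metric = c * trace_c2 / (trace_c ** 2 + 1.0e-20)
        return metric.mean().item()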
], batch size: 52, lr: 3.79e-02, grad_scale: 8.0 +2023-03-08 15:48:05,939 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6690.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:48:10,412 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6694.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:48:16,817 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6699.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:48:37,359 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0520, 2.3568, 3.6104, 2.7490, 2.9222, 3.4080, 3.1074, 3.1733], + device='cuda:1'), covar=tensor([0.0221, 0.0379, 0.0091, 0.0400, 0.0975, 0.0085, 0.0208, 0.0193], + device='cuda:1'), in_proj_covar=tensor([0.0079, 0.0085, 0.0052, 0.0086, 0.0158, 0.0060, 0.0071, 0.0077], + device='cuda:1'), out_proj_covar=tensor([5.9472e-05, 6.3398e-05, 4.0163e-05, 6.5508e-05, 1.1769e-04, 3.9432e-05, + 5.1218e-05, 5.8653e-05], device='cuda:1') +2023-03-08 15:48:37,953 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.591e+02 5.592e+02 6.340e+02 8.473e+02 1.907e+03, threshold=1.268e+03, percent-clipped=6.0 +2023-03-08 15:48:58,814 INFO [train.py:898] (1/4) Epoch 2, batch 3100, loss[loss=0.3318, simple_loss=0.3882, pruned_loss=0.1377, over 18486.00 frames. ], tot_loss[loss=0.2948, simple_loss=0.3542, pruned_loss=0.1177, over 3603634.65 frames. ], batch size: 51, lr: 3.79e-02, grad_scale: 8.0 +2023-03-08 15:49:12,527 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6747.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:49:57,575 INFO [train.py:898] (1/4) Epoch 2, batch 3150, loss[loss=0.3091, simple_loss=0.3701, pruned_loss=0.1241, over 18399.00 frames. ], tot_loss[loss=0.2957, simple_loss=0.3551, pruned_loss=0.1182, over 3593685.01 frames. 
], batch size: 52, lr: 3.78e-02, grad_scale: 8.0 +2023-03-08 15:50:04,819 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6791.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:50:26,007 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6863, 4.6746, 2.8628, 4.2724, 4.4640, 4.6999, 4.4882, 2.4949], + device='cuda:1'), covar=tensor([0.0234, 0.0075, 0.0573, 0.0107, 0.0073, 0.0076, 0.0123, 0.1075], + device='cuda:1'), in_proj_covar=tensor([0.0044, 0.0028, 0.0052, 0.0032, 0.0036, 0.0029, 0.0037, 0.0066], + device='cuda:1'), out_proj_covar=tensor([1.5318e-04, 9.6971e-05, 1.6585e-04, 1.2368e-04, 1.0904e-04, 9.7978e-05, + 1.2000e-04, 1.8607e-04], device='cuda:1') +2023-03-08 15:50:36,057 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.441e+02 5.682e+02 6.955e+02 9.535e+02 1.991e+03, threshold=1.391e+03, percent-clipped=9.0 +2023-03-08 15:50:37,930 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6818.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 15:50:43,372 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6823.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:50:43,564 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8207, 2.5566, 1.9895, 2.4087, 2.6093, 2.0394, 2.0705, 2.9248], + device='cuda:1'), covar=tensor([0.0188, 0.0212, 0.0601, 0.0249, 0.0341, 0.0459, 0.0507, 0.0284], + device='cuda:1'), in_proj_covar=tensor([0.0037, 0.0038, 0.0043, 0.0045, 0.0038, 0.0056, 0.0064, 0.0038], + device='cuda:1'), out_proj_covar=tensor([5.0567e-05, 5.1290e-05, 6.5240e-05, 6.0031e-05, 5.3527e-05, 7.7828e-05, + 8.9650e-05, 5.8875e-05], device='cuda:1') +2023-03-08 15:50:46,938 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6826.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:50:57,112 INFO [train.py:898] (1/4) Epoch 2, batch 3200, loss[loss=0.3091, simple_loss=0.3649, pruned_loss=0.1266, over 18490.00 frames. ], tot_loss[loss=0.2942, simple_loss=0.3541, pruned_loss=0.1172, over 3584859.96 frames. ], batch size: 53, lr: 3.77e-02, grad_scale: 8.0 +2023-03-08 15:51:02,041 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6839.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:51:25,982 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6859.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:51:35,087 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6867.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:51:43,490 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6874.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:51:48,492 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6878.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 15:51:56,410 INFO [train.py:898] (1/4) Epoch 2, batch 3250, loss[loss=0.3021, simple_loss=0.3657, pruned_loss=0.1193, over 18062.00 frames. ], tot_loss[loss=0.2928, simple_loss=0.353, pruned_loss=0.1163, over 3592315.13 frames. 
], batch size: 62, lr: 3.76e-02, grad_scale: 8.0 +2023-03-08 15:52:22,342 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6907.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:52:25,373 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6909.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:52:31,964 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6915.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:52:34,074 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.634e+02 5.392e+02 6.712e+02 8.838e+02 2.461e+03, threshold=1.342e+03, percent-clipped=3.0 +2023-03-08 15:52:39,952 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6922.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:52:54,799 INFO [train.py:898] (1/4) Epoch 2, batch 3300, loss[loss=0.2794, simple_loss=0.3459, pruned_loss=0.1065, over 18025.00 frames. ], tot_loss[loss=0.2917, simple_loss=0.3522, pruned_loss=0.1157, over 3592678.40 frames. ], batch size: 65, lr: 3.75e-02, grad_scale: 8.0 +2023-03-08 15:53:00,190 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4872, 2.9936, 1.8474, 3.6358, 2.7142, 3.7169, 2.0914, 3.4341], + device='cuda:1'), covar=tensor([0.0290, 0.0680, 0.1281, 0.0276, 0.0762, 0.0113, 0.1115, 0.0301], + device='cuda:1'), in_proj_covar=tensor([0.0082, 0.0120, 0.0123, 0.0073, 0.0113, 0.0057, 0.0118, 0.0107], + device='cuda:1'), out_proj_covar=tensor([1.0521e-04, 1.3834e-04, 1.3897e-04, 1.0846e-04, 1.3433e-04, 6.7122e-05, + 1.3200e-04, 1.1983e-04], device='cuda:1') +2023-03-08 15:53:00,209 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6939.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 15:53:01,327 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7108, 2.5698, 4.0311, 2.7217, 3.1317, 4.0674, 3.9140, 3.5700], + device='cuda:1'), covar=tensor([0.0201, 0.0403, 0.0075, 0.0474, 0.1085, 0.0041, 0.0134, 0.0168], + device='cuda:1'), in_proj_covar=tensor([0.0079, 0.0087, 0.0053, 0.0088, 0.0161, 0.0060, 0.0073, 0.0076], + device='cuda:1'), out_proj_covar=tensor([6.1758e-05, 6.6122e-05, 4.2048e-05, 6.8159e-05, 1.2143e-04, 4.0018e-05, + 5.4002e-05, 5.6667e-05], device='cuda:1') +2023-03-08 15:53:36,604 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6970.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:53:36,777 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6970.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:53:54,235 INFO [train.py:898] (1/4) Epoch 2, batch 3350, loss[loss=0.3186, simple_loss=0.3765, pruned_loss=0.1304, over 17154.00 frames. ], tot_loss[loss=0.2915, simple_loss=0.352, pruned_loss=0.1155, over 3583138.74 frames. 
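The zipformer.py:1455 dumps report batch statistics (mean and covariance across a sample) of each attention head's entropy. Entropy here is the usual -sum(p * log p) of a head's attention distribution, averaged over query positions: values of several nats, like the 3.5 to 4.4 means above, correspond to attending over dozens of source frames (e^3.5 is about 33), while values near 0 would indicate sharply peaked heads. A sketch of the underlying quantity (shapes and naming assumed):

    import torch

    def attention_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
        # attn_weights: (num_heads, tgt_len, src_len), rows sum to 1.
        # Returns one entropy value per head, in nats, averaged over
        # target positions; the upper bound log(src_len) means a head
        # attends uniformly over the whole source.
        eps = 1.0e-20
        ent = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
        return ent.mean(dim=-1)

    # e.g. 8 heads attending over 50 frames:
    # attention_entropy(torch.softmax(torch.randn(8, 30, 50), dim=-1))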
], batch size: 78, lr: 3.74e-02, grad_scale: 8.0 +2023-03-08 15:53:54,389 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6985.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:54:18,000 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4736, 5.4152, 4.7264, 5.4723, 5.4725, 4.8799, 5.3458, 4.8763], + device='cuda:1'), covar=tensor([0.0307, 0.0254, 0.1924, 0.0502, 0.0258, 0.0358, 0.0322, 0.0530], + device='cuda:1'), in_proj_covar=tensor([0.0194, 0.0209, 0.0346, 0.0168, 0.0166, 0.0201, 0.0211, 0.0238], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-08 15:54:19,730 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.76 vs. limit=2.0 +2023-03-08 15:54:32,197 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([1.6978, 4.4355, 4.4258, 4.0921, 3.0203, 2.1112, 4.2629, 4.8768], + device='cuda:1'), covar=tensor([0.1209, 0.0174, 0.0059, 0.0173, 0.0897, 0.1259, 0.0214, 0.0026], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0043, 0.0040, 0.0068, 0.0104, 0.0110, 0.0070, 0.0035], + device='cuda:1'), out_proj_covar=tensor([1.4079e-04, 8.0605e-05, 6.0119e-05, 1.0111e-04, 1.4641e-04, 1.5803e-04, + 1.0442e-04, 5.2166e-05], device='cuda:1') +2023-03-08 15:54:32,865 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.449e+02 5.349e+02 6.526e+02 8.040e+02 1.400e+03, threshold=1.305e+03, percent-clipped=1.0 +2023-03-08 15:54:53,424 INFO [train.py:898] (1/4) Epoch 2, batch 3400, loss[loss=0.3512, simple_loss=0.4002, pruned_loss=0.1511, over 18364.00 frames. ], tot_loss[loss=0.2913, simple_loss=0.3514, pruned_loss=0.1156, over 3585207.56 frames. ], batch size: 56, lr: 3.73e-02, grad_scale: 8.0 +2023-03-08 15:55:09,211 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7048.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 15:55:22,784 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0430, 3.8763, 4.0632, 3.0689, 2.9470, 2.2237, 2.3502, 1.6937], + device='cuda:1'), covar=tensor([0.0384, 0.0206, 0.0100, 0.0176, 0.0389, 0.0384, 0.0444, 0.0620], + device='cuda:1'), in_proj_covar=tensor([0.0025, 0.0023, 0.0021, 0.0022, 0.0033, 0.0021, 0.0031, 0.0032], + device='cuda:1'), out_proj_covar=tensor([1.0549e-04, 1.0949e-04, 8.1875e-05, 9.1532e-05, 1.3857e-04, 9.1637e-05, + 1.1623e-04, 1.2182e-04], device='cuda:1') +2023-03-08 15:55:52,776 INFO [train.py:898] (1/4) Epoch 2, batch 3450, loss[loss=0.2682, simple_loss=0.3332, pruned_loss=0.1016, over 18252.00 frames. ], tot_loss[loss=0.2914, simple_loss=0.3516, pruned_loss=0.1156, over 3583537.90 frames. ], batch size: 47, lr: 3.72e-02, grad_scale: 8.0 +2023-03-08 15:56:20,877 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7109.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 15:56:30,819 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.505e+02 5.757e+02 6.768e+02 8.417e+02 2.561e+03, threshold=1.354e+03, percent-clipped=6.0 +2023-03-08 15:56:32,057 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7118.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 15:56:37,304 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7123.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:56:51,287 INFO [train.py:898] (1/4) Epoch 2, batch 3500, loss[loss=0.2875, simple_loss=0.357, pruned_loss=0.109, over 17016.00 frames. 
], tot_loss[loss=0.2917, simple_loss=0.3522, pruned_loss=0.1156, over 3590954.37 frames. ], batch size: 78, lr: 3.71e-02, grad_scale: 8.0 +2023-03-08 15:57:23,499 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7163.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:57:26,594 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7166.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:57:31,616 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7171.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:57:46,617 INFO [train.py:898] (1/4) Epoch 2, batch 3550, loss[loss=0.3651, simple_loss=0.3961, pruned_loss=0.1671, over 12782.00 frames. ], tot_loss[loss=0.2911, simple_loss=0.3515, pruned_loss=0.1153, over 3582771.02 frames. ], batch size: 129, lr: 3.71e-02, grad_scale: 8.0 +2023-03-08 15:58:22,328 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.147e+02 5.640e+02 6.762e+02 8.858e+02 1.958e+03, threshold=1.352e+03, percent-clipped=2.0 +2023-03-08 15:58:30,199 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7224.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 15:58:40,539 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7234.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 15:58:41,467 INFO [train.py:898] (1/4) Epoch 2, batch 3600, loss[loss=0.2662, simple_loss=0.3261, pruned_loss=0.1032, over 18461.00 frames. ], tot_loss[loss=0.2908, simple_loss=0.351, pruned_loss=0.1153, over 3581279.28 frames. ], batch size: 43, lr: 3.70e-02, grad_scale: 8.0 +2023-03-08 15:58:52,028 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.96 vs. limit=2.0 +2023-03-08 15:59:04,118 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0769, 3.9966, 3.4661, 4.0311, 4.0487, 3.7077, 4.0692, 3.6223], + device='cuda:1'), covar=tensor([0.0364, 0.0519, 0.1810, 0.0516, 0.0303, 0.0385, 0.0368, 0.0560], + device='cuda:1'), in_proj_covar=tensor([0.0191, 0.0210, 0.0345, 0.0168, 0.0160, 0.0196, 0.0208, 0.0235], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 15:59:12,906 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7265.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:59:46,467 INFO [train.py:898] (1/4) Epoch 3, batch 0, loss[loss=0.269, simple_loss=0.3227, pruned_loss=0.1076, over 18435.00 frames. ], tot_loss[loss=0.269, simple_loss=0.3227, pruned_loss=0.1076, over 18435.00 frames. ], batch size: 43, lr: 3.51e-02, grad_scale: 8.0 +2023-03-08 15:59:46,467 INFO [train.py:923] (1/4) Computing validation loss +2023-03-08 15:59:58,151 INFO [train.py:932] (1/4) Epoch 3, validation: loss=0.2228, simple_loss=0.3215, pruned_loss=0.06204, over 944034.00 frames. +2023-03-08 15:59:58,152 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19527MB +2023-03-08 16:00:05,147 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7275.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:00:16,714 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7285.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:00:41,787 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. 
limit=2.0 +2023-03-08 16:00:54,868 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.526e+02 5.312e+02 6.673e+02 8.366e+02 1.427e+03, threshold=1.335e+03, percent-clipped=1.0 +2023-03-08 16:00:57,252 INFO [train.py:898] (1/4) Epoch 3, batch 50, loss[loss=0.2385, simple_loss=0.3029, pruned_loss=0.08699, over 18499.00 frames. ], tot_loss[loss=0.2825, simple_loss=0.3448, pruned_loss=0.1101, over 800395.79 frames. ], batch size: 44, lr: 3.50e-02, grad_scale: 8.0 +2023-03-08 16:01:14,311 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7333.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:01:18,023 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7336.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:01:57,072 INFO [train.py:898] (1/4) Epoch 3, batch 100, loss[loss=0.2636, simple_loss=0.3442, pruned_loss=0.09151, over 18394.00 frames. ], tot_loss[loss=0.2819, simple_loss=0.3454, pruned_loss=0.1093, over 1423447.94 frames. ], batch size: 52, lr: 3.49e-02, grad_scale: 8.0 +2023-03-08 16:02:39,732 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7404.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 16:02:43,174 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7407.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:02:54,251 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 5.439e+02 6.621e+02 7.725e+02 1.513e+03, threshold=1.324e+03, percent-clipped=3.0 +2023-03-08 16:02:56,555 INFO [train.py:898] (1/4) Epoch 3, batch 150, loss[loss=0.2985, simple_loss=0.3611, pruned_loss=0.118, over 18625.00 frames. ], tot_loss[loss=0.2822, simple_loss=0.3448, pruned_loss=0.1097, over 1902608.66 frames. ], batch size: 52, lr: 3.48e-02, grad_scale: 8.0 +2023-03-08 16:03:54,906 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7468.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:03:55,730 INFO [train.py:898] (1/4) Epoch 3, batch 200, loss[loss=0.2477, simple_loss=0.3093, pruned_loss=0.09305, over 18179.00 frames. ], tot_loss[loss=0.2812, simple_loss=0.3441, pruned_loss=0.1092, over 2278577.18 frames. ], batch size: 44, lr: 3.47e-02, grad_scale: 8.0 +2023-03-08 16:04:51,798 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.337e+02 5.369e+02 6.599e+02 8.256e+02 1.502e+03, threshold=1.320e+03, percent-clipped=3.0 +2023-03-08 16:04:54,120 INFO [train.py:898] (1/4) Epoch 3, batch 250, loss[loss=0.2525, simple_loss=0.316, pruned_loss=0.09449, over 18494.00 frames. ], tot_loss[loss=0.2827, simple_loss=0.3458, pruned_loss=0.1098, over 2574778.66 frames. 
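The learning rates in these entries decay smoothly in both batch count and epoch: epoch 3 opens at lr 3.51e-02 (batch_count near 7270) and reaches 3.48e-02 by batch 150 (batch_count near 7420). Both values are reproduced to three significant figures by an Eden-style schedule, lr = base_lr * ((b^2 + B^2)/B^2)^(-1/4) * ((e^2 + E^2)/E^2)^(-1/4), with base_lr = 0.05, B = 5000, E = 3.5 and e the number of completed epochs. These constants are fitted to the log rather than quoted from the recipe, so treat the sketch as an assumption:

    def eden_lr(base_lr, batch, epoch, lr_batches=5000.0, lr_epochs=3.5):
        # eden_lr(0.05, 7270, 2.0) -> 0.0351  (epoch 3, batch 0)
        # eden_lr(0.05, 7420, 2.0) -> 0.0348  (epoch 3, batch 150)
        # An additional warmup factor used at the very start of training
        # is omitted here.
        batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
        epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
        return base_lr * batch_factor * epoch_factor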
], batch size: 44, lr: 3.47e-02, grad_scale: 8.0 +2023-03-08 16:04:54,370 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7519.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 16:05:10,782 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7534.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 16:05:28,239 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([1.8910, 4.2919, 4.4504, 4.1481, 2.6342, 2.1166, 3.9969, 4.4432], + device='cuda:1'), covar=tensor([0.1155, 0.0216, 0.0077, 0.0165, 0.0997, 0.1158, 0.0211, 0.0038], + device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0048, 0.0044, 0.0075, 0.0112, 0.0118, 0.0077, 0.0038], + device='cuda:1'), out_proj_covar=tensor([1.5287e-04, 8.6713e-05, 6.7893e-05, 1.1357e-04, 1.6087e-04, 1.7277e-04, + 1.1652e-04, 5.9565e-05], device='cuda:1') +2023-03-08 16:05:32,652 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1921, 4.0540, 4.4943, 4.1343, 2.6937, 4.4224, 3.7458, 3.0034], + device='cuda:1'), covar=tensor([0.0142, 0.0173, 0.0056, 0.0123, 0.1069, 0.0064, 0.0200, 0.0768], + device='cuda:1'), in_proj_covar=tensor([0.0074, 0.0072, 0.0058, 0.0062, 0.0119, 0.0061, 0.0062, 0.0115], + device='cuda:1'), out_proj_covar=tensor([7.1354e-05, 6.9563e-05, 5.9368e-05, 6.2303e-05, 1.1509e-04, 5.8698e-05, + 6.9436e-05, 1.1425e-04], device='cuda:1') +2023-03-08 16:05:47,529 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7565.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:05:51,577 INFO [train.py:898] (1/4) Epoch 3, batch 300, loss[loss=0.2804, simple_loss=0.3457, pruned_loss=0.1075, over 18479.00 frames. ], tot_loss[loss=0.2821, simple_loss=0.3451, pruned_loss=0.1096, over 2802164.93 frames. ], batch size: 53, lr: 3.46e-02, grad_scale: 8.0 +2023-03-08 16:06:06,075 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7582.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 16:06:26,185 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5805, 3.6606, 3.9589, 2.8388, 3.7263, 3.4971, 3.4500, 2.3334], + device='cuda:1'), covar=tensor([0.0461, 0.0247, 0.0069, 0.0331, 0.0230, 0.0613, 0.0394, 0.0873], + device='cuda:1'), in_proj_covar=tensor([0.0083, 0.0078, 0.0051, 0.0077, 0.0075, 0.0124, 0.0056, 0.0104], + device='cuda:1'), out_proj_covar=tensor([7.9836e-05, 7.5162e-05, 4.7226e-05, 7.5476e-05, 7.5046e-05, 1.1521e-04, + 6.3697e-05, 9.4153e-05], device='cuda:1') +2023-03-08 16:06:38,134 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-03-08 16:06:43,937 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7613.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:06:48,189 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.523e+02 5.233e+02 6.862e+02 8.770e+02 1.930e+03, threshold=1.372e+03, percent-clipped=4.0 +2023-03-08 16:06:50,539 INFO [train.py:898] (1/4) Epoch 3, batch 350, loss[loss=0.2767, simple_loss=0.3494, pruned_loss=0.102, over 18618.00 frames. ], tot_loss[loss=0.2809, simple_loss=0.3443, pruned_loss=0.1087, over 2988103.85 frames. 
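Within an epoch the tot_loss frame counts first grow and then level off: epoch 3 shows 800395.79 frames at batch 50 rising to 2988103.85 at batch 350 above, while epoch 2 sat near 3.6M frames throughout. That is the signature of a frame-weighted accumulator whose totals decay by (1 - 1/W) every batch and restart each epoch: it saturates near W times the average frames per batch, and with W = 200 and about 18k frames per batch it predicts 0.80M, 2.57M, 2.80M and 2.97M frames at batches 50, 250, 300 and 350, each within about 0.5% of the logged counts. The mechanism and constants are fitted to the log, not quoted from train.py; a sketch:

    class WindowedLoss:
        # Decayed, frame-weighted loss average: tot_loss is the ratio of
        # two exponentially decayed sums, so it tracks roughly the last
        # `window` batches rather than the whole epoch.
        def __init__(self, window=200):
            self.decay = 1.0 - 1.0 / window
            self.loss_sum = 0.0   # decayed sum of loss * frames
            self.frames = 0.0     # decayed sum of frames

        def update(self, batch_loss, batch_frames):
            self.loss_sum = self.loss_sum * self.decay + batch_loss * batch_frames
            self.frames = self.frames * self.decay + batch_frames
            return self.loss_sum / self.frames   # the reported tot_loss

    # tracker = WindowedLoss(window=200)
    # for _ in range(50): tracker.update(2.8, 18000.0)
    # tracker.frames ~= 7.98e5, cf. 'over 800395.79 frames' at batch 50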
], batch size: 52, lr: 3.45e-02, grad_scale: 8.0 +2023-03-08 16:06:56,792 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7624.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:07:04,386 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7631.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:07:49,574 INFO [train.py:898] (1/4) Epoch 3, batch 400, loss[loss=0.2433, simple_loss=0.3225, pruned_loss=0.0821, over 18504.00 frames. ], tot_loss[loss=0.2784, simple_loss=0.3424, pruned_loss=0.1072, over 3130898.70 frames. ], batch size: 51, lr: 3.44e-02, grad_scale: 8.0 +2023-03-08 16:08:07,876 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7685.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:08:18,257 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0194, 2.9039, 1.8801, 2.6396, 2.6179, 2.2620, 2.0983, 3.1059], + device='cuda:1'), covar=tensor([0.0190, 0.0222, 0.0747, 0.0199, 0.0313, 0.0446, 0.0401, 0.0252], + device='cuda:1'), in_proj_covar=tensor([0.0039, 0.0040, 0.0045, 0.0050, 0.0039, 0.0059, 0.0071, 0.0039], + device='cuda:1'), out_proj_covar=tensor([5.5206e-05, 5.7123e-05, 7.1462e-05, 6.8651e-05, 5.6321e-05, 8.7334e-05, + 1.0790e-04, 6.2036e-05], device='cuda:1') +2023-03-08 16:08:29,740 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7704.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 16:08:47,268 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.778e+02 4.774e+02 5.790e+02 7.165e+02 2.375e+03, threshold=1.158e+03, percent-clipped=2.0 +2023-03-08 16:08:48,412 INFO [train.py:898] (1/4) Epoch 3, batch 450, loss[loss=0.2258, simple_loss=0.296, pruned_loss=0.07781, over 18492.00 frames. ], tot_loss[loss=0.2791, simple_loss=0.3428, pruned_loss=0.1077, over 3225208.75 frames. ], batch size: 44, lr: 3.44e-02, grad_scale: 8.0 +2023-03-08 16:09:15,846 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7743.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:09:26,347 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7752.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 16:09:28,043 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.58 vs. limit=2.0 +2023-03-08 16:09:38,558 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7763.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:09:46,279 INFO [train.py:898] (1/4) Epoch 3, batch 500, loss[loss=0.3082, simple_loss=0.3616, pruned_loss=0.1273, over 16105.00 frames. ], tot_loss[loss=0.2818, simple_loss=0.3445, pruned_loss=0.1095, over 3291797.48 frames. ], batch size: 95, lr: 3.43e-02, grad_scale: 8.0 +2023-03-08 16:09:47,144 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.40 vs. limit=5.0 +2023-03-08 16:09:50,841 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=5.50 vs. 
limit=5.0 +2023-03-08 16:10:26,080 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7802.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:10:28,401 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7804.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:10:45,271 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.544e+02 5.001e+02 6.338e+02 8.345e+02 2.616e+03, threshold=1.268e+03, percent-clipped=10.0 +2023-03-08 16:10:46,496 INFO [train.py:898] (1/4) Epoch 3, batch 550, loss[loss=0.3457, simple_loss=0.3864, pruned_loss=0.1525, over 12305.00 frames. ], tot_loss[loss=0.284, simple_loss=0.3463, pruned_loss=0.1109, over 3335841.95 frames. ], batch size: 131, lr: 3.42e-02, grad_scale: 8.0 +2023-03-08 16:10:46,718 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7819.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 16:11:06,589 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-08 16:11:10,602 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3461, 5.3481, 4.6281, 5.4405, 5.3611, 4.8406, 5.2783, 4.7073], + device='cuda:1'), covar=tensor([0.0336, 0.0272, 0.1602, 0.0381, 0.0299, 0.0397, 0.0320, 0.0521], + device='cuda:1'), in_proj_covar=tensor([0.0206, 0.0222, 0.0357, 0.0172, 0.0168, 0.0200, 0.0217, 0.0254], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-08 16:11:37,589 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7863.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:11:41,859 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7867.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:11:44,740 INFO [train.py:898] (1/4) Epoch 3, batch 600, loss[loss=0.2712, simple_loss=0.3364, pruned_loss=0.103, over 18567.00 frames. ], tot_loss[loss=0.2821, simple_loss=0.3445, pruned_loss=0.1098, over 3396504.77 frames. ], batch size: 54, lr: 3.41e-02, grad_scale: 8.0 +2023-03-08 16:12:25,889 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8938, 4.6244, 4.9399, 4.8295, 4.6894, 4.6441, 5.2903, 5.1445], + device='cuda:1'), covar=tensor([0.0082, 0.0108, 0.0082, 0.0094, 0.0115, 0.0108, 0.0090, 0.0096], + device='cuda:1'), in_proj_covar=tensor([0.0058, 0.0046, 0.0043, 0.0052, 0.0048, 0.0058, 0.0050, 0.0047], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:1') +2023-03-08 16:12:42,170 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.737e+02 5.313e+02 6.628e+02 8.433e+02 2.088e+03, threshold=1.326e+03, percent-clipped=7.0 +2023-03-08 16:12:43,819 INFO [train.py:898] (1/4) Epoch 3, batch 650, loss[loss=0.3274, simple_loss=0.3891, pruned_loss=0.1329, over 17956.00 frames. ], tot_loss[loss=0.2813, simple_loss=0.3436, pruned_loss=0.1095, over 3438538.84 frames. ], batch size: 70, lr: 3.40e-02, grad_scale: 8.0 +2023-03-08 16:12:57,095 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.78 vs. 
limit=2.0 +2023-03-08 16:12:59,206 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7931.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:13:36,387 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9908, 4.7185, 2.2214, 4.6482, 4.9829, 2.3217, 3.7409, 3.7213], + device='cuda:1'), covar=tensor([0.0048, 0.0317, 0.1682, 0.0233, 0.0035, 0.1477, 0.0618, 0.0696], + device='cuda:1'), in_proj_covar=tensor([0.0067, 0.0081, 0.0153, 0.0112, 0.0060, 0.0141, 0.0142, 0.0133], + device='cuda:1'), out_proj_covar=tensor([7.5937e-05, 1.1262e-04, 1.6195e-04, 1.2580e-04, 6.8698e-05, 1.5390e-04, + 1.5759e-04, 1.5557e-04], device='cuda:1') +2023-03-08 16:13:42,845 INFO [train.py:898] (1/4) Epoch 3, batch 700, loss[loss=0.276, simple_loss=0.3508, pruned_loss=0.1006, over 18627.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.3445, pruned_loss=0.1091, over 3479980.06 frames. ], batch size: 52, lr: 3.40e-02, grad_scale: 8.0 +2023-03-08 16:13:55,675 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7979.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:13:57,409 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7980.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:14:45,128 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.136e+02 5.602e+02 6.674e+02 7.907e+02 1.811e+03, threshold=1.335e+03, percent-clipped=1.0 +2023-03-08 16:14:46,260 INFO [train.py:898] (1/4) Epoch 3, batch 750, loss[loss=0.2488, simple_loss=0.3159, pruned_loss=0.09079, over 18279.00 frames. ], tot_loss[loss=0.2802, simple_loss=0.3437, pruned_loss=0.1084, over 3511934.58 frames. ], batch size: 49, lr: 3.39e-02, grad_scale: 8.0 +2023-03-08 16:14:55,216 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1091, 2.9629, 2.3856, 2.4783, 2.8949, 2.0917, 1.8848, 3.0205], + device='cuda:1'), covar=tensor([0.0183, 0.0141, 0.0502, 0.0252, 0.0165, 0.0472, 0.0582, 0.0258], + device='cuda:1'), in_proj_covar=tensor([0.0040, 0.0043, 0.0046, 0.0052, 0.0040, 0.0061, 0.0073, 0.0042], + device='cuda:1'), out_proj_covar=tensor([5.6136e-05, 6.1696e-05, 7.3382e-05, 7.2062e-05, 5.7563e-05, 9.1447e-05, + 1.1373e-04, 6.6592e-05], device='cuda:1') +2023-03-08 16:15:38,379 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8063.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:15:45,003 INFO [train.py:898] (1/4) Epoch 3, batch 800, loss[loss=0.2915, simple_loss=0.3576, pruned_loss=0.1127, over 18361.00 frames. ], tot_loss[loss=0.2802, simple_loss=0.3437, pruned_loss=0.1083, over 3528665.19 frames. ], batch size: 55, lr: 3.38e-02, grad_scale: 8.0 +2023-03-08 16:16:21,799 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8099.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:16:31,269 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9278, 2.8433, 1.9324, 3.2631, 2.5235, 3.2225, 2.0070, 2.8296], + device='cuda:1'), covar=tensor([0.0421, 0.0771, 0.1214, 0.0368, 0.0699, 0.0147, 0.1050, 0.0376], + device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0142, 0.0139, 0.0087, 0.0128, 0.0068, 0.0133, 0.0121], + device='cuda:1'), out_proj_covar=tensor([1.3808e-04, 1.7212e-04, 1.6470e-04, 1.3772e-04, 1.6107e-04, 8.8384e-05, + 1.5551e-04, 1.4700e-04], device='cuda:1') +2023-03-08 16:16:31,604 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=2.28 vs. 
limit=2.0 +2023-03-08 16:16:35,653 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8111.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:16:43,310 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.074e+02 5.395e+02 6.687e+02 8.203e+02 1.547e+03, threshold=1.337e+03, percent-clipped=3.0 +2023-03-08 16:16:44,974 INFO [train.py:898] (1/4) Epoch 3, batch 850, loss[loss=0.2964, simple_loss=0.3606, pruned_loss=0.1161, over 17817.00 frames. ], tot_loss[loss=0.2796, simple_loss=0.3433, pruned_loss=0.108, over 3546137.78 frames. ], batch size: 70, lr: 3.37e-02, grad_scale: 8.0 +2023-03-08 16:16:52,247 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7342, 3.4272, 1.5409, 4.3181, 2.6578, 4.3731, 2.0974, 3.8697], + device='cuda:1'), covar=tensor([0.0437, 0.0929, 0.2180, 0.0298, 0.1449, 0.0113, 0.1516, 0.0363], + device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0144, 0.0141, 0.0089, 0.0130, 0.0069, 0.0135, 0.0123], + device='cuda:1'), out_proj_covar=tensor([1.4022e-04, 1.7445e-04, 1.6672e-04, 1.4073e-04, 1.6338e-04, 9.0433e-05, + 1.5847e-04, 1.4855e-04], device='cuda:1') +2023-03-08 16:17:31,438 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8158.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:17:41,857 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8832, 5.2140, 3.2868, 4.9608, 4.8849, 5.1564, 4.8153, 2.5141], + device='cuda:1'), covar=tensor([0.0230, 0.0052, 0.0558, 0.0069, 0.0082, 0.0075, 0.0140, 0.1157], + device='cuda:1'), in_proj_covar=tensor([0.0050, 0.0034, 0.0062, 0.0038, 0.0040, 0.0035, 0.0042, 0.0075], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 16:17:43,855 INFO [train.py:898] (1/4) Epoch 3, batch 900, loss[loss=0.3216, simple_loss=0.38, pruned_loss=0.1316, over 18263.00 frames. ], tot_loss[loss=0.2789, simple_loss=0.3426, pruned_loss=0.1076, over 3562688.52 frames. ], batch size: 60, lr: 3.37e-02, grad_scale: 8.0 +2023-03-08 16:17:50,243 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3781, 3.3811, 1.2622, 4.2206, 2.7924, 4.2161, 2.0718, 3.6446], + device='cuda:1'), covar=tensor([0.0530, 0.0836, 0.2266, 0.0212, 0.1069, 0.0083, 0.1409, 0.0344], + device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0141, 0.0136, 0.0086, 0.0127, 0.0068, 0.0132, 0.0121], + device='cuda:1'), out_proj_covar=tensor([1.3934e-04, 1.7178e-04, 1.6179e-04, 1.3640e-04, 1.5999e-04, 8.8375e-05, + 1.5424e-04, 1.4658e-04], device='cuda:1') +2023-03-08 16:18:22,271 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.95 vs. limit=2.0 +2023-03-08 16:18:42,377 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.447e+02 5.230e+02 6.588e+02 8.350e+02 1.625e+03, threshold=1.318e+03, percent-clipped=3.0 +2023-03-08 16:18:43,442 INFO [train.py:898] (1/4) Epoch 3, batch 950, loss[loss=0.3018, simple_loss=0.3677, pruned_loss=0.118, over 18216.00 frames. ], tot_loss[loss=0.2768, simple_loss=0.3409, pruned_loss=0.1064, over 3578939.67 frames. 
], batch size: 60, lr: 3.36e-02, grad_scale: 8.0 +2023-03-08 16:19:37,019 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.4389, 5.2215, 5.1238, 4.9150, 4.8371, 5.0511, 4.4972, 5.0186], + device='cuda:1'), covar=tensor([0.0272, 0.0325, 0.0227, 0.0265, 0.0450, 0.0244, 0.0864, 0.0225], + device='cuda:1'), in_proj_covar=tensor([0.0097, 0.0128, 0.0108, 0.0098, 0.0122, 0.0124, 0.0167, 0.0111], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-08 16:19:38,288 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8265.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:19:42,216 INFO [train.py:898] (1/4) Epoch 3, batch 1000, loss[loss=0.3443, simple_loss=0.3854, pruned_loss=0.1516, over 12717.00 frames. ], tot_loss[loss=0.2763, simple_loss=0.3411, pruned_loss=0.1058, over 3584438.92 frames. ], batch size: 129, lr: 3.35e-02, grad_scale: 8.0 +2023-03-08 16:19:55,436 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8280.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:20:01,779 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.61 vs. limit=2.0 +2023-03-08 16:20:33,064 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8616, 3.9408, 4.9995, 3.3263, 4.2410, 3.7952, 3.7607, 2.1735], + device='cuda:1'), covar=tensor([0.0483, 0.0268, 0.0047, 0.0330, 0.0280, 0.0771, 0.0430, 0.1108], + device='cuda:1'), in_proj_covar=tensor([0.0095, 0.0088, 0.0054, 0.0084, 0.0092, 0.0140, 0.0068, 0.0116], + device='cuda:1'), out_proj_covar=tensor([9.4105e-05, 8.6891e-05, 5.2474e-05, 8.2961e-05, 9.4893e-05, 1.3412e-04, + 7.8267e-05, 1.0828e-04], device='cuda:1') +2023-03-08 16:20:40,390 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.475e+02 5.034e+02 6.200e+02 8.356e+02 1.633e+03, threshold=1.240e+03, percent-clipped=2.0 +2023-03-08 16:20:41,579 INFO [train.py:898] (1/4) Epoch 3, batch 1050, loss[loss=0.2811, simple_loss=0.3479, pruned_loss=0.1072, over 18347.00 frames. ], tot_loss[loss=0.2769, simple_loss=0.3418, pruned_loss=0.106, over 3589841.50 frames. ], batch size: 56, lr: 3.34e-02, grad_scale: 8.0 +2023-03-08 16:20:49,625 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8326.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:20:51,654 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8328.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:21:35,147 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=2.06 vs. limit=2.0 +2023-03-08 16:21:40,129 INFO [train.py:898] (1/4) Epoch 3, batch 1100, loss[loss=0.2592, simple_loss=0.3192, pruned_loss=0.09962, over 18386.00 frames. ], tot_loss[loss=0.2766, simple_loss=0.3415, pruned_loss=0.1059, over 3592411.99 frames. ], batch size: 42, lr: 3.34e-02, grad_scale: 8.0 +2023-03-08 16:22:16,095 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8399.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:22:38,120 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.304e+02 5.764e+02 6.854e+02 8.468e+02 1.709e+03, threshold=1.371e+03, percent-clipped=7.0 +2023-03-08 16:22:39,111 INFO [train.py:898] (1/4) Epoch 3, batch 1150, loss[loss=0.2646, simple_loss=0.3232, pruned_loss=0.103, over 18144.00 frames. ], tot_loss[loss=0.2781, simple_loss=0.3424, pruned_loss=0.1069, over 3581590.94 frames. 
], batch size: 44, lr: 3.33e-02, grad_scale: 8.0 +2023-03-08 16:23:12,004 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8447.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:23:26,396 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8458.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:23:38,510 INFO [train.py:898] (1/4) Epoch 3, batch 1200, loss[loss=0.2954, simple_loss=0.3554, pruned_loss=0.1177, over 18091.00 frames. ], tot_loss[loss=0.2783, simple_loss=0.3428, pruned_loss=0.107, over 3582281.12 frames. ], batch size: 62, lr: 3.32e-02, grad_scale: 8.0 +2023-03-08 16:23:42,539 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3203, 5.4780, 2.6868, 5.1530, 5.0616, 5.5437, 5.3438, 3.0229], + device='cuda:1'), covar=tensor([0.0151, 0.0042, 0.0714, 0.0050, 0.0057, 0.0033, 0.0080, 0.0950], + device='cuda:1'), in_proj_covar=tensor([0.0051, 0.0035, 0.0066, 0.0039, 0.0042, 0.0036, 0.0043, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0003], + device='cuda:1') +2023-03-08 16:23:52,993 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.2633, 3.2474, 2.4795, 2.6115, 2.9113, 2.1329, 2.1636, 3.0875], + device='cuda:1'), covar=tensor([0.0189, 0.0148, 0.0488, 0.0198, 0.0177, 0.0384, 0.0398, 0.0241], + device='cuda:1'), in_proj_covar=tensor([0.0040, 0.0042, 0.0045, 0.0053, 0.0040, 0.0062, 0.0071, 0.0044], + device='cuda:1'), out_proj_covar=tensor([5.7527e-05, 6.2157e-05, 7.2708e-05, 7.4861e-05, 5.9168e-05, 9.4135e-05, + 1.1158e-04, 6.8886e-05], device='cuda:1') +2023-03-08 16:24:22,273 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8506.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:24:25,968 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.57 vs. limit=2.0 +2023-03-08 16:24:31,826 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.7326, 5.2298, 5.6791, 5.5802, 5.4878, 6.3210, 5.7264, 5.6271], + device='cuda:1'), covar=tensor([0.0638, 0.0542, 0.0495, 0.0504, 0.1247, 0.0612, 0.0531, 0.1449], + device='cuda:1'), in_proj_covar=tensor([0.0195, 0.0147, 0.0154, 0.0146, 0.0205, 0.0212, 0.0145, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 16:24:36,205 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.117e+02 5.075e+02 6.913e+02 8.856e+02 3.555e+03, threshold=1.383e+03, percent-clipped=10.0 +2023-03-08 16:24:37,347 INFO [train.py:898] (1/4) Epoch 3, batch 1250, loss[loss=0.2739, simple_loss=0.3512, pruned_loss=0.09833, over 18622.00 frames. ], tot_loss[loss=0.2765, simple_loss=0.3412, pruned_loss=0.1059, over 3593748.97 frames. ], batch size: 52, lr: 3.31e-02, grad_scale: 8.0 +2023-03-08 16:25:36,311 INFO [train.py:898] (1/4) Epoch 3, batch 1300, loss[loss=0.2866, simple_loss=0.3518, pruned_loss=0.1107, over 18554.00 frames. ], tot_loss[loss=0.2763, simple_loss=0.3408, pruned_loss=0.1059, over 3587145.34 frames. 
], batch size: 54, lr: 3.31e-02, grad_scale: 8.0 +2023-03-08 16:26:13,135 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4689, 3.1138, 1.4058, 4.0303, 2.6822, 3.9004, 2.1302, 3.5070], + device='cuda:1'), covar=tensor([0.0370, 0.0704, 0.1675, 0.0262, 0.0877, 0.0082, 0.1062, 0.0292], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0146, 0.0139, 0.0095, 0.0130, 0.0068, 0.0133, 0.0122], + device='cuda:1'), out_proj_covar=tensor([1.4176e-04, 1.8142e-04, 1.6855e-04, 1.5331e-04, 1.6799e-04, 9.1929e-05, + 1.5824e-04, 1.5044e-04], device='cuda:1') +2023-03-08 16:26:35,146 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.185e+02 5.027e+02 6.070e+02 7.698e+02 1.470e+03, threshold=1.214e+03, percent-clipped=1.0 +2023-03-08 16:26:36,227 INFO [train.py:898] (1/4) Epoch 3, batch 1350, loss[loss=0.2913, simple_loss=0.3582, pruned_loss=0.1122, over 18297.00 frames. ], tot_loss[loss=0.2759, simple_loss=0.3409, pruned_loss=0.1054, over 3597970.62 frames. ], batch size: 54, lr: 3.30e-02, grad_scale: 8.0 +2023-03-08 16:26:38,818 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8621.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:26:51,107 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.4214, 5.2946, 5.2355, 5.0504, 5.0732, 5.0831, 4.6230, 5.0253], + device='cuda:1'), covar=tensor([0.0300, 0.0222, 0.0189, 0.0210, 0.0296, 0.0237, 0.0778, 0.0214], + device='cuda:1'), in_proj_covar=tensor([0.0095, 0.0128, 0.0108, 0.0097, 0.0121, 0.0123, 0.0169, 0.0111], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:1') +2023-03-08 16:27:07,268 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6829, 1.8647, 4.5682, 2.8739, 3.2721, 4.7159, 4.0672, 4.0962], + device='cuda:1'), covar=tensor([0.0239, 0.0667, 0.0129, 0.0493, 0.1258, 0.0038, 0.0191, 0.0174], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0109, 0.0060, 0.0114, 0.0191, 0.0070, 0.0090, 0.0090], + device='cuda:1'), out_proj_covar=tensor([7.6290e-05, 8.7252e-05, 4.9877e-05, 8.9745e-05, 1.5225e-04, 4.9518e-05, + 7.3735e-05, 7.1736e-05], device='cuda:1') +2023-03-08 16:27:35,132 INFO [train.py:898] (1/4) Epoch 3, batch 1400, loss[loss=0.2373, simple_loss=0.3029, pruned_loss=0.08582, over 18266.00 frames. ], tot_loss[loss=0.2762, simple_loss=0.3411, pruned_loss=0.1057, over 3598989.09 frames. ], batch size: 45, lr: 3.29e-02, grad_scale: 8.0 +2023-03-08 16:27:50,186 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5436, 4.5934, 4.6452, 4.5056, 4.6808, 4.5448, 4.8509, 4.9138], + device='cuda:1'), covar=tensor([0.0086, 0.0102, 0.0111, 0.0094, 0.0074, 0.0109, 0.0104, 0.0080], + device='cuda:1'), in_proj_covar=tensor([0.0058, 0.0047, 0.0043, 0.0055, 0.0048, 0.0060, 0.0053, 0.0047], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-08 16:28:32,215 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.853e+02 5.972e+02 7.488e+02 8.927e+02 1.917e+03, threshold=1.498e+03, percent-clipped=2.0 +2023-03-08 16:28:33,383 INFO [train.py:898] (1/4) Epoch 3, batch 1450, loss[loss=0.2937, simple_loss=0.3639, pruned_loss=0.1118, over 18184.00 frames. ], tot_loss[loss=0.2766, simple_loss=0.3413, pruned_loss=0.1059, over 3587733.95 frames. 
], batch size: 60, lr: 3.29e-02, grad_scale: 8.0 +2023-03-08 16:28:40,238 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4076, 3.0570, 1.4495, 4.0603, 2.5142, 4.2787, 1.8674, 3.6535], + device='cuda:1'), covar=tensor([0.0444, 0.0971, 0.1863, 0.0295, 0.1173, 0.0079, 0.1307, 0.0327], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0151, 0.0140, 0.0095, 0.0133, 0.0069, 0.0138, 0.0126], + device='cuda:1'), out_proj_covar=tensor([1.4332e-04, 1.8929e-04, 1.7080e-04, 1.5416e-04, 1.7224e-04, 9.3759e-05, + 1.6561e-04, 1.5655e-04], device='cuda:1') +2023-03-08 16:29:21,394 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8760.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:29:32,456 INFO [train.py:898] (1/4) Epoch 3, batch 1500, loss[loss=0.2511, simple_loss=0.3142, pruned_loss=0.09399, over 18478.00 frames. ], tot_loss[loss=0.2768, simple_loss=0.3417, pruned_loss=0.1059, over 3581424.94 frames. ], batch size: 44, lr: 3.28e-02, grad_scale: 8.0 +2023-03-08 16:30:29,512 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.789e+02 5.215e+02 6.562e+02 8.622e+02 2.061e+03, threshold=1.312e+03, percent-clipped=5.0 +2023-03-08 16:30:30,762 INFO [train.py:898] (1/4) Epoch 3, batch 1550, loss[loss=0.2926, simple_loss=0.361, pruned_loss=0.1121, over 18584.00 frames. ], tot_loss[loss=0.2778, simple_loss=0.3426, pruned_loss=0.1065, over 3576007.10 frames. ], batch size: 54, lr: 3.27e-02, grad_scale: 8.0 +2023-03-08 16:30:34,101 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8821.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:30:37,021 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2776, 4.8164, 2.2074, 4.7681, 4.8799, 2.4730, 4.3183, 3.8162], + device='cuda:1'), covar=tensor([0.0088, 0.0498, 0.1988, 0.0334, 0.0094, 0.1764, 0.0534, 0.0915], + device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0096, 0.0157, 0.0127, 0.0064, 0.0148, 0.0152, 0.0144], + device='cuda:1'), out_proj_covar=tensor([8.4835e-05, 1.3374e-04, 1.7311e-04, 1.4534e-04, 7.7574e-05, 1.6864e-04, + 1.7366e-04, 1.7359e-04], device='cuda:1') +2023-03-08 16:31:28,685 INFO [train.py:898] (1/4) Epoch 3, batch 1600, loss[loss=0.2324, simple_loss=0.3056, pruned_loss=0.07957, over 18259.00 frames. ], tot_loss[loss=0.2761, simple_loss=0.3412, pruned_loss=0.1055, over 3586225.14 frames. ], batch size: 47, lr: 3.26e-02, grad_scale: 8.0 +2023-03-08 16:31:52,383 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-08 16:31:56,008 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.02 vs. 
limit=2.0 +2023-03-08 16:31:59,023 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7010, 4.6378, 4.8595, 4.6175, 4.4370, 4.6095, 5.0259, 5.1187], + device='cuda:1'), covar=tensor([0.0088, 0.0101, 0.0110, 0.0099, 0.0113, 0.0114, 0.0079, 0.0075], + device='cuda:1'), in_proj_covar=tensor([0.0058, 0.0046, 0.0043, 0.0054, 0.0049, 0.0061, 0.0053, 0.0049], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 16:32:17,345 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7015, 3.9432, 4.6211, 2.9603, 3.8747, 3.4598, 3.6777, 2.1196], + device='cuda:1'), covar=tensor([0.0567, 0.0317, 0.0061, 0.0407, 0.0435, 0.0998, 0.0412, 0.1216], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0099, 0.0058, 0.0090, 0.0104, 0.0154, 0.0076, 0.0126], + device='cuda:1'), out_proj_covar=tensor([1.0386e-04, 1.0028e-04, 5.7033e-05, 8.9968e-05, 1.0872e-04, 1.4894e-04, + 9.0099e-05, 1.1995e-04], device='cuda:1') +2023-03-08 16:32:26,430 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.256e+02 5.119e+02 6.262e+02 8.326e+02 2.097e+03, threshold=1.252e+03, percent-clipped=4.0 +2023-03-08 16:32:27,498 INFO [train.py:898] (1/4) Epoch 3, batch 1650, loss[loss=0.3135, simple_loss=0.361, pruned_loss=0.1329, over 18282.00 frames. ], tot_loss[loss=0.2756, simple_loss=0.3408, pruned_loss=0.1052, over 3581017.52 frames. ], batch size: 57, lr: 3.26e-02, grad_scale: 8.0 +2023-03-08 16:32:30,052 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8921.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:32:37,648 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8927.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:33:26,966 INFO [train.py:898] (1/4) Epoch 3, batch 1700, loss[loss=0.2714, simple_loss=0.3349, pruned_loss=0.104, over 18299.00 frames. ], tot_loss[loss=0.2753, simple_loss=0.3406, pruned_loss=0.105, over 3580701.46 frames. 
], batch size: 49, lr: 3.25e-02, grad_scale: 8.0 +2023-03-08 16:33:27,141 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8969.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:33:37,027 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1384, 4.6591, 4.9828, 4.7479, 4.7287, 4.7634, 5.2687, 5.1988], + device='cuda:1'), covar=tensor([0.0070, 0.0160, 0.0127, 0.0117, 0.0111, 0.0119, 0.0096, 0.0132], + device='cuda:1'), in_proj_covar=tensor([0.0060, 0.0047, 0.0043, 0.0055, 0.0050, 0.0061, 0.0055, 0.0050], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 16:33:39,852 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1434, 5.7107, 5.3108, 5.4032, 5.0621, 5.4193, 5.8236, 5.6546], + device='cuda:1'), covar=tensor([0.1007, 0.0574, 0.0406, 0.0632, 0.1576, 0.0524, 0.0437, 0.0622], + device='cuda:1'), in_proj_covar=tensor([0.0306, 0.0258, 0.0202, 0.0266, 0.0372, 0.0270, 0.0261, 0.0239], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 16:33:40,036 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0076, 4.4592, 1.7660, 4.5937, 5.0247, 2.3522, 3.7958, 3.7172], + device='cuda:1'), covar=tensor([0.0058, 0.0435, 0.1841, 0.0262, 0.0051, 0.1445, 0.0603, 0.0806], + device='cuda:1'), in_proj_covar=tensor([0.0073, 0.0095, 0.0160, 0.0128, 0.0064, 0.0150, 0.0154, 0.0147], + device='cuda:1'), out_proj_covar=tensor([8.6962e-05, 1.3664e-04, 1.7697e-04, 1.4803e-04, 7.7910e-05, 1.7139e-04, + 1.7771e-04, 1.7706e-04], device='cuda:1') +2023-03-08 16:33:48,764 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8986.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:33:51,291 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8988.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:34:19,920 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9013.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:34:25,795 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.324e+02 5.263e+02 6.332e+02 7.938e+02 1.916e+03, threshold=1.266e+03, percent-clipped=4.0 +2023-03-08 16:34:26,990 INFO [train.py:898] (1/4) Epoch 3, batch 1750, loss[loss=0.2705, simple_loss=0.3444, pruned_loss=0.09829, over 18362.00 frames. ], tot_loss[loss=0.2751, simple_loss=0.3405, pruned_loss=0.1048, over 3579610.28 frames. ], batch size: 55, lr: 3.24e-02, grad_scale: 8.0 +2023-03-08 16:34:54,970 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0009, 2.7981, 2.3692, 2.3423, 2.3913, 2.1594, 2.0888, 2.8974], + device='cuda:1'), covar=tensor([0.0139, 0.0186, 0.0344, 0.0207, 0.0308, 0.0329, 0.0341, 0.0271], + device='cuda:1'), in_proj_covar=tensor([0.0041, 0.0045, 0.0048, 0.0060, 0.0044, 0.0066, 0.0077, 0.0044], + device='cuda:1'), out_proj_covar=tensor([5.9586e-05, 6.8140e-05, 7.7046e-05, 8.7754e-05, 6.6748e-05, 1.0169e-04, + 1.2263e-04, 6.9725e-05], device='cuda:1') +2023-03-08 16:35:00,503 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9047.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:35:00,883 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.10 vs. limit=5.0 +2023-03-08 16:35:25,205 INFO [train.py:898] (1/4) Epoch 3, batch 1800, loss[loss=0.3637, simple_loss=0.4018, pruned_loss=0.1628, over 12604.00 frames. 
], tot_loss[loss=0.2756, simple_loss=0.341, pruned_loss=0.1051, over 3575629.78 frames. ], batch size: 130, lr: 3.24e-02, grad_scale: 8.0 +2023-03-08 16:35:31,943 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9074.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:35:47,094 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4894, 3.2437, 1.5986, 4.0790, 2.7646, 4.4225, 2.2244, 4.1735], + device='cuda:1'), covar=tensor([0.0413, 0.0939, 0.1799, 0.0297, 0.0997, 0.0067, 0.1159, 0.0255], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0151, 0.0142, 0.0102, 0.0135, 0.0070, 0.0140, 0.0126], + device='cuda:1'), out_proj_covar=tensor([1.4953e-04, 1.9009e-04, 1.7629e-04, 1.6559e-04, 1.7538e-04, 9.6513e-05, + 1.6959e-04, 1.5753e-04], device='cuda:1') +2023-03-08 16:35:59,669 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.94 vs. limit=2.0 +2023-03-08 16:36:20,744 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9116.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:36:22,835 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.642e+02 5.266e+02 6.349e+02 9.123e+02 1.669e+03, threshold=1.270e+03, percent-clipped=4.0 +2023-03-08 16:36:24,030 INFO [train.py:898] (1/4) Epoch 3, batch 1850, loss[loss=0.2663, simple_loss=0.3381, pruned_loss=0.09721, over 18378.00 frames. ], tot_loss[loss=0.275, simple_loss=0.3405, pruned_loss=0.1048, over 3579036.45 frames. ], batch size: 50, lr: 3.23e-02, grad_scale: 8.0 +2023-03-08 16:37:22,887 INFO [train.py:898] (1/4) Epoch 3, batch 1900, loss[loss=0.3379, simple_loss=0.3906, pruned_loss=0.1426, over 18397.00 frames. ], tot_loss[loss=0.2745, simple_loss=0.3398, pruned_loss=0.1046, over 3583535.80 frames. ], batch size: 52, lr: 3.22e-02, grad_scale: 8.0 +2023-03-08 16:38:20,288 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.837e+02 5.068e+02 6.125e+02 7.856e+02 1.754e+03, threshold=1.225e+03, percent-clipped=6.0 +2023-03-08 16:38:21,520 INFO [train.py:898] (1/4) Epoch 3, batch 1950, loss[loss=0.2648, simple_loss=0.3243, pruned_loss=0.1027, over 18563.00 frames. ], tot_loss[loss=0.2743, simple_loss=0.3396, pruned_loss=0.1045, over 3593952.93 frames. ], batch size: 45, lr: 3.22e-02, grad_scale: 8.0 +2023-03-08 16:38:23,439 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.58 vs. limit=2.0 +2023-03-08 16:39:21,350 INFO [train.py:898] (1/4) Epoch 3, batch 2000, loss[loss=0.314, simple_loss=0.3668, pruned_loss=0.1306, over 17111.00 frames. ], tot_loss[loss=0.2743, simple_loss=0.34, pruned_loss=0.1043, over 3597198.53 frames. 
], batch size: 78, lr: 3.21e-02, grad_scale: 8.0 +2023-03-08 16:39:38,155 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9283.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:39:47,699 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5039, 5.1628, 5.5015, 5.4537, 5.3128, 6.0286, 5.5152, 5.5302], + device='cuda:1'), covar=tensor([0.0581, 0.0488, 0.0467, 0.0452, 0.1013, 0.0609, 0.0578, 0.1072], + device='cuda:1'), in_proj_covar=tensor([0.0208, 0.0154, 0.0163, 0.0151, 0.0214, 0.0223, 0.0146, 0.0222], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-08 16:40:19,551 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.362e+02 5.058e+02 6.498e+02 8.656e+02 1.894e+03, threshold=1.300e+03, percent-clipped=4.0 +2023-03-08 16:40:20,760 INFO [train.py:898] (1/4) Epoch 3, batch 2050, loss[loss=0.2956, simple_loss=0.3551, pruned_loss=0.118, over 16172.00 frames. ], tot_loss[loss=0.274, simple_loss=0.3395, pruned_loss=0.1043, over 3591249.34 frames. ], batch size: 95, lr: 3.20e-02, grad_scale: 8.0 +2023-03-08 16:40:47,888 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9342.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:40:50,975 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6897, 3.0535, 2.3425, 3.1402, 3.5479, 3.6591, 3.1530, 3.3397], + device='cuda:1'), covar=tensor([0.0259, 0.0244, 0.0821, 0.0290, 0.0284, 0.0225, 0.0249, 0.0137], + device='cuda:1'), in_proj_covar=tensor([0.0070, 0.0056, 0.0109, 0.0079, 0.0065, 0.0046, 0.0068, 0.0061], + device='cuda:1'), out_proj_covar=tensor([1.2180e-04, 9.9882e-05, 1.7868e-04, 1.2808e-04, 1.1396e-04, 7.7522e-05, + 1.1787e-04, 9.9300e-05], device='cuda:1') +2023-03-08 16:41:06,835 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1134, 5.7266, 5.2689, 5.4169, 5.1563, 5.4459, 5.8268, 5.6706], + device='cuda:1'), covar=tensor([0.1160, 0.0657, 0.0393, 0.0719, 0.1512, 0.0558, 0.0482, 0.0652], + device='cuda:1'), in_proj_covar=tensor([0.0315, 0.0268, 0.0206, 0.0278, 0.0398, 0.0289, 0.0287, 0.0247], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:1') +2023-03-08 16:41:20,234 INFO [train.py:898] (1/4) Epoch 3, batch 2100, loss[loss=0.2324, simple_loss=0.3007, pruned_loss=0.08201, over 17678.00 frames. ], tot_loss[loss=0.2729, simple_loss=0.339, pruned_loss=0.1034, over 3603615.50 frames. ], batch size: 39, lr: 3.20e-02, grad_scale: 8.0 +2023-03-08 16:41:20,494 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9369.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:42:02,464 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.96 vs. limit=2.0 +2023-03-08 16:42:13,664 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9414.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:42:15,879 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9416.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:42:17,744 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.310e+02 4.905e+02 6.085e+02 7.510e+02 1.544e+03, threshold=1.217e+03, percent-clipped=2.0 +2023-03-08 16:42:18,949 INFO [train.py:898] (1/4) Epoch 3, batch 2150, loss[loss=0.2533, simple_loss=0.3184, pruned_loss=0.09411, over 18490.00 frames. 
], tot_loss[loss=0.2726, simple_loss=0.3386, pruned_loss=0.1033, over 3597691.64 frames. ], batch size: 47, lr: 3.19e-02, grad_scale: 8.0 +2023-03-08 16:43:12,979 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9464.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:43:18,565 INFO [train.py:898] (1/4) Epoch 3, batch 2200, loss[loss=0.3135, simple_loss=0.3676, pruned_loss=0.1297, over 18529.00 frames. ], tot_loss[loss=0.2728, simple_loss=0.339, pruned_loss=0.1033, over 3595527.99 frames. ], batch size: 49, lr: 3.18e-02, grad_scale: 8.0 +2023-03-08 16:43:25,616 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9475.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:43:35,936 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6746, 4.1385, 4.7776, 3.2121, 4.2044, 3.7186, 3.7662, 2.1264], + device='cuda:1'), covar=tensor([0.0451, 0.0235, 0.0035, 0.0297, 0.0276, 0.0681, 0.0510, 0.0996], + device='cuda:1'), in_proj_covar=tensor([0.0108, 0.0106, 0.0060, 0.0094, 0.0113, 0.0160, 0.0084, 0.0132], + device='cuda:1'), out_proj_covar=tensor([1.0957e-04, 1.0939e-04, 6.1255e-05, 9.6290e-05, 1.1837e-04, 1.5601e-04, + 9.8582e-05, 1.2774e-04], device='cuda:1') +2023-03-08 16:44:16,069 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.011e+02 5.021e+02 6.106e+02 7.632e+02 1.377e+03, threshold=1.221e+03, percent-clipped=3.0 +2023-03-08 16:44:17,244 INFO [train.py:898] (1/4) Epoch 3, batch 2250, loss[loss=0.2654, simple_loss=0.3349, pruned_loss=0.09795, over 18310.00 frames. ], tot_loss[loss=0.2729, simple_loss=0.339, pruned_loss=0.1034, over 3592936.67 frames. ], batch size: 49, lr: 3.18e-02, grad_scale: 8.0 +2023-03-08 16:44:18,822 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.2906, 2.9290, 2.8978, 2.5175, 2.9646, 2.3354, 2.4973, 3.3442], + device='cuda:1'), covar=tensor([0.0094, 0.0161, 0.0210, 0.0207, 0.0146, 0.0302, 0.0272, 0.0093], + device='cuda:1'), in_proj_covar=tensor([0.0041, 0.0045, 0.0048, 0.0061, 0.0044, 0.0067, 0.0080, 0.0045], + device='cuda:1'), out_proj_covar=tensor([5.7930e-05, 6.8934e-05, 7.7307e-05, 8.9830e-05, 6.6236e-05, 1.0436e-04, + 1.3072e-04, 7.0823e-05], device='cuda:1') +2023-03-08 16:45:16,194 INFO [train.py:898] (1/4) Epoch 3, batch 2300, loss[loss=0.2682, simple_loss=0.3193, pruned_loss=0.1085, over 18401.00 frames. ], tot_loss[loss=0.2723, simple_loss=0.3383, pruned_loss=0.1031, over 3601514.18 frames. ], batch size: 42, lr: 3.17e-02, grad_scale: 8.0 +2023-03-08 16:45:32,376 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9583.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:45:33,740 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9584.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:46:14,655 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.768e+02 4.849e+02 6.155e+02 7.957e+02 1.846e+03, threshold=1.231e+03, percent-clipped=3.0 +2023-03-08 16:46:15,812 INFO [train.py:898] (1/4) Epoch 3, batch 2350, loss[loss=0.272, simple_loss=0.3422, pruned_loss=0.1009, over 18310.00 frames. ], tot_loss[loss=0.2721, simple_loss=0.3383, pruned_loss=0.103, over 3606091.83 frames. 
], batch size: 54, lr: 3.16e-02, grad_scale: 8.0 +2023-03-08 16:46:29,768 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9631.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:46:42,494 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9642.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:46:46,054 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9645.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:47:00,863 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9658.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:47:14,970 INFO [train.py:898] (1/4) Epoch 3, batch 2400, loss[loss=0.2453, simple_loss=0.311, pruned_loss=0.08977, over 18242.00 frames. ], tot_loss[loss=0.2718, simple_loss=0.3384, pruned_loss=0.1026, over 3606436.93 frames. ], batch size: 45, lr: 3.16e-02, grad_scale: 8.0 +2023-03-08 16:47:15,292 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9669.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:47:29,328 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.04 vs. limit=2.0 +2023-03-08 16:47:39,284 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9690.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:48:10,790 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9717.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:48:13,579 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.796e+02 5.501e+02 6.533e+02 8.034e+02 1.544e+03, threshold=1.307e+03, percent-clipped=2.0 +2023-03-08 16:48:13,605 INFO [train.py:898] (1/4) Epoch 3, batch 2450, loss[loss=0.289, simple_loss=0.3505, pruned_loss=0.1137, over 17987.00 frames. ], tot_loss[loss=0.2708, simple_loss=0.3372, pruned_loss=0.1022, over 3600853.11 frames. ], batch size: 65, lr: 3.15e-02, grad_scale: 8.0 +2023-03-08 16:48:14,041 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9719.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:48:27,757 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.89 vs. limit=5.0 +2023-03-08 16:48:41,464 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9743.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:49:08,353 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4204, 4.8333, 2.1775, 4.7587, 5.3560, 2.6218, 4.4680, 4.0901], + device='cuda:1'), covar=tensor([0.0040, 0.0405, 0.1477, 0.0245, 0.0040, 0.1300, 0.0395, 0.0606], + device='cuda:1'), in_proj_covar=tensor([0.0074, 0.0107, 0.0162, 0.0135, 0.0066, 0.0154, 0.0162, 0.0156], + device='cuda:1'), out_proj_covar=tensor([9.2238e-05, 1.5505e-04, 1.8496e-04, 1.6165e-04, 8.4639e-05, 1.8128e-04, + 1.9116e-04, 1.9526e-04], device='cuda:1') +2023-03-08 16:49:11,284 INFO [train.py:898] (1/4) Epoch 3, batch 2500, loss[loss=0.2646, simple_loss=0.3419, pruned_loss=0.09365, over 18566.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3361, pruned_loss=0.1013, over 3604310.76 frames. 
], batch size: 54, lr: 3.14e-02, grad_scale: 8.0 +2023-03-08 16:49:13,105 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9770.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:49:27,016 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3867, 4.6171, 1.9198, 4.6865, 5.2831, 2.2887, 4.3249, 3.7420], + device='cuda:1'), covar=tensor([0.0036, 0.0549, 0.1562, 0.0260, 0.0034, 0.1414, 0.0421, 0.0722], + device='cuda:1'), in_proj_covar=tensor([0.0074, 0.0107, 0.0162, 0.0135, 0.0065, 0.0154, 0.0161, 0.0156], + device='cuda:1'), out_proj_covar=tensor([9.1724e-05, 1.5451e-04, 1.8438e-04, 1.6170e-04, 8.3872e-05, 1.8162e-04, + 1.9048e-04, 1.9469e-04], device='cuda:1') +2023-03-08 16:49:42,773 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6283, 3.1825, 3.3052, 2.7668, 2.4477, 2.8116, 2.0821, 2.0044], + device='cuda:1'), covar=tensor([0.0175, 0.0138, 0.0054, 0.0165, 0.0264, 0.0126, 0.0506, 0.0583], + device='cuda:1'), in_proj_covar=tensor([0.0027, 0.0026, 0.0023, 0.0028, 0.0040, 0.0023, 0.0043, 0.0048], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 16:49:51,961 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9804.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:49:57,370 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2798, 4.9985, 4.8948, 4.8379, 4.1570, 5.0582, 5.1705, 4.8897], + device='cuda:1'), covar=tensor([0.2047, 0.0915, 0.0649, 0.0959, 0.2798, 0.0805, 0.0670, 0.1014], + device='cuda:1'), in_proj_covar=tensor([0.0328, 0.0270, 0.0214, 0.0288, 0.0409, 0.0289, 0.0286, 0.0254], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:1') +2023-03-08 16:50:09,364 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.524e+02 5.803e+02 7.293e+02 8.388e+02 1.924e+03, threshold=1.459e+03, percent-clipped=4.0 +2023-03-08 16:50:09,400 INFO [train.py:898] (1/4) Epoch 3, batch 2550, loss[loss=0.3076, simple_loss=0.3738, pruned_loss=0.1207, over 18289.00 frames. ], tot_loss[loss=0.2713, simple_loss=0.3375, pruned_loss=0.1025, over 3587258.32 frames. ], batch size: 57, lr: 3.14e-02, grad_scale: 8.0 +2023-03-08 16:50:45,446 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1754, 5.1489, 4.5592, 5.1808, 5.1639, 4.5195, 5.0745, 4.5425], + device='cuda:1'), covar=tensor([0.0417, 0.0407, 0.1874, 0.0575, 0.0428, 0.0467, 0.0425, 0.0741], + device='cuda:1'), in_proj_covar=tensor([0.0239, 0.0253, 0.0408, 0.0204, 0.0191, 0.0233, 0.0252, 0.0299], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004], + device='cuda:1') +2023-03-08 16:51:06,884 INFO [train.py:898] (1/4) Epoch 3, batch 2600, loss[loss=0.2544, simple_loss=0.3289, pruned_loss=0.08995, over 18357.00 frames. ], tot_loss[loss=0.2723, simple_loss=0.3387, pruned_loss=0.103, over 3595922.63 frames. ], batch size: 46, lr: 3.13e-02, grad_scale: 8.0 +2023-03-08 16:52:05,255 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.617e+02 5.195e+02 6.356e+02 7.709e+02 1.531e+03, threshold=1.271e+03, percent-clipped=3.0 +2023-03-08 16:52:05,281 INFO [train.py:898] (1/4) Epoch 3, batch 2650, loss[loss=0.2595, simple_loss=0.3237, pruned_loss=0.09767, over 18485.00 frames. ], tot_loss[loss=0.271, simple_loss=0.3373, pruned_loss=0.1024, over 3592362.41 frames. 
], batch size: 47, lr: 3.13e-02, grad_scale: 8.0 +2023-03-08 16:52:31,467 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9940.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:53:03,798 INFO [train.py:898] (1/4) Epoch 3, batch 2700, loss[loss=0.2389, simple_loss=0.2993, pruned_loss=0.08927, over 18442.00 frames. ], tot_loss[loss=0.2703, simple_loss=0.3371, pruned_loss=0.1018, over 3591035.24 frames. ], batch size: 42, lr: 3.12e-02, grad_scale: 8.0 +2023-03-08 16:54:01,205 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10014.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:54:06,413 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.004e+02 5.454e+02 7.297e+02 9.817e+02 2.367e+03, threshold=1.459e+03, percent-clipped=11.0 +2023-03-08 16:54:06,449 INFO [train.py:898] (1/4) Epoch 3, batch 2750, loss[loss=0.2339, simple_loss=0.3031, pruned_loss=0.08235, over 18264.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.337, pruned_loss=0.1018, over 3587783.53 frames. ], batch size: 47, lr: 3.11e-02, grad_scale: 8.0 +2023-03-08 16:54:15,745 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-08 16:54:31,155 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10039.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:54:43,317 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0024, 5.0186, 4.1595, 4.9291, 4.9072, 4.4169, 4.9346, 4.3995], + device='cuda:1'), covar=tensor([0.0385, 0.0433, 0.2023, 0.0595, 0.0524, 0.0489, 0.0336, 0.0613], + device='cuda:1'), in_proj_covar=tensor([0.0234, 0.0249, 0.0397, 0.0197, 0.0185, 0.0229, 0.0244, 0.0287], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004], + device='cuda:1') +2023-03-08 16:54:57,034 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10062.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 16:55:04,435 INFO [train.py:898] (1/4) Epoch 3, batch 2800, loss[loss=0.239, simple_loss=0.3078, pruned_loss=0.08506, over 18411.00 frames. ], tot_loss[loss=0.2699, simple_loss=0.3369, pruned_loss=0.1015, over 3587634.11 frames. 
], batch size: 48, lr: 3.11e-02, grad_scale: 8.0 +2023-03-08 16:55:05,816 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10070.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:55:14,300 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3724, 3.3697, 2.7062, 2.8146, 3.2619, 2.6264, 2.7306, 3.4298], + device='cuda:1'), covar=tensor([0.0097, 0.0141, 0.0300, 0.0188, 0.0200, 0.0325, 0.0349, 0.0191], + device='cuda:1'), in_proj_covar=tensor([0.0041, 0.0045, 0.0047, 0.0067, 0.0046, 0.0071, 0.0083, 0.0046], + device='cuda:1'), out_proj_covar=tensor([5.9006e-05, 6.8720e-05, 7.5900e-05, 1.0158e-04, 7.1177e-05, 1.1032e-04, + 1.3669e-04, 7.3439e-05], device='cuda:1') +2023-03-08 16:55:40,458 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10099.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:55:41,785 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10100.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 16:56:01,591 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10118.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:56:02,531 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.762e+02 4.646e+02 5.829e+02 7.162e+02 1.376e+03, threshold=1.166e+03, percent-clipped=0.0 +2023-03-08 16:56:02,556 INFO [train.py:898] (1/4) Epoch 3, batch 2850, loss[loss=0.2645, simple_loss=0.3319, pruned_loss=0.09859, over 18295.00 frames. ], tot_loss[loss=0.2705, simple_loss=0.3372, pruned_loss=0.1018, over 3592796.01 frames. ], batch size: 49, lr: 3.10e-02, grad_scale: 8.0 +2023-03-08 16:56:07,188 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10123.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 16:56:27,143 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.68 vs. limit=2.0 +2023-03-08 16:56:27,878 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1913, 3.0517, 2.6203, 2.5401, 2.7072, 2.3577, 2.2709, 2.9489], + device='cuda:1'), covar=tensor([0.0052, 0.0081, 0.0216, 0.0149, 0.0166, 0.0236, 0.0274, 0.0167], + device='cuda:1'), in_proj_covar=tensor([0.0042, 0.0046, 0.0048, 0.0068, 0.0047, 0.0071, 0.0083, 0.0047], + device='cuda:1'), out_proj_covar=tensor([6.0324e-05, 7.1125e-05, 7.7503e-05, 1.0346e-04, 7.3257e-05, 1.1063e-04, + 1.3687e-04, 7.5639e-05], device='cuda:1') +2023-03-08 16:56:34,858 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. 
limit=2.0 +2023-03-08 16:56:49,141 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3852, 2.7821, 2.5116, 2.9188, 3.3726, 3.4011, 2.7818, 3.0928], + device='cuda:1'), covar=tensor([0.0353, 0.0338, 0.0967, 0.0304, 0.0324, 0.0278, 0.0349, 0.0185], + device='cuda:1'), in_proj_covar=tensor([0.0073, 0.0057, 0.0113, 0.0079, 0.0065, 0.0049, 0.0073, 0.0064], + device='cuda:1'), out_proj_covar=tensor([1.3124e-04, 1.0510e-04, 1.8933e-04, 1.3212e-04, 1.1949e-04, 8.2552e-05, + 1.2804e-04, 1.1003e-04], device='cuda:1') +2023-03-08 16:56:58,368 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.4270, 5.5994, 3.3848, 5.2672, 5.3609, 5.6375, 5.5686, 2.6260], + device='cuda:1'), covar=tensor([0.0121, 0.0030, 0.0563, 0.0049, 0.0042, 0.0039, 0.0052, 0.1035], + device='cuda:1'), in_proj_covar=tensor([0.0054, 0.0040, 0.0071, 0.0045, 0.0047, 0.0040, 0.0050, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-08 16:57:01,518 INFO [train.py:898] (1/4) Epoch 3, batch 2900, loss[loss=0.2251, simple_loss=0.2918, pruned_loss=0.07926, over 18356.00 frames. ], tot_loss[loss=0.2702, simple_loss=0.337, pruned_loss=0.1016, over 3591833.23 frames. ], batch size: 42, lr: 3.09e-02, grad_scale: 4.0 +2023-03-08 16:57:05,166 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2032, 5.8612, 5.2818, 5.5222, 5.1968, 5.5061, 5.9444, 5.8801], + device='cuda:1'), covar=tensor([0.1300, 0.0549, 0.0425, 0.0708, 0.1362, 0.0613, 0.0449, 0.0507], + device='cuda:1'), in_proj_covar=tensor([0.0341, 0.0278, 0.0219, 0.0303, 0.0419, 0.0300, 0.0302, 0.0261], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:1') +2023-03-08 16:58:00,061 INFO [train.py:898] (1/4) Epoch 3, batch 2950, loss[loss=0.2672, simple_loss=0.3316, pruned_loss=0.1014, over 18278.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3365, pruned_loss=0.1015, over 3587540.81 frames. ], batch size: 49, lr: 3.09e-02, grad_scale: 4.0 +2023-03-08 16:58:01,190 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.457e+02 4.931e+02 6.360e+02 7.565e+02 1.819e+03, threshold=1.272e+03, percent-clipped=6.0 +2023-03-08 16:58:04,414 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.04 vs. limit=2.0 +2023-03-08 16:58:25,413 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10240.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:58:59,061 INFO [train.py:898] (1/4) Epoch 3, batch 3000, loss[loss=0.2591, simple_loss=0.3307, pruned_loss=0.09381, over 18609.00 frames. ], tot_loss[loss=0.2679, simple_loss=0.3346, pruned_loss=0.1005, over 3596059.88 frames. ], batch size: 52, lr: 3.08e-02, grad_scale: 4.0 +2023-03-08 16:58:59,062 INFO [train.py:923] (1/4) Computing validation loss +2023-03-08 16:59:10,963 INFO [train.py:932] (1/4) Epoch 3, validation: loss=0.2015, simple_loss=0.3025, pruned_loss=0.05021, over 944034.00 frames. 
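A note on two recurring kinds of entries in this log. First, the loss fields: the logged loss is a weighted sum of the cheap "simple" transducer loss and the pruned full-joiner loss. After warmup the values here are consistent with loss = 0.5 * simple_loss + pruned_loss (for the validation entry just above: 0.5 * 0.3025 + 0.05021 = 0.2015; likewise for batch 2950: 0.5 * 0.3365 + 0.1015 = 0.2697). A minimal, self-contained sketch of that combination follows; combined_loss is a hypothetical helper, and the pre-warmup ramp is an illustrative assumption rather than the recipe's exact schedule:

def combined_loss(simple_loss: float,
                  pruned_loss: float,
                  batch_idx_train: int,
                  warm_step: int = 2000,
                  simple_loss_scale: float = 0.5) -> float:
    # Fraction of warmup completed, capped at 1.0.
    warmed = min(batch_idx_train / warm_step, 1.0)
    # The simple loss carries most of the training signal early on;
    # the pruned loss is phased in as warmup completes (assumed ramp,
    # chosen only so the post-warmup weights match the logged values).
    simple_scale = 1.0 - (1.0 - simple_loss_scale) * warmed  # 1.0 -> 0.5
    pruned_scale = warmed                                    # 0.0 -> 1.0
    return simple_scale * simple_loss + pruned_scale * pruned_loss

# Post-warmup check against the validation entry above:
# combined_loss(0.3025, 0.05021, batch_idx_train=10000) -> 0.20146

Second, the optim.py entries: the five grad-norm quartiles appear to be the min/25%/50%/75%/max of recently observed gradient norms, and the logged threshold equals clipping_scale times the median (2.0 * 6.360e+02 = 1.272e+03 in the entry above; the same relation holds for the other clipping entries in this file). A sketch of that bookkeeping, again with hypothetical helper names; how the recipe applies the threshold and accumulates percent-clipped is assumed here:

import statistics

def clip_threshold(recent_grad_norms: list, clipping_scale: float = 2.0) -> float:
    # threshold = clipping_scale * median of recent gradient norms,
    # matching the logged quartiles/threshold pairs in this file.
    return clipping_scale * statistics.median(recent_grad_norms)

def clip_factor(grad_norm: float, threshold: float) -> float:
    # Factor applied to the gradient; a factor below 1.0 would count
    # toward the logged percent-clipped statistic (assumption).
    return min(1.0, threshold / grad_norm)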
+2023-03-08 16:59:10,964 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19527MB +2023-03-08 16:59:34,256 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10288.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:00:03,768 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10314.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:00:08,962 INFO [train.py:898] (1/4) Epoch 3, batch 3050, loss[loss=0.275, simple_loss=0.3399, pruned_loss=0.105, over 18365.00 frames. ], tot_loss[loss=0.269, simple_loss=0.336, pruned_loss=0.101, over 3593279.72 frames. ], batch size: 56, lr: 3.08e-02, grad_scale: 4.0 +2023-03-08 17:00:10,066 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.678e+02 5.040e+02 6.054e+02 8.050e+02 1.536e+03, threshold=1.211e+03, percent-clipped=3.0 +2023-03-08 17:00:17,141 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.69 vs. limit=5.0 +2023-03-08 17:00:28,340 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([1.8682, 3.9774, 4.9980, 3.7839, 2.9459, 2.3379, 4.4418, 4.8577], + device='cuda:1'), covar=tensor([0.1106, 0.0561, 0.0032, 0.0288, 0.0782, 0.1099, 0.0175, 0.0026], + device='cuda:1'), in_proj_covar=tensor([0.0115, 0.0085, 0.0051, 0.0102, 0.0133, 0.0138, 0.0101, 0.0048], + device='cuda:1'), out_proj_covar=tensor([1.8502e-04, 1.5058e-04, 8.3113e-05, 1.6364e-04, 2.0149e-04, 2.1115e-04, + 1.6167e-04, 7.7891e-05], device='cuda:1') +2023-03-08 17:00:42,714 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9216, 3.8156, 4.6417, 3.0868, 4.0437, 3.2067, 3.3331, 2.4561], + device='cuda:1'), covar=tensor([0.0492, 0.0351, 0.0042, 0.0351, 0.0388, 0.1094, 0.0812, 0.1082], + device='cuda:1'), in_proj_covar=tensor([0.0117, 0.0117, 0.0061, 0.0101, 0.0130, 0.0175, 0.0100, 0.0144], + device='cuda:1'), out_proj_covar=tensor([1.1918e-04, 1.2091e-04, 6.5205e-05, 1.0455e-04, 1.3588e-04, 1.7254e-04, + 1.1633e-04, 1.4104e-04], device='cuda:1') +2023-03-08 17:00:59,202 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10362.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:01:07,145 INFO [train.py:898] (1/4) Epoch 3, batch 3100, loss[loss=0.2864, simple_loss=0.3532, pruned_loss=0.1098, over 18260.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3352, pruned_loss=0.1002, over 3593335.22 frames. 
], batch size: 60, lr: 3.07e-02, grad_scale: 4.0 +2023-03-08 17:01:07,463 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1849, 4.6916, 4.7538, 4.5186, 4.3823, 4.5910, 3.9588, 4.5628], + device='cuda:1'), covar=tensor([0.0294, 0.0331, 0.0233, 0.0256, 0.0452, 0.0264, 0.1266, 0.0326], + device='cuda:1'), in_proj_covar=tensor([0.0108, 0.0140, 0.0125, 0.0112, 0.0140, 0.0139, 0.0199, 0.0127], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-08 17:01:38,740 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10395.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 17:01:43,459 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10399.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:01:54,985 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10409.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:02:05,057 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10418.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 17:02:06,015 INFO [train.py:898] (1/4) Epoch 3, batch 3150, loss[loss=0.2254, simple_loss=0.2883, pruned_loss=0.08122, over 18477.00 frames. ], tot_loss[loss=0.2675, simple_loss=0.335, pruned_loss=0.1, over 3597065.52 frames. ], batch size: 44, lr: 3.06e-02, grad_scale: 4.0 +2023-03-08 17:02:07,188 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.149e+02 4.695e+02 5.685e+02 7.343e+02 1.972e+03, threshold=1.137e+03, percent-clipped=4.0 +2023-03-08 17:02:24,394 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6111, 3.4143, 2.9588, 2.6959, 2.9780, 2.6892, 2.3412, 3.4950], + device='cuda:1'), covar=tensor([0.0059, 0.0108, 0.0207, 0.0149, 0.0140, 0.0212, 0.0275, 0.0136], + device='cuda:1'), in_proj_covar=tensor([0.0043, 0.0048, 0.0047, 0.0072, 0.0047, 0.0074, 0.0087, 0.0048], + device='cuda:1'), out_proj_covar=tensor([6.2818e-05, 7.5152e-05, 7.7293e-05, 1.1036e-04, 7.2350e-05, 1.1523e-04, + 1.4302e-04, 7.7136e-05], device='cuda:1') +2023-03-08 17:02:40,170 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10447.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:02:48,027 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10454.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:03:04,448 INFO [train.py:898] (1/4) Epoch 3, batch 3200, loss[loss=0.2759, simple_loss=0.3381, pruned_loss=0.1068, over 18244.00 frames. ], tot_loss[loss=0.2674, simple_loss=0.3351, pruned_loss=0.09986, over 3601743.30 frames. 
], batch size: 60, lr: 3.06e-02, grad_scale: 8.0 +2023-03-08 17:03:05,980 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10470.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:03:48,821 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6693, 3.4971, 1.4977, 4.4423, 3.3047, 4.6616, 2.6225, 4.2052], + device='cuda:1'), covar=tensor([0.0474, 0.0912, 0.1789, 0.0267, 0.0818, 0.0054, 0.1100, 0.0255], + device='cuda:1'), in_proj_covar=tensor([0.0127, 0.0168, 0.0150, 0.0114, 0.0143, 0.0076, 0.0149, 0.0135], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 17:03:59,352 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10515.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 17:04:03,408 INFO [train.py:898] (1/4) Epoch 3, batch 3250, loss[loss=0.2684, simple_loss=0.3294, pruned_loss=0.1037, over 18263.00 frames. ], tot_loss[loss=0.2663, simple_loss=0.3342, pruned_loss=0.09922, over 3600226.22 frames. ], batch size: 47, lr: 3.05e-02, grad_scale: 8.0 +2023-03-08 17:04:04,543 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.357e+02 4.994e+02 6.141e+02 8.166e+02 2.371e+03, threshold=1.228e+03, percent-clipped=6.0 +2023-03-08 17:05:01,943 INFO [train.py:898] (1/4) Epoch 3, batch 3300, loss[loss=0.3017, simple_loss=0.3604, pruned_loss=0.1215, over 12450.00 frames. ], tot_loss[loss=0.2662, simple_loss=0.3339, pruned_loss=0.09929, over 3592593.27 frames. ], batch size: 131, lr: 3.05e-02, grad_scale: 8.0 +2023-03-08 17:06:01,134 INFO [train.py:898] (1/4) Epoch 3, batch 3350, loss[loss=0.2652, simple_loss=0.3394, pruned_loss=0.09551, over 18283.00 frames. ], tot_loss[loss=0.2663, simple_loss=0.3339, pruned_loss=0.09934, over 3600895.28 frames. ], batch size: 57, lr: 3.04e-02, grad_scale: 8.0 +2023-03-08 17:06:02,282 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.520e+02 5.384e+02 6.304e+02 8.125e+02 1.835e+03, threshold=1.261e+03, percent-clipped=2.0 +2023-03-08 17:06:10,440 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8484, 3.4995, 1.7605, 4.4573, 3.1016, 4.5661, 2.0571, 4.0999], + device='cuda:1'), covar=tensor([0.0368, 0.0762, 0.1464, 0.0207, 0.0806, 0.0093, 0.1257, 0.0268], + device='cuda:1'), in_proj_covar=tensor([0.0128, 0.0165, 0.0149, 0.0113, 0.0144, 0.0078, 0.0149, 0.0133], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 17:06:59,042 INFO [train.py:898] (1/4) Epoch 3, batch 3400, loss[loss=0.2749, simple_loss=0.3204, pruned_loss=0.1147, over 18433.00 frames. ], tot_loss[loss=0.266, simple_loss=0.3333, pruned_loss=0.09939, over 3604514.37 frames. ], batch size: 43, lr: 3.03e-02, grad_scale: 8.0 +2023-03-08 17:07:30,318 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10695.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:07:35,601 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.66 vs. 
limit=5.0 +2023-03-08 17:07:43,502 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6418, 1.6202, 4.5467, 2.8007, 3.4225, 4.8958, 4.4209, 4.2105], + device='cuda:1'), covar=tensor([0.0206, 0.0805, 0.0132, 0.0534, 0.0999, 0.0025, 0.0147, 0.0138], + device='cuda:1'), in_proj_covar=tensor([0.0109, 0.0140, 0.0071, 0.0141, 0.0223, 0.0079, 0.0112, 0.0110], + device='cuda:1'), out_proj_covar=tensor([9.0610e-05, 1.1521e-04, 6.2702e-05, 1.0916e-04, 1.7886e-04, 5.7307e-05, + 9.3625e-05, 8.9565e-05], device='cuda:1') +2023-03-08 17:07:56,804 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10718.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 17:07:57,571 INFO [train.py:898] (1/4) Epoch 3, batch 3450, loss[loss=0.2225, simple_loss=0.303, pruned_loss=0.07104, over 18495.00 frames. ], tot_loss[loss=0.2651, simple_loss=0.3324, pruned_loss=0.09887, over 3608659.67 frames. ], batch size: 47, lr: 3.03e-02, grad_scale: 8.0 +2023-03-08 17:07:58,727 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.826e+02 5.643e+02 6.682e+02 8.838e+02 1.430e+03, threshold=1.336e+03, percent-clipped=8.0 +2023-03-08 17:08:24,803 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10743.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:08:26,035 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10744.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:08:51,332 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10765.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:08:52,390 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10766.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 17:08:55,407 INFO [train.py:898] (1/4) Epoch 3, batch 3500, loss[loss=0.296, simple_loss=0.3555, pruned_loss=0.1183, over 18459.00 frames. ], tot_loss[loss=0.2645, simple_loss=0.3321, pruned_loss=0.09849, over 3600178.62 frames. ], batch size: 59, lr: 3.02e-02, grad_scale: 8.0 +2023-03-08 17:09:01,515 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10774.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:09:36,587 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10805.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:09:41,885 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10810.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 17:09:51,965 INFO [train.py:898] (1/4) Epoch 3, batch 3550, loss[loss=0.2992, simple_loss=0.3625, pruned_loss=0.1179, over 17757.00 frames. ], tot_loss[loss=0.2646, simple_loss=0.3322, pruned_loss=0.09853, over 3591310.29 frames. ], batch size: 70, lr: 3.02e-02, grad_scale: 8.0 +2023-03-08 17:09:52,945 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.843e+02 4.878e+02 6.150e+02 7.630e+02 1.368e+03, threshold=1.230e+03, percent-clipped=1.0 +2023-03-08 17:10:09,107 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10835.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:10:46,351 INFO [train.py:898] (1/4) Epoch 3, batch 3600, loss[loss=0.2738, simple_loss=0.3419, pruned_loss=0.1029, over 18299.00 frames. ], tot_loss[loss=0.2649, simple_loss=0.3319, pruned_loss=0.09891, over 3584901.40 frames. 
], batch size: 57, lr: 3.01e-02, grad_scale: 8.0 +2023-03-08 17:10:56,349 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5058, 4.3157, 4.5032, 4.3169, 4.3561, 4.3666, 4.7544, 4.7815], + device='cuda:1'), covar=tensor([0.0076, 0.0110, 0.0106, 0.0097, 0.0122, 0.0105, 0.0079, 0.0092], + device='cuda:1'), in_proj_covar=tensor([0.0060, 0.0047, 0.0045, 0.0056, 0.0051, 0.0064, 0.0052, 0.0049], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 17:11:03,863 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10885.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 17:11:51,265 INFO [train.py:898] (1/4) Epoch 4, batch 0, loss[loss=0.2645, simple_loss=0.327, pruned_loss=0.101, over 18268.00 frames. ], tot_loss[loss=0.2645, simple_loss=0.327, pruned_loss=0.101, over 18268.00 frames. ], batch size: 49, lr: 2.81e-02, grad_scale: 8.0 +2023-03-08 17:11:51,265 INFO [train.py:923] (1/4) Computing validation loss +2023-03-08 17:12:03,131 INFO [train.py:932] (1/4) Epoch 4, validation: loss=0.2018, simple_loss=0.3032, pruned_loss=0.05022, over 944034.00 frames. +2023-03-08 17:12:03,132 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19527MB +2023-03-08 17:12:08,317 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3906, 1.7897, 4.4619, 2.5526, 3.5093, 4.6900, 4.2663, 4.1591], + device='cuda:1'), covar=tensor([0.0291, 0.0852, 0.0158, 0.0635, 0.1061, 0.0030, 0.0194, 0.0192], + device='cuda:1'), in_proj_covar=tensor([0.0111, 0.0144, 0.0074, 0.0143, 0.0226, 0.0080, 0.0117, 0.0109], + device='cuda:1'), out_proj_covar=tensor([9.2105e-05, 1.1874e-04, 6.4765e-05, 1.1014e-04, 1.8105e-04, 5.8198e-05, + 9.7915e-05, 8.7536e-05], device='cuda:1') +2023-03-08 17:12:22,936 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.062e+02 5.140e+02 6.071e+02 7.420e+02 1.697e+03, threshold=1.214e+03, percent-clipped=4.0 +2023-03-08 17:12:36,624 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5627, 5.2315, 5.2688, 5.0483, 5.0063, 5.0896, 4.5129, 5.0633], + device='cuda:1'), covar=tensor([0.0241, 0.0244, 0.0172, 0.0164, 0.0299, 0.0211, 0.0992, 0.0229], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0138, 0.0121, 0.0110, 0.0135, 0.0131, 0.0188, 0.0121], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-08 17:12:55,119 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10946.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 17:13:02,667 INFO [train.py:898] (1/4) Epoch 4, batch 50, loss[loss=0.2727, simple_loss=0.3457, pruned_loss=0.09988, over 18498.00 frames. ], tot_loss[loss=0.26, simple_loss=0.3283, pruned_loss=0.09587, over 815333.25 frames. 
], batch size: 51, lr: 2.81e-02, grad_scale: 8.0 +2023-03-08 17:13:26,932 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4128, 4.2628, 4.4140, 3.3648, 3.1195, 2.9240, 2.1996, 1.7738], + device='cuda:1'), covar=tensor([0.0247, 0.0167, 0.0037, 0.0194, 0.0420, 0.0209, 0.0695, 0.0854], + device='cuda:1'), in_proj_covar=tensor([0.0029, 0.0030, 0.0024, 0.0032, 0.0048, 0.0025, 0.0050, 0.0053], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 17:13:41,612 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1303, 5.1567, 4.2054, 4.9971, 4.9968, 4.5814, 4.9702, 4.4801], + device='cuda:1'), covar=tensor([0.0551, 0.0473, 0.2203, 0.0960, 0.0471, 0.0493, 0.0584, 0.0670], + device='cuda:1'), in_proj_covar=tensor([0.0243, 0.0266, 0.0422, 0.0218, 0.0198, 0.0244, 0.0266, 0.0309], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-08 17:14:00,535 INFO [train.py:898] (1/4) Epoch 4, batch 100, loss[loss=0.2525, simple_loss=0.3256, pruned_loss=0.08968, over 18215.00 frames. ], tot_loss[loss=0.2599, simple_loss=0.3275, pruned_loss=0.09617, over 1417510.47 frames. ], batch size: 60, lr: 2.80e-02, grad_scale: 8.0 +2023-03-08 17:14:19,672 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.738e+02 4.868e+02 5.817e+02 7.449e+02 2.139e+03, threshold=1.163e+03, percent-clipped=6.0 +2023-03-08 17:14:45,075 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-08 17:14:58,819 INFO [train.py:898] (1/4) Epoch 4, batch 150, loss[loss=0.2373, simple_loss=0.3023, pruned_loss=0.08614, over 18443.00 frames. ], tot_loss[loss=0.2583, simple_loss=0.3266, pruned_loss=0.09495, over 1900460.25 frames. ], batch size: 43, lr: 2.80e-02, grad_scale: 8.0 +2023-03-08 17:15:12,357 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11065.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:15:47,875 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-03-08 17:15:54,011 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11100.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:15:54,195 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3703, 4.0907, 4.2317, 2.8743, 3.0214, 2.4267, 2.0556, 1.8344], + device='cuda:1'), covar=tensor([0.0192, 0.0156, 0.0052, 0.0231, 0.0348, 0.0257, 0.0760, 0.0866], + device='cuda:1'), in_proj_covar=tensor([0.0029, 0.0029, 0.0025, 0.0031, 0.0047, 0.0024, 0.0048, 0.0051], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0001, 0.0002, 0.0003], + device='cuda:1') +2023-03-08 17:15:57,094 INFO [train.py:898] (1/4) Epoch 4, batch 200, loss[loss=0.2428, simple_loss=0.323, pruned_loss=0.08125, over 18286.00 frames. ], tot_loss[loss=0.2558, simple_loss=0.3248, pruned_loss=0.09341, over 2282156.99 frames. ], batch size: 49, lr: 2.79e-02, grad_scale: 8.0 +2023-03-08 17:16:05,191 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11110.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 17:16:08,738 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11113.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:16:10,299 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.81 vs. 
limit=5.0 +2023-03-08 17:16:16,248 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.946e+02 4.745e+02 5.920e+02 7.633e+02 1.560e+03, threshold=1.184e+03, percent-clipped=4.0 +2023-03-08 17:16:26,642 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2110, 5.2223, 4.4632, 5.1843, 5.2102, 4.5546, 5.1475, 4.5302], + device='cuda:1'), covar=tensor([0.0381, 0.0309, 0.1733, 0.0595, 0.0335, 0.0407, 0.0329, 0.0666], + device='cuda:1'), in_proj_covar=tensor([0.0253, 0.0273, 0.0429, 0.0219, 0.0204, 0.0246, 0.0264, 0.0317], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-08 17:16:27,642 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11130.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:16:43,225 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4675, 3.1762, 1.6261, 4.1909, 2.8355, 4.3008, 1.9888, 3.6035], + device='cuda:1'), covar=tensor([0.0432, 0.0837, 0.1641, 0.0226, 0.0851, 0.0083, 0.1215, 0.0321], + device='cuda:1'), in_proj_covar=tensor([0.0129, 0.0166, 0.0153, 0.0115, 0.0145, 0.0080, 0.0149, 0.0135], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 17:16:55,259 INFO [train.py:898] (1/4) Epoch 4, batch 250, loss[loss=0.2061, simple_loss=0.2814, pruned_loss=0.06544, over 18510.00 frames. ], tot_loss[loss=0.2537, simple_loss=0.3232, pruned_loss=0.09213, over 2583612.76 frames. ], batch size: 44, lr: 2.79e-02, grad_scale: 8.0 +2023-03-08 17:17:01,242 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11158.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:17:19,524 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11174.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:17:54,180 INFO [train.py:898] (1/4) Epoch 4, batch 300, loss[loss=0.2554, simple_loss=0.322, pruned_loss=0.09437, over 18401.00 frames. ], tot_loss[loss=0.2557, simple_loss=0.3255, pruned_loss=0.09295, over 2814022.17 frames. ], batch size: 50, lr: 2.78e-02, grad_scale: 8.0 +2023-03-08 17:18:13,961 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 4.613e+02 5.593e+02 6.411e+02 1.138e+03, threshold=1.119e+03, percent-clipped=0.0 +2023-03-08 17:18:31,447 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11235.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:18:38,740 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11241.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 17:18:50,877 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-08 17:18:53,657 INFO [train.py:898] (1/4) Epoch 4, batch 350, loss[loss=0.23, simple_loss=0.3075, pruned_loss=0.07626, over 18300.00 frames. ], tot_loss[loss=0.256, simple_loss=0.3258, pruned_loss=0.09308, over 2993944.77 frames. 
], batch size: 49, lr: 2.78e-02, grad_scale: 8.0 +2023-03-08 17:18:57,683 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5332, 3.4387, 2.8425, 2.7489, 3.1283, 2.5421, 2.4969, 3.5400], + device='cuda:1'), covar=tensor([0.0051, 0.0095, 0.0175, 0.0137, 0.0100, 0.0199, 0.0265, 0.0104], + device='cuda:1'), in_proj_covar=tensor([0.0044, 0.0049, 0.0049, 0.0076, 0.0051, 0.0079, 0.0091, 0.0048], + device='cuda:1'), out_proj_covar=tensor([6.5983e-05, 7.8201e-05, 8.1824e-05, 1.1939e-04, 8.0459e-05, 1.2545e-04, + 1.4869e-04, 7.7136e-05], device='cuda:1') +2023-03-08 17:19:52,174 INFO [train.py:898] (1/4) Epoch 4, batch 400, loss[loss=0.2823, simple_loss=0.349, pruned_loss=0.1078, over 18507.00 frames. ], tot_loss[loss=0.2561, simple_loss=0.3262, pruned_loss=0.09302, over 3126192.14 frames. ], batch size: 59, lr: 2.77e-02, grad_scale: 8.0 +2023-03-08 17:20:10,946 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.476e+02 4.514e+02 5.687e+02 6.910e+02 1.325e+03, threshold=1.137e+03, percent-clipped=4.0 +2023-03-08 17:20:50,457 INFO [train.py:898] (1/4) Epoch 4, batch 450, loss[loss=0.2709, simple_loss=0.3424, pruned_loss=0.0997, over 18613.00 frames. ], tot_loss[loss=0.2565, simple_loss=0.3264, pruned_loss=0.09331, over 3229183.56 frames. ], batch size: 52, lr: 2.77e-02, grad_scale: 8.0 +2023-03-08 17:21:45,809 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11400.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:21:49,042 INFO [train.py:898] (1/4) Epoch 4, batch 500, loss[loss=0.2138, simple_loss=0.2786, pruned_loss=0.07454, over 18410.00 frames. ], tot_loss[loss=0.2561, simple_loss=0.3263, pruned_loss=0.09299, over 3303166.42 frames. ], batch size: 42, lr: 2.76e-02, grad_scale: 4.0 +2023-03-08 17:21:55,525 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9882, 4.2511, 5.1067, 3.2363, 4.3320, 3.2286, 3.6282, 2.4001], + device='cuda:1'), covar=tensor([0.0472, 0.0288, 0.0042, 0.0367, 0.0349, 0.1006, 0.0827, 0.1183], + device='cuda:1'), in_proj_covar=tensor([0.0127, 0.0127, 0.0066, 0.0107, 0.0138, 0.0179, 0.0120, 0.0151], + device='cuda:1'), out_proj_covar=tensor([1.2837e-04, 1.3099e-04, 6.9728e-05, 1.1126e-04, 1.4568e-04, 1.7929e-04, + 1.3511e-04, 1.5100e-04], device='cuda:1') +2023-03-08 17:22:09,638 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.774e+02 5.147e+02 6.792e+02 8.631e+02 2.583e+03, threshold=1.358e+03, percent-clipped=9.0 +2023-03-08 17:22:16,267 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. limit=2.0 +2023-03-08 17:22:20,292 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11430.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:22:34,266 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3083, 5.2814, 3.1095, 5.1656, 4.8873, 5.3307, 5.1376, 3.0440], + device='cuda:1'), covar=tensor([0.0131, 0.0042, 0.0581, 0.0057, 0.0068, 0.0050, 0.0083, 0.0793], + device='cuda:1'), in_proj_covar=tensor([0.0058, 0.0043, 0.0075, 0.0051, 0.0051, 0.0042, 0.0056, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 17:22:40,921 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11448.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:22:46,351 INFO [train.py:898] (1/4) Epoch 4, batch 550, loss[loss=0.2266, simple_loss=0.3016, pruned_loss=0.07584, over 18502.00 frames. 
], tot_loss[loss=0.2558, simple_loss=0.3259, pruned_loss=0.09289, over 3377279.33 frames. ], batch size: 47, lr: 2.76e-02, grad_scale: 4.0 +2023-03-08 17:22:59,757 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.18 vs. limit=5.0 +2023-03-08 17:23:17,135 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11478.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:23:40,055 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.3216, 3.8568, 5.0359, 3.8921, 3.1307, 2.5278, 4.1907, 4.8038], + device='cuda:1'), covar=tensor([0.1066, 0.0700, 0.0042, 0.0296, 0.0854, 0.1128, 0.0264, 0.0033], + device='cuda:1'), in_proj_covar=tensor([0.0121, 0.0103, 0.0057, 0.0110, 0.0141, 0.0146, 0.0113, 0.0055], + device='cuda:1'), out_proj_covar=tensor([1.9647e-04, 1.8108e-04, 9.5257e-05, 1.7982e-04, 2.1573e-04, 2.2754e-04, + 1.8152e-04, 8.9840e-05], device='cuda:1') +2023-03-08 17:23:45,220 INFO [train.py:898] (1/4) Epoch 4, batch 600, loss[loss=0.2588, simple_loss=0.3376, pruned_loss=0.08997, over 18369.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.3239, pruned_loss=0.09191, over 3429821.18 frames. ], batch size: 55, lr: 2.75e-02, grad_scale: 4.0 +2023-03-08 17:24:07,208 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.433e+02 4.507e+02 5.343e+02 7.594e+02 1.826e+03, threshold=1.069e+03, percent-clipped=2.0 +2023-03-08 17:24:17,750 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11530.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:24:20,309 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11532.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:24:24,656 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5965, 3.3238, 1.6264, 4.1370, 2.9844, 4.3523, 2.2087, 3.7842], + device='cuda:1'), covar=tensor([0.0404, 0.0730, 0.1550, 0.0283, 0.0756, 0.0071, 0.1111, 0.0315], + device='cuda:1'), in_proj_covar=tensor([0.0132, 0.0175, 0.0156, 0.0123, 0.0152, 0.0086, 0.0154, 0.0139], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 17:24:30,014 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11541.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 17:24:43,306 INFO [train.py:898] (1/4) Epoch 4, batch 650, loss[loss=0.2558, simple_loss=0.3341, pruned_loss=0.08878, over 18296.00 frames. ], tot_loss[loss=0.254, simple_loss=0.324, pruned_loss=0.09202, over 3476447.31 frames. ], batch size: 54, lr: 2.75e-02, grad_scale: 4.0 +2023-03-08 17:25:25,675 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11589.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 17:25:30,225 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11593.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:25:41,338 INFO [train.py:898] (1/4) Epoch 4, batch 700, loss[loss=0.2623, simple_loss=0.3391, pruned_loss=0.09271, over 17015.00 frames. ], tot_loss[loss=0.2548, simple_loss=0.3246, pruned_loss=0.09247, over 3492493.15 frames. 
], batch size: 78, lr: 2.74e-02, grad_scale: 4.0 +2023-03-08 17:26:03,623 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 5.310e+02 6.723e+02 8.319e+02 1.846e+03, threshold=1.345e+03, percent-clipped=5.0 +2023-03-08 17:26:19,788 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11635.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:26:40,002 INFO [train.py:898] (1/4) Epoch 4, batch 750, loss[loss=0.2558, simple_loss=0.3245, pruned_loss=0.09358, over 18493.00 frames. ], tot_loss[loss=0.2545, simple_loss=0.3246, pruned_loss=0.09217, over 3510246.79 frames. ], batch size: 51, lr: 2.74e-02, grad_scale: 4.0 +2023-03-08 17:27:29,065 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1682, 2.1526, 4.1890, 4.1380, 2.3887, 4.3026, 3.8302, 2.6370], + device='cuda:1'), covar=tensor([0.0158, 0.0994, 0.0066, 0.0125, 0.1332, 0.0093, 0.0300, 0.0901], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0138, 0.0076, 0.0086, 0.0158, 0.0095, 0.0099, 0.0150], + device='cuda:1'), out_proj_covar=tensor([1.0138e-04, 1.3383e-04, 7.9786e-05, 8.2995e-05, 1.5249e-04, 9.0323e-05, + 1.0445e-04, 1.5189e-04], device='cuda:1') +2023-03-08 17:27:31,303 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11696.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:27:34,646 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11699.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:27:38,703 INFO [train.py:898] (1/4) Epoch 4, batch 800, loss[loss=0.228, simple_loss=0.295, pruned_loss=0.08052, over 18277.00 frames. ], tot_loss[loss=0.253, simple_loss=0.3235, pruned_loss=0.09132, over 3529664.69 frames. ], batch size: 45, lr: 2.73e-02, grad_scale: 8.0 +2023-03-08 17:28:00,219 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.738e+02 5.059e+02 5.928e+02 8.034e+02 1.891e+03, threshold=1.186e+03, percent-clipped=2.0 +2023-03-08 17:28:04,779 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-03-08 17:28:17,649 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5196, 3.5286, 1.5203, 4.2107, 3.0222, 4.6205, 2.4528, 4.1112], + device='cuda:1'), covar=tensor([0.0491, 0.0809, 0.1719, 0.0316, 0.0967, 0.0074, 0.1158, 0.0278], + device='cuda:1'), in_proj_covar=tensor([0.0129, 0.0177, 0.0153, 0.0124, 0.0153, 0.0084, 0.0155, 0.0136], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 17:28:37,659 INFO [train.py:898] (1/4) Epoch 4, batch 850, loss[loss=0.2677, simple_loss=0.3426, pruned_loss=0.09645, over 18275.00 frames. ], tot_loss[loss=0.2538, simple_loss=0.3242, pruned_loss=0.09174, over 3552512.36 frames. ], batch size: 60, lr: 2.73e-02, grad_scale: 8.0 +2023-03-08 17:28:39,473 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.02 vs. limit=2.0 +2023-03-08 17:28:46,051 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11760.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:29:37,626 INFO [train.py:898] (1/4) Epoch 4, batch 900, loss[loss=0.2176, simple_loss=0.2831, pruned_loss=0.07606, over 17746.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3248, pruned_loss=0.09194, over 3570849.98 frames. 
], batch size: 39, lr: 2.72e-02, grad_scale: 8.0 +2023-03-08 17:29:52,697 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11816.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:29:58,021 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.973e+02 4.760e+02 5.740e+02 6.849e+02 1.554e+03, threshold=1.148e+03, percent-clipped=4.0 +2023-03-08 17:30:11,091 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11830.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:30:20,677 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-08 17:30:27,163 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9767, 4.2972, 2.0076, 4.3393, 5.0395, 2.4485, 3.7101, 3.6912], + device='cuda:1'), covar=tensor([0.0076, 0.0849, 0.1767, 0.0342, 0.0037, 0.1387, 0.0647, 0.0770], + device='cuda:1'), in_proj_covar=tensor([0.0075, 0.0129, 0.0164, 0.0148, 0.0070, 0.0158, 0.0171, 0.0161], + device='cuda:1'), out_proj_covar=tensor([1.0052e-04, 1.8615e-04, 2.0131e-04, 1.8641e-04, 9.2832e-05, 1.9746e-04, + 2.1235e-04, 2.0907e-04], device='cuda:1') +2023-03-08 17:30:29,242 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11846.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:30:36,937 INFO [train.py:898] (1/4) Epoch 4, batch 950, loss[loss=0.2232, simple_loss=0.3045, pruned_loss=0.07096, over 18412.00 frames. ], tot_loss[loss=0.2531, simple_loss=0.3237, pruned_loss=0.09127, over 3579802.95 frames. ], batch size: 48, lr: 2.72e-02, grad_scale: 8.0 +2023-03-08 17:30:55,308 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8895, 4.8814, 4.1925, 4.7223, 4.6694, 4.2804, 4.7610, 4.3541], + device='cuda:1'), covar=tensor([0.0321, 0.0369, 0.1889, 0.0698, 0.0507, 0.0408, 0.0336, 0.0686], + device='cuda:1'), in_proj_covar=tensor([0.0255, 0.0276, 0.0439, 0.0230, 0.0211, 0.0257, 0.0274, 0.0327], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-08 17:31:05,091 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11877.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:31:06,067 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11878.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:31:18,406 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11888.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:31:20,871 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8417, 4.5001, 4.7474, 4.5596, 4.5494, 4.7804, 5.0459, 4.9479], + device='cuda:1'), covar=tensor([0.0074, 0.0134, 0.0105, 0.0115, 0.0092, 0.0087, 0.0098, 0.0109], + device='cuda:1'), in_proj_covar=tensor([0.0061, 0.0049, 0.0045, 0.0058, 0.0053, 0.0064, 0.0056, 0.0052], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 17:31:24,513 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6784, 1.6535, 4.3531, 2.6861, 3.5456, 4.7317, 4.3988, 4.3124], + device='cuda:1'), covar=tensor([0.0217, 0.0870, 0.0138, 0.0593, 0.0968, 0.0028, 0.0181, 0.0110], + device='cuda:1'), in_proj_covar=tensor([0.0119, 0.0156, 0.0087, 0.0153, 0.0240, 0.0086, 0.0128, 0.0116], + device='cuda:1'), out_proj_covar=tensor([9.7675e-05, 1.2723e-04, 7.6421e-05, 1.1564e-04, 1.9140e-04, 6.3300e-05, + 1.0564e-04, 9.2851e-05], 
device='cuda:1') +2023-03-08 17:31:27,616 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5621, 5.1691, 5.2074, 5.0052, 4.8146, 4.9961, 4.3809, 4.9238], + device='cuda:1'), covar=tensor([0.0242, 0.0300, 0.0214, 0.0208, 0.0454, 0.0275, 0.1145, 0.0332], + device='cuda:1'), in_proj_covar=tensor([0.0108, 0.0146, 0.0136, 0.0120, 0.0145, 0.0144, 0.0211, 0.0131], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0004, 0.0004, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-08 17:31:35,205 INFO [train.py:898] (1/4) Epoch 4, batch 1000, loss[loss=0.2395, simple_loss=0.3148, pruned_loss=0.08207, over 18547.00 frames. ], tot_loss[loss=0.2527, simple_loss=0.3236, pruned_loss=0.09089, over 3583827.54 frames. ], batch size: 49, lr: 2.71e-02, grad_scale: 8.0 +2023-03-08 17:31:40,154 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11907.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:31:54,914 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11920.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:31:55,675 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.070e+02 4.938e+02 5.692e+02 6.896e+02 1.055e+03, threshold=1.138e+03, percent-clipped=0.0 +2023-03-08 17:32:28,281 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11948.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:32:33,420 INFO [train.py:898] (1/4) Epoch 4, batch 1050, loss[loss=0.2161, simple_loss=0.2846, pruned_loss=0.07379, over 17651.00 frames. ], tot_loss[loss=0.2545, simple_loss=0.3251, pruned_loss=0.09198, over 3580963.49 frames. ], batch size: 39, lr: 2.71e-02, grad_scale: 8.0 +2023-03-08 17:33:05,217 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11981.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:33:08,960 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11984.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:33:17,629 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11991.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:33:35,192 INFO [train.py:898] (1/4) Epoch 4, batch 1100, loss[loss=0.2386, simple_loss=0.2991, pruned_loss=0.08908, over 18157.00 frames. ], tot_loss[loss=0.2552, simple_loss=0.3257, pruned_loss=0.0924, over 3590012.88 frames. 
], batch size: 44, lr: 2.70e-02, grad_scale: 4.0 +2023-03-08 17:33:42,494 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12009.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:33:51,580 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6490, 3.7619, 1.6730, 4.4861, 3.0520, 4.7620, 2.2764, 4.3993], + device='cuda:1'), covar=tensor([0.0530, 0.0828, 0.1870, 0.0345, 0.0979, 0.0088, 0.1288, 0.0231], + device='cuda:1'), in_proj_covar=tensor([0.0137, 0.0182, 0.0162, 0.0131, 0.0160, 0.0091, 0.0161, 0.0142], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 17:33:56,852 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.224e+02 5.074e+02 6.054e+02 6.990e+02 2.904e+03, threshold=1.211e+03, percent-clipped=5.0 +2023-03-08 17:34:25,534 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12045.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:34:34,328 INFO [train.py:898] (1/4) Epoch 4, batch 1150, loss[loss=0.2568, simple_loss=0.3295, pruned_loss=0.09201, over 18353.00 frames. ], tot_loss[loss=0.255, simple_loss=0.3257, pruned_loss=0.09218, over 3596126.48 frames. ], batch size: 56, lr: 2.70e-02, grad_scale: 4.0 +2023-03-08 17:34:36,785 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12055.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:35:00,953 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12076.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:35:19,037 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6206, 4.0936, 5.0380, 2.9766, 4.1778, 3.2321, 3.3630, 2.1922], + device='cuda:1'), covar=tensor([0.0613, 0.0344, 0.0037, 0.0396, 0.0449, 0.1121, 0.1083, 0.1222], + device='cuda:1'), in_proj_covar=tensor([0.0132, 0.0133, 0.0067, 0.0112, 0.0148, 0.0183, 0.0127, 0.0153], + device='cuda:1'), out_proj_covar=tensor([1.3258e-04, 1.3759e-04, 7.0148e-05, 1.1510e-04, 1.5399e-04, 1.8393e-04, + 1.4274e-04, 1.5282e-04], device='cuda:1') +2023-03-08 17:35:32,889 INFO [train.py:898] (1/4) Epoch 4, batch 1200, loss[loss=0.2872, simple_loss=0.3524, pruned_loss=0.111, over 18261.00 frames. ], tot_loss[loss=0.2564, simple_loss=0.3269, pruned_loss=0.09298, over 3584588.70 frames. ], batch size: 57, lr: 2.69e-02, grad_scale: 8.0 +2023-03-08 17:35:54,812 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.657e+02 4.866e+02 5.977e+02 7.705e+02 1.703e+03, threshold=1.195e+03, percent-clipped=4.0 +2023-03-08 17:36:11,038 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0687, 5.1691, 2.7549, 4.9376, 4.8314, 5.1526, 4.9413, 2.3934], + device='cuda:1'), covar=tensor([0.0162, 0.0057, 0.0728, 0.0072, 0.0072, 0.0071, 0.0107, 0.1237], + device='cuda:1'), in_proj_covar=tensor([0.0061, 0.0046, 0.0079, 0.0054, 0.0053, 0.0047, 0.0059, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0004], + device='cuda:1') +2023-03-08 17:36:12,314 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12137.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:36:31,699 INFO [train.py:898] (1/4) Epoch 4, batch 1250, loss[loss=0.2322, simple_loss=0.2937, pruned_loss=0.08539, over 18411.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3266, pruned_loss=0.09296, over 3578906.28 frames. 
], batch size: 43, lr: 2.69e-02, grad_scale: 8.0 +2023-03-08 17:36:37,755 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6727, 3.4407, 3.0643, 2.7958, 3.1363, 2.4744, 2.6149, 3.4891], + device='cuda:1'), covar=tensor([0.0029, 0.0089, 0.0094, 0.0133, 0.0092, 0.0197, 0.0206, 0.0086], + device='cuda:1'), in_proj_covar=tensor([0.0044, 0.0053, 0.0053, 0.0081, 0.0053, 0.0082, 0.0095, 0.0050], + device='cuda:1'), out_proj_covar=tensor([6.6829e-05, 8.5027e-05, 8.8627e-05, 1.2834e-04, 8.1986e-05, 1.3090e-04, + 1.5458e-04, 8.1164e-05], device='cuda:1') +2023-03-08 17:36:53,289 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12172.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:37:11,246 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12188.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:37:29,463 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12202.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:37:30,454 INFO [train.py:898] (1/4) Epoch 4, batch 1300, loss[loss=0.276, simple_loss=0.3442, pruned_loss=0.1039, over 18404.00 frames. ], tot_loss[loss=0.2562, simple_loss=0.3264, pruned_loss=0.09296, over 3576329.31 frames. ], batch size: 52, lr: 2.68e-02, grad_scale: 4.0 +2023-03-08 17:37:52,909 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.709e+02 4.849e+02 5.902e+02 7.729e+02 1.516e+03, threshold=1.180e+03, percent-clipped=2.0 +2023-03-08 17:38:07,784 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12236.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:38:29,656 INFO [train.py:898] (1/4) Epoch 4, batch 1350, loss[loss=0.2804, simple_loss=0.3553, pruned_loss=0.1028, over 18495.00 frames. ], tot_loss[loss=0.2558, simple_loss=0.3259, pruned_loss=0.09289, over 3566670.27 frames. ], batch size: 53, lr: 2.68e-02, grad_scale: 4.0 +2023-03-08 17:38:56,202 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12276.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:39:13,292 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12291.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:39:27,817 INFO [train.py:898] (1/4) Epoch 4, batch 1400, loss[loss=0.2839, simple_loss=0.36, pruned_loss=0.1039, over 18315.00 frames. ], tot_loss[loss=0.2553, simple_loss=0.3252, pruned_loss=0.09269, over 3575918.10 frames. ], batch size: 54, lr: 2.67e-02, grad_scale: 4.0 +2023-03-08 17:39:29,719 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12304.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:39:51,203 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.094e+02 5.084e+02 6.027e+02 7.946e+02 1.309e+03, threshold=1.205e+03, percent-clipped=1.0 +2023-03-08 17:40:09,875 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12339.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:40:11,038 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12340.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:40:26,789 INFO [train.py:898] (1/4) Epoch 4, batch 1450, loss[loss=0.2304, simple_loss=0.298, pruned_loss=0.08135, over 18488.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3244, pruned_loss=0.09223, over 3569963.12 frames. 
], batch size: 47, lr: 2.67e-02, grad_scale: 4.0 +2023-03-08 17:40:29,931 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12355.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:41:24,353 INFO [train.py:898] (1/4) Epoch 4, batch 1500, loss[loss=0.2214, simple_loss=0.2945, pruned_loss=0.07418, over 18276.00 frames. ], tot_loss[loss=0.2546, simple_loss=0.3248, pruned_loss=0.09223, over 3579709.18 frames. ], batch size: 47, lr: 2.66e-02, grad_scale: 4.0 +2023-03-08 17:41:24,587 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12403.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:41:48,088 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1330, 3.0155, 4.3596, 4.0054, 2.2253, 4.6373, 3.9903, 3.0475], + device='cuda:1'), covar=tensor([0.0193, 0.0681, 0.0060, 0.0175, 0.1224, 0.0055, 0.0204, 0.0722], + device='cuda:1'), in_proj_covar=tensor([0.0112, 0.0143, 0.0080, 0.0090, 0.0162, 0.0097, 0.0106, 0.0154], + device='cuda:1'), out_proj_covar=tensor([1.0932e-04, 1.3944e-04, 8.5527e-05, 8.8051e-05, 1.5659e-04, 9.1664e-05, + 1.1156e-04, 1.5556e-04], device='cuda:1') +2023-03-08 17:41:48,829 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.804e+02 4.633e+02 5.942e+02 7.166e+02 1.765e+03, threshold=1.188e+03, percent-clipped=4.0 +2023-03-08 17:41:49,198 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0141, 4.6198, 4.7931, 4.7488, 4.6797, 4.8757, 5.1748, 4.9018], + device='cuda:1'), covar=tensor([0.0052, 0.0132, 0.0141, 0.0090, 0.0126, 0.0080, 0.0077, 0.0134], + device='cuda:1'), in_proj_covar=tensor([0.0061, 0.0047, 0.0045, 0.0058, 0.0052, 0.0066, 0.0055, 0.0052], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 17:41:59,122 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12432.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:42:15,132 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12446.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:42:22,775 INFO [train.py:898] (1/4) Epoch 4, batch 1550, loss[loss=0.2254, simple_loss=0.298, pruned_loss=0.07636, over 18396.00 frames. ], tot_loss[loss=0.2541, simple_loss=0.3244, pruned_loss=0.09186, over 3583584.26 frames. ], batch size: 48, lr: 2.66e-02, grad_scale: 4.0 +2023-03-08 17:42:45,851 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12472.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:43:19,575 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12502.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:43:20,380 INFO [train.py:898] (1/4) Epoch 4, batch 1600, loss[loss=0.216, simple_loss=0.287, pruned_loss=0.07254, over 18417.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.325, pruned_loss=0.09186, over 3590197.76 frames. 
], batch size: 48, lr: 2.65e-02, grad_scale: 8.0 +2023-03-08 17:43:25,771 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12507.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:43:33,560 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.6041, 5.0673, 5.6657, 5.6309, 5.4317, 6.2351, 5.7039, 5.5942], + device='cuda:1'), covar=tensor([0.0678, 0.0567, 0.0545, 0.0440, 0.1131, 0.0636, 0.0573, 0.1279], + device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0160, 0.0173, 0.0170, 0.0216, 0.0246, 0.0162, 0.0241], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-08 17:43:35,240 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-08 17:43:41,780 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12520.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:43:44,950 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.642e+02 4.900e+02 6.087e+02 7.463e+02 1.966e+03, threshold=1.217e+03, percent-clipped=9.0 +2023-03-08 17:44:15,880 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12550.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:44:19,017 INFO [train.py:898] (1/4) Epoch 4, batch 1650, loss[loss=0.2213, simple_loss=0.2885, pruned_loss=0.07702, over 18240.00 frames. ], tot_loss[loss=0.2539, simple_loss=0.3248, pruned_loss=0.0915, over 3597711.62 frames. ], batch size: 45, lr: 2.65e-02, grad_scale: 8.0 +2023-03-08 17:44:48,267 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12576.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:45:18,521 INFO [train.py:898] (1/4) Epoch 4, batch 1700, loss[loss=0.2525, simple_loss=0.32, pruned_loss=0.09251, over 18407.00 frames. ], tot_loss[loss=0.2531, simple_loss=0.3239, pruned_loss=0.09116, over 3593906.78 frames. 
], batch size: 48, lr: 2.65e-02, grad_scale: 8.0 +2023-03-08 17:45:19,926 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12604.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:45:43,562 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.156e+02 4.705e+02 5.777e+02 7.062e+02 1.643e+03, threshold=1.155e+03, percent-clipped=4.0 +2023-03-08 17:45:44,912 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12624.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:46:02,679 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12640.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:46:09,325 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7615, 5.2448, 5.3589, 5.0607, 4.9525, 5.1310, 4.6376, 5.0884], + device='cuda:1'), covar=tensor([0.0207, 0.0301, 0.0161, 0.0247, 0.0376, 0.0262, 0.1005, 0.0333], + device='cuda:1'), in_proj_covar=tensor([0.0113, 0.0155, 0.0136, 0.0128, 0.0146, 0.0154, 0.0218, 0.0137], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004], + device='cuda:1') +2023-03-08 17:46:09,482 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.3757, 3.8811, 5.1945, 4.5746, 3.1189, 3.2074, 4.5718, 5.3339], + device='cuda:1'), covar=tensor([0.0949, 0.0888, 0.0035, 0.0181, 0.0752, 0.0906, 0.0203, 0.0023], + device='cuda:1'), in_proj_covar=tensor([0.0122, 0.0120, 0.0059, 0.0116, 0.0145, 0.0148, 0.0116, 0.0056], + device='cuda:1'), out_proj_covar=tensor([2.0044e-04, 2.0607e-04, 1.0299e-04, 1.9016e-04, 2.2246e-04, 2.3223e-04, + 1.8856e-04, 9.1585e-05], device='cuda:1') +2023-03-08 17:46:15,921 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12652.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:46:16,914 INFO [train.py:898] (1/4) Epoch 4, batch 1750, loss[loss=0.2325, simple_loss=0.3049, pruned_loss=0.08005, over 18362.00 frames. ], tot_loss[loss=0.2522, simple_loss=0.3231, pruned_loss=0.09069, over 3586650.31 frames. ], batch size: 46, lr: 2.64e-02, grad_scale: 8.0 +2023-03-08 17:46:54,572 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8685, 3.8870, 5.0133, 3.2098, 4.1723, 2.9316, 3.2261, 1.8830], + device='cuda:1'), covar=tensor([0.0510, 0.0372, 0.0050, 0.0340, 0.0474, 0.1288, 0.1128, 0.1387], + device='cuda:1'), in_proj_covar=tensor([0.0140, 0.0141, 0.0070, 0.0116, 0.0159, 0.0194, 0.0143, 0.0163], + device='cuda:1'), out_proj_covar=tensor([1.3930e-04, 1.4538e-04, 7.4418e-05, 1.1734e-04, 1.6400e-04, 1.9374e-04, + 1.5802e-04, 1.6358e-04], device='cuda:1') +2023-03-08 17:46:58,851 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12688.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:47:15,807 INFO [train.py:898] (1/4) Epoch 4, batch 1800, loss[loss=0.2383, simple_loss=0.3171, pruned_loss=0.07977, over 18490.00 frames. ], tot_loss[loss=0.251, simple_loss=0.3221, pruned_loss=0.08995, over 3584144.96 frames. 
], batch size: 51, lr: 2.64e-02, grad_scale: 8.0 +2023-03-08 17:47:39,919 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.294e+02 5.100e+02 5.998e+02 7.451e+02 2.129e+03, threshold=1.200e+03, percent-clipped=3.0 +2023-03-08 17:47:51,197 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12732.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:48:15,104 INFO [train.py:898] (1/4) Epoch 4, batch 1850, loss[loss=0.2912, simple_loss=0.353, pruned_loss=0.1147, over 17001.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.3205, pruned_loss=0.08861, over 3592696.27 frames. ], batch size: 78, lr: 2.63e-02, grad_scale: 8.0 +2023-03-08 17:48:26,885 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12763.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:48:47,864 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12780.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:49:12,686 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12802.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:49:13,592 INFO [train.py:898] (1/4) Epoch 4, batch 1900, loss[loss=0.2864, simple_loss=0.3554, pruned_loss=0.1087, over 16005.00 frames. ], tot_loss[loss=0.251, simple_loss=0.3219, pruned_loss=0.09012, over 3576559.04 frames. ], batch size: 94, lr: 2.63e-02, grad_scale: 8.0 +2023-03-08 17:49:20,754 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7951, 4.1475, 5.1938, 3.2928, 4.3997, 2.9411, 3.4553, 2.2185], + device='cuda:1'), covar=tensor([0.0536, 0.0366, 0.0050, 0.0340, 0.0398, 0.1365, 0.1046, 0.1288], + device='cuda:1'), in_proj_covar=tensor([0.0141, 0.0142, 0.0072, 0.0117, 0.0160, 0.0193, 0.0147, 0.0164], + device='cuda:1'), out_proj_covar=tensor([1.4021e-04, 1.4552e-04, 7.6514e-05, 1.1853e-04, 1.6516e-04, 1.9393e-04, + 1.6129e-04, 1.6448e-04], device='cuda:1') +2023-03-08 17:49:36,781 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.387e+02 4.784e+02 5.928e+02 6.975e+02 1.301e+03, threshold=1.186e+03, percent-clipped=1.0 +2023-03-08 17:49:38,299 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12824.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 17:50:11,763 INFO [train.py:898] (1/4) Epoch 4, batch 1950, loss[loss=0.2455, simple_loss=0.3311, pruned_loss=0.07995, over 18436.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.3213, pruned_loss=0.08961, over 3578377.11 frames. ], batch size: 48, lr: 2.62e-02, grad_scale: 8.0 +2023-03-08 17:50:37,636 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0195, 4.8582, 4.9529, 4.8682, 4.7381, 4.9055, 5.2301, 5.2373], + device='cuda:1'), covar=tensor([0.0050, 0.0094, 0.0082, 0.0069, 0.0083, 0.0082, 0.0076, 0.0085], + device='cuda:1'), in_proj_covar=tensor([0.0062, 0.0048, 0.0045, 0.0059, 0.0053, 0.0067, 0.0055, 0.0053], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 17:51:10,760 INFO [train.py:898] (1/4) Epoch 4, batch 2000, loss[loss=0.2393, simple_loss=0.3038, pruned_loss=0.08733, over 18419.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.3222, pruned_loss=0.0902, over 3569098.88 frames. 
], batch size: 48, lr: 2.62e-02, grad_scale: 8.0 +2023-03-08 17:51:33,471 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.610e+02 4.786e+02 5.613e+02 6.495e+02 1.703e+03, threshold=1.123e+03, percent-clipped=3.0 +2023-03-08 17:51:43,192 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5887, 4.4934, 4.6562, 4.4286, 4.2834, 4.5193, 4.8003, 4.9923], + device='cuda:1'), covar=tensor([0.0069, 0.0126, 0.0100, 0.0120, 0.0106, 0.0108, 0.0204, 0.0153], + device='cuda:1'), in_proj_covar=tensor([0.0061, 0.0048, 0.0044, 0.0058, 0.0051, 0.0066, 0.0055, 0.0052], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 17:52:08,675 INFO [train.py:898] (1/4) Epoch 4, batch 2050, loss[loss=0.203, simple_loss=0.2737, pruned_loss=0.06617, over 18429.00 frames. ], tot_loss[loss=0.2515, simple_loss=0.3227, pruned_loss=0.09017, over 3581115.37 frames. ], batch size: 43, lr: 2.61e-02, grad_scale: 8.0 +2023-03-08 17:53:07,625 INFO [train.py:898] (1/4) Epoch 4, batch 2100, loss[loss=0.2581, simple_loss=0.3023, pruned_loss=0.1069, over 17709.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3223, pruned_loss=0.09023, over 3582380.48 frames. ], batch size: 39, lr: 2.61e-02, grad_scale: 8.0 +2023-03-08 17:53:29,882 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.835e+02 4.911e+02 6.247e+02 7.223e+02 1.206e+03, threshold=1.249e+03, percent-clipped=2.0 +2023-03-08 17:53:50,179 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6468, 1.5527, 4.2045, 2.9398, 3.8004, 5.2127, 4.4148, 4.4590], + device='cuda:1'), covar=tensor([0.0239, 0.0958, 0.0268, 0.0485, 0.0872, 0.0022, 0.0198, 0.0143], + device='cuda:1'), in_proj_covar=tensor([0.0126, 0.0167, 0.0101, 0.0162, 0.0251, 0.0093, 0.0137, 0.0124], + device='cuda:1'), out_proj_covar=tensor([1.0096e-04, 1.3294e-04, 8.9463e-05, 1.1955e-04, 1.9798e-04, 6.7809e-05, + 1.0981e-04, 9.8583e-05], device='cuda:1') +2023-03-08 17:54:06,017 INFO [train.py:898] (1/4) Epoch 4, batch 2150, loss[loss=0.2538, simple_loss=0.3242, pruned_loss=0.09163, over 18053.00 frames. ], tot_loss[loss=0.2513, simple_loss=0.3221, pruned_loss=0.0902, over 3587020.02 frames. ], batch size: 65, lr: 2.61e-02, grad_scale: 8.0 +2023-03-08 17:54:24,725 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-03-08 17:54:58,894 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.04 vs. limit=2.0 +2023-03-08 17:55:03,543 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=13102.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:55:04,394 INFO [train.py:898] (1/4) Epoch 4, batch 2200, loss[loss=0.2286, simple_loss=0.2985, pruned_loss=0.07941, over 17716.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3208, pruned_loss=0.08947, over 3598954.46 frames. 
], batch size: 39, lr: 2.60e-02, grad_scale: 8.0 +2023-03-08 17:55:22,893 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=13119.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 17:55:27,155 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.452e+02 4.789e+02 5.834e+02 7.042e+02 2.257e+03, threshold=1.167e+03, percent-clipped=4.0 +2023-03-08 17:55:30,916 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7436, 2.4545, 3.8246, 3.7788, 1.6329, 4.2023, 3.8437, 2.5103], + device='cuda:1'), covar=tensor([0.0270, 0.1404, 0.0169, 0.0210, 0.2431, 0.0151, 0.0296, 0.1362], + device='cuda:1'), in_proj_covar=tensor([0.0123, 0.0153, 0.0087, 0.0097, 0.0171, 0.0108, 0.0121, 0.0162], + device='cuda:1'), out_proj_covar=tensor([1.1931e-04, 1.4962e-04, 9.1725e-05, 9.3547e-05, 1.6499e-04, 1.0157e-04, + 1.2864e-04, 1.6238e-04], device='cuda:1') +2023-03-08 17:55:48,882 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7267, 3.4549, 1.8689, 4.3893, 3.0272, 4.8369, 2.4248, 3.9204], + device='cuda:1'), covar=tensor([0.0472, 0.0792, 0.1510, 0.0273, 0.0846, 0.0070, 0.1027, 0.0327], + device='cuda:1'), in_proj_covar=tensor([0.0140, 0.0176, 0.0157, 0.0137, 0.0155, 0.0096, 0.0158, 0.0144], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 17:55:59,504 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=13150.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:56:02,801 INFO [train.py:898] (1/4) Epoch 4, batch 2250, loss[loss=0.2388, simple_loss=0.3224, pruned_loss=0.07759, over 18486.00 frames. ], tot_loss[loss=0.2515, simple_loss=0.3226, pruned_loss=0.09023, over 3592647.53 frames. ], batch size: 53, lr: 2.60e-02, grad_scale: 8.0 +2023-03-08 17:57:01,388 INFO [train.py:898] (1/4) Epoch 4, batch 2300, loss[loss=0.2601, simple_loss=0.3226, pruned_loss=0.09883, over 17579.00 frames. ], tot_loss[loss=0.2523, simple_loss=0.3233, pruned_loss=0.09068, over 3597398.64 frames. ], batch size: 39, lr: 2.59e-02, grad_scale: 8.0 +2023-03-08 17:57:24,771 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.647e+02 4.949e+02 6.048e+02 7.391e+02 1.554e+03, threshold=1.210e+03, percent-clipped=6.0 +2023-03-08 17:57:37,774 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9425, 4.0924, 5.0865, 3.2569, 4.3220, 3.3514, 3.2953, 2.3097], + device='cuda:1'), covar=tensor([0.0480, 0.0377, 0.0035, 0.0358, 0.0380, 0.1093, 0.1045, 0.1176], + device='cuda:1'), in_proj_covar=tensor([0.0140, 0.0142, 0.0072, 0.0117, 0.0159, 0.0193, 0.0148, 0.0162], + device='cuda:1'), out_proj_covar=tensor([1.3840e-04, 1.4508e-04, 7.6276e-05, 1.1914e-04, 1.6346e-04, 1.9396e-04, + 1.6281e-04, 1.6250e-04], device='cuda:1') +2023-03-08 17:57:47,478 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8936, 2.8806, 4.0627, 3.7448, 1.9539, 4.5067, 4.0660, 2.6308], + device='cuda:1'), covar=tensor([0.0214, 0.0836, 0.0164, 0.0207, 0.1612, 0.0085, 0.0204, 0.0982], + device='cuda:1'), in_proj_covar=tensor([0.0123, 0.0151, 0.0086, 0.0098, 0.0170, 0.0108, 0.0118, 0.0159], + device='cuda:1'), out_proj_covar=tensor([1.1966e-04, 1.4764e-04, 9.0979e-05, 9.4805e-05, 1.6271e-04, 1.0201e-04, + 1.2521e-04, 1.6043e-04], device='cuda:1') +2023-03-08 17:58:00,616 INFO [train.py:898] (1/4) Epoch 4, batch 2350, loss[loss=0.2485, simple_loss=0.3199, pruned_loss=0.0886, over 18393.00 frames. 
], tot_loss[loss=0.2526, simple_loss=0.3236, pruned_loss=0.09075, over 3581998.00 frames. ], batch size: 52, lr: 2.59e-02, grad_scale: 8.0 +2023-03-08 17:58:58,988 INFO [train.py:898] (1/4) Epoch 4, batch 2400, loss[loss=0.2703, simple_loss=0.3392, pruned_loss=0.1007, over 18371.00 frames. ], tot_loss[loss=0.2519, simple_loss=0.3231, pruned_loss=0.09033, over 3594568.68 frames. ], batch size: 56, lr: 2.58e-02, grad_scale: 8.0 +2023-03-08 17:59:23,022 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.316e+02 4.704e+02 5.884e+02 7.120e+02 1.441e+03, threshold=1.177e+03, percent-clipped=2.0 +2023-03-08 17:59:36,398 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-03-08 17:59:58,172 INFO [train.py:898] (1/4) Epoch 4, batch 2450, loss[loss=0.2409, simple_loss=0.3105, pruned_loss=0.08563, over 18272.00 frames. ], tot_loss[loss=0.2517, simple_loss=0.323, pruned_loss=0.09023, over 3604425.67 frames. ], batch size: 47, lr: 2.58e-02, grad_scale: 8.0 +2023-03-08 18:00:16,861 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7645, 2.0947, 1.9690, 2.4106, 2.6795, 2.6499, 2.3519, 2.5572], + device='cuda:1'), covar=tensor([0.0273, 0.0247, 0.0980, 0.0462, 0.0216, 0.0185, 0.0477, 0.0235], + device='cuda:1'), in_proj_covar=tensor([0.0087, 0.0064, 0.0127, 0.0096, 0.0070, 0.0053, 0.0084, 0.0080], + device='cuda:1'), out_proj_covar=tensor([1.6779e-04, 1.2740e-04, 2.2769e-04, 1.7785e-04, 1.4009e-04, 9.7048e-05, + 1.6245e-04, 1.5396e-04], device='cuda:1') +2023-03-08 18:00:55,676 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1355, 5.2510, 2.9076, 5.1298, 5.1080, 5.3657, 4.9657, 2.4074], + device='cuda:1'), covar=tensor([0.0158, 0.0089, 0.0758, 0.0065, 0.0057, 0.0064, 0.0131, 0.1360], + device='cuda:1'), in_proj_covar=tensor([0.0060, 0.0046, 0.0076, 0.0054, 0.0053, 0.0046, 0.0057, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0004], + device='cuda:1') +2023-03-08 18:00:56,448 INFO [train.py:898] (1/4) Epoch 4, batch 2500, loss[loss=0.2524, simple_loss=0.3219, pruned_loss=0.09147, over 18281.00 frames. ], tot_loss[loss=0.2518, simple_loss=0.3234, pruned_loss=0.09008, over 3603475.29 frames. ], batch size: 49, lr: 2.58e-02, grad_scale: 8.0 +2023-03-08 18:01:16,454 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=13419.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:01:18,177 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-08 18:01:20,738 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.373e+02 5.229e+02 6.294e+02 7.709e+02 1.447e+03, threshold=1.259e+03, percent-clipped=4.0 +2023-03-08 18:01:55,102 INFO [train.py:898] (1/4) Epoch 4, batch 2550, loss[loss=0.223, simple_loss=0.3056, pruned_loss=0.07023, over 18502.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.3219, pruned_loss=0.08934, over 3605461.32 frames. 
], batch size: 47, lr: 2.57e-02, grad_scale: 8.0 +2023-03-08 18:02:12,155 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=13467.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:02:48,735 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7065, 5.1701, 4.8924, 4.9406, 4.7305, 4.8543, 5.2415, 5.1346], + device='cuda:1'), covar=tensor([0.1073, 0.0653, 0.0721, 0.0726, 0.1609, 0.0648, 0.0525, 0.0647], + device='cuda:1'), in_proj_covar=tensor([0.0367, 0.0308, 0.0234, 0.0328, 0.0462, 0.0332, 0.0360, 0.0301], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:1') +2023-03-08 18:02:53,615 INFO [train.py:898] (1/4) Epoch 4, batch 2600, loss[loss=0.25, simple_loss=0.3283, pruned_loss=0.08584, over 17992.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3224, pruned_loss=0.0895, over 3598756.71 frames. ], batch size: 65, lr: 2.57e-02, grad_scale: 8.0 +2023-03-08 18:03:17,325 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 4.722e+02 5.886e+02 7.226e+02 1.343e+03, threshold=1.177e+03, percent-clipped=1.0 +2023-03-08 18:03:35,868 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-08 18:03:42,181 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6289, 4.1688, 4.1809, 3.0024, 3.3790, 3.0561, 2.3614, 1.6459], + device='cuda:1'), covar=tensor([0.0151, 0.0157, 0.0065, 0.0274, 0.0295, 0.0185, 0.0681, 0.0959], + device='cuda:1'), in_proj_covar=tensor([0.0035, 0.0035, 0.0028, 0.0040, 0.0056, 0.0031, 0.0058, 0.0062], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 18:03:51,840 INFO [train.py:898] (1/4) Epoch 4, batch 2650, loss[loss=0.2505, simple_loss=0.3076, pruned_loss=0.09668, over 18438.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3216, pruned_loss=0.08895, over 3590813.00 frames. ], batch size: 42, lr: 2.56e-02, grad_scale: 8.0 +2023-03-08 18:04:22,570 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7277, 3.5241, 3.0580, 2.9867, 3.3030, 2.4774, 2.3607, 3.5980], + device='cuda:1'), covar=tensor([0.0032, 0.0106, 0.0119, 0.0119, 0.0073, 0.0179, 0.0216, 0.0075], + device='cuda:1'), in_proj_covar=tensor([0.0049, 0.0060, 0.0057, 0.0090, 0.0056, 0.0091, 0.0103, 0.0054], + device='cuda:1'), out_proj_covar=tensor([7.4184e-05, 9.6750e-05, 9.3826e-05, 1.4699e-04, 8.8169e-05, 1.4628e-04, + 1.6747e-04, 8.7393e-05], device='cuda:1') +2023-03-08 18:04:25,000 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.83 vs. limit=2.0 +2023-03-08 18:04:43,276 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.59 vs. limit=5.0 +2023-03-08 18:04:50,571 INFO [train.py:898] (1/4) Epoch 4, batch 2700, loss[loss=0.2604, simple_loss=0.3268, pruned_loss=0.09702, over 18175.00 frames. ], tot_loss[loss=0.2515, simple_loss=0.323, pruned_loss=0.08998, over 3580525.21 frames. ], batch size: 62, lr: 2.56e-02, grad_scale: 8.0 +2023-03-08 18:05:14,651 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.306e+02 5.163e+02 6.316e+02 7.971e+02 1.590e+03, threshold=1.263e+03, percent-clipped=4.0 +2023-03-08 18:05:48,675 INFO [train.py:898] (1/4) Epoch 4, batch 2750, loss[loss=0.2324, simple_loss=0.3077, pruned_loss=0.07852, over 18302.00 frames. ], tot_loss[loss=0.2514, simple_loss=0.3223, pruned_loss=0.09026, over 3576909.44 frames. 
], batch size: 49, lr: 2.55e-02, grad_scale: 8.0 +2023-03-08 18:06:47,203 INFO [train.py:898] (1/4) Epoch 4, batch 2800, loss[loss=0.2988, simple_loss=0.3649, pruned_loss=0.1163, over 18359.00 frames. ], tot_loss[loss=0.2524, simple_loss=0.3235, pruned_loss=0.0906, over 3576064.45 frames. ], batch size: 56, lr: 2.55e-02, grad_scale: 8.0 +2023-03-08 18:07:03,986 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3880, 1.7829, 3.8964, 2.7676, 3.2973, 5.0197, 4.2721, 4.3536], + device='cuda:1'), covar=tensor([0.0294, 0.0954, 0.0324, 0.0637, 0.1174, 0.0025, 0.0227, 0.0158], + device='cuda:1'), in_proj_covar=tensor([0.0132, 0.0173, 0.0116, 0.0169, 0.0257, 0.0094, 0.0143, 0.0128], + device='cuda:1'), out_proj_covar=tensor([1.0571e-04, 1.3664e-04, 9.9392e-05, 1.2261e-04, 1.9995e-04, 6.6527e-05, + 1.1280e-04, 9.9759e-05], device='cuda:1') +2023-03-08 18:07:10,812 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3556, 5.4722, 3.5151, 5.0839, 5.2381, 5.5519, 5.3147, 3.0529], + device='cuda:1'), covar=tensor([0.0116, 0.0045, 0.0460, 0.0066, 0.0050, 0.0035, 0.0075, 0.0827], + device='cuda:1'), in_proj_covar=tensor([0.0060, 0.0048, 0.0080, 0.0057, 0.0055, 0.0048, 0.0061, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0004], + device='cuda:1') +2023-03-08 18:07:11,546 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.506e+02 4.982e+02 6.101e+02 7.616e+02 1.473e+03, threshold=1.220e+03, percent-clipped=5.0 +2023-03-08 18:07:28,149 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=13737.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:07:45,692 INFO [train.py:898] (1/4) Epoch 4, batch 2850, loss[loss=0.2561, simple_loss=0.3299, pruned_loss=0.09121, over 18627.00 frames. ], tot_loss[loss=0.2517, simple_loss=0.3227, pruned_loss=0.09029, over 3583346.70 frames. ], batch size: 52, lr: 2.55e-02, grad_scale: 8.0 +2023-03-08 18:08:38,500 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=13798.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 18:08:44,016 INFO [train.py:898] (1/4) Epoch 4, batch 2900, loss[loss=0.2459, simple_loss=0.3047, pruned_loss=0.09355, over 18380.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3223, pruned_loss=0.08979, over 3588162.13 frames. ], batch size: 46, lr: 2.54e-02, grad_scale: 8.0 +2023-03-08 18:09:01,462 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2913, 5.2732, 4.6869, 5.1697, 5.2270, 4.6971, 5.1564, 4.8151], + device='cuda:1'), covar=tensor([0.0356, 0.0372, 0.1554, 0.0666, 0.0331, 0.0392, 0.0363, 0.0716], + device='cuda:1'), in_proj_covar=tensor([0.0271, 0.0300, 0.0455, 0.0239, 0.0227, 0.0279, 0.0301, 0.0363], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0003, 0.0003, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-08 18:09:07,286 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.361e+02 5.191e+02 6.530e+02 8.692e+02 2.187e+03, threshold=1.306e+03, percent-clipped=7.0 +2023-03-08 18:09:43,218 INFO [train.py:898] (1/4) Epoch 4, batch 2950, loss[loss=0.3205, simple_loss=0.377, pruned_loss=0.132, over 16257.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.3222, pruned_loss=0.08999, over 3571182.72 frames. ], batch size: 94, lr: 2.54e-02, grad_scale: 8.0 +2023-03-08 18:09:55,187 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.66 vs. 
limit=2.0 +2023-03-08 18:10:08,562 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9478, 2.8193, 4.3138, 4.0975, 2.0933, 4.6018, 3.9911, 2.6873], + device='cuda:1'), covar=tensor([0.0252, 0.0914, 0.0076, 0.0161, 0.1768, 0.0082, 0.0267, 0.0952], + device='cuda:1'), in_proj_covar=tensor([0.0127, 0.0155, 0.0085, 0.0099, 0.0174, 0.0114, 0.0128, 0.0162], + device='cuda:1'), out_proj_covar=tensor([1.2345e-04, 1.5145e-04, 8.7864e-05, 9.6472e-05, 1.6654e-04, 1.0687e-04, + 1.3346e-04, 1.6323e-04], device='cuda:1') +2023-03-08 18:10:40,649 INFO [train.py:898] (1/4) Epoch 4, batch 3000, loss[loss=0.2384, simple_loss=0.3173, pruned_loss=0.07973, over 18311.00 frames. ], tot_loss[loss=0.251, simple_loss=0.3223, pruned_loss=0.08981, over 3580855.04 frames. ], batch size: 54, lr: 2.53e-02, grad_scale: 4.0 +2023-03-08 18:10:40,650 INFO [train.py:923] (1/4) Computing validation loss +2023-03-08 18:10:49,742 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.1445, 3.5800, 4.7322, 4.0233, 2.8725, 2.6459, 4.1044, 4.7022], + device='cuda:1'), covar=tensor([0.1025, 0.0990, 0.0054, 0.0261, 0.0856, 0.1007, 0.0260, 0.0041], + device='cuda:1'), in_proj_covar=tensor([0.0129, 0.0143, 0.0061, 0.0125, 0.0152, 0.0156, 0.0127, 0.0065], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-08 18:10:52,600 INFO [train.py:932] (1/4) Epoch 4, validation: loss=0.1898, simple_loss=0.292, pruned_loss=0.04378, over 944034.00 frames. +2023-03-08 18:10:52,601 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19608MB +2023-03-08 18:11:17,315 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.366e+02 5.126e+02 6.271e+02 8.521e+02 2.590e+03, threshold=1.254e+03, percent-clipped=11.0 +2023-03-08 18:11:50,815 INFO [train.py:898] (1/4) Epoch 4, batch 3050, loss[loss=0.2543, simple_loss=0.3353, pruned_loss=0.08665, over 18362.00 frames. ], tot_loss[loss=0.2503, simple_loss=0.3219, pruned_loss=0.08938, over 3584009.49 frames. ], batch size: 56, lr: 2.53e-02, grad_scale: 4.0 +2023-03-08 18:11:56,561 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4128, 5.9577, 5.3821, 5.8831, 5.3782, 5.5810, 6.1000, 5.9770], + device='cuda:1'), covar=tensor([0.0944, 0.0560, 0.0370, 0.0502, 0.1488, 0.0585, 0.0409, 0.0569], + device='cuda:1'), in_proj_covar=tensor([0.0377, 0.0308, 0.0231, 0.0332, 0.0474, 0.0335, 0.0360, 0.0313], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:1') +2023-03-08 18:12:53,755 INFO [train.py:898] (1/4) Epoch 4, batch 3100, loss[loss=0.2591, simple_loss=0.337, pruned_loss=0.09057, over 18311.00 frames. ], tot_loss[loss=0.2512, simple_loss=0.3225, pruned_loss=0.08994, over 3572398.27 frames. ], batch size: 57, lr: 2.53e-02, grad_scale: 4.0 +2023-03-08 18:13:04,705 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. 
limit=2.0 +2023-03-08 18:13:16,607 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6747, 4.0408, 4.3613, 3.5953, 3.4254, 3.4933, 2.1159, 1.7806], + device='cuda:1'), covar=tensor([0.0139, 0.0194, 0.0037, 0.0151, 0.0259, 0.0128, 0.0703, 0.0928], + device='cuda:1'), in_proj_covar=tensor([0.0034, 0.0034, 0.0026, 0.0038, 0.0055, 0.0030, 0.0055, 0.0060], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 18:13:18,985 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.897e+02 4.672e+02 5.881e+02 7.283e+02 1.291e+03, threshold=1.176e+03, percent-clipped=1.0 +2023-03-08 18:13:52,238 INFO [train.py:898] (1/4) Epoch 4, batch 3150, loss[loss=0.2675, simple_loss=0.3464, pruned_loss=0.09428, over 17970.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3219, pruned_loss=0.08925, over 3581075.79 frames. ], batch size: 65, lr: 2.52e-02, grad_scale: 4.0 +2023-03-08 18:13:52,490 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4496, 5.9063, 5.4127, 5.7193, 5.3504, 5.5046, 5.9915, 5.8674], + device='cuda:1'), covar=tensor([0.0923, 0.0544, 0.0433, 0.0580, 0.1671, 0.0583, 0.0462, 0.0569], + device='cuda:1'), in_proj_covar=tensor([0.0371, 0.0308, 0.0234, 0.0334, 0.0469, 0.0336, 0.0363, 0.0311], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:1') +2023-03-08 18:14:07,416 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5916, 4.4223, 4.7075, 4.4872, 4.4622, 4.4885, 4.9491, 4.8246], + device='cuda:1'), covar=tensor([0.0072, 0.0087, 0.0073, 0.0087, 0.0068, 0.0100, 0.0079, 0.0111], + device='cuda:1'), in_proj_covar=tensor([0.0061, 0.0046, 0.0045, 0.0059, 0.0051, 0.0066, 0.0055, 0.0054], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 18:14:15,390 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14072.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:14:40,063 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14093.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 18:14:51,080 INFO [train.py:898] (1/4) Epoch 4, batch 3200, loss[loss=0.2909, simple_loss=0.3547, pruned_loss=0.1135, over 17986.00 frames. ], tot_loss[loss=0.2484, simple_loss=0.3203, pruned_loss=0.08827, over 3592497.33 frames. 
], batch size: 65, lr: 2.52e-02, grad_scale: 8.0 +2023-03-08 18:14:51,414 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14103.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:15:10,620 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14120.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 18:15:14,750 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.167e+02 5.012e+02 5.863e+02 7.076e+02 1.938e+03, threshold=1.173e+03, percent-clipped=6.0 +2023-03-08 18:15:22,928 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5894, 3.9194, 4.0453, 3.9830, 3.7505, 3.9110, 3.3253, 3.9382], + device='cuda:1'), covar=tensor([0.0345, 0.0482, 0.0341, 0.0347, 0.0521, 0.0318, 0.1517, 0.0342], + device='cuda:1'), in_proj_covar=tensor([0.0120, 0.0157, 0.0138, 0.0130, 0.0152, 0.0156, 0.0227, 0.0141], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004], + device='cuda:1') +2023-03-08 18:15:26,326 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14133.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:15:49,278 INFO [train.py:898] (1/4) Epoch 4, batch 3250, loss[loss=0.2358, simple_loss=0.3148, pruned_loss=0.07841, over 18253.00 frames. ], tot_loss[loss=0.2485, simple_loss=0.3201, pruned_loss=0.08847, over 3582216.74 frames. ], batch size: 57, lr: 2.51e-02, grad_scale: 8.0 +2023-03-08 18:15:52,037 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1924, 3.3583, 2.5078, 3.4905, 4.2104, 2.3865, 3.3481, 3.2980], + device='cuda:1'), covar=tensor([0.0063, 0.0913, 0.0980, 0.0388, 0.0048, 0.0980, 0.0489, 0.0501], + device='cuda:1'), in_proj_covar=tensor([0.0077, 0.0145, 0.0167, 0.0158, 0.0070, 0.0156, 0.0176, 0.0167], + device='cuda:1'), out_proj_covar=tensor([1.0574e-04, 2.0692e-04, 2.1257e-04, 2.0804e-04, 9.4709e-05, 2.0662e-04, + 2.2425e-04, 2.2247e-04], device='cuda:1') +2023-03-08 18:16:01,780 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14164.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:16:22,259 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14181.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 18:16:47,519 INFO [train.py:898] (1/4) Epoch 4, batch 3300, loss[loss=0.2372, simple_loss=0.3039, pruned_loss=0.08519, over 18408.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.3197, pruned_loss=0.0884, over 3566023.81 frames. ], batch size: 48, lr: 2.51e-02, grad_scale: 8.0 +2023-03-08 18:16:57,772 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1850, 5.7635, 5.2239, 5.4743, 5.1388, 5.2854, 5.8168, 5.7280], + device='cuda:1'), covar=tensor([0.1061, 0.0474, 0.0400, 0.0588, 0.1357, 0.0575, 0.0434, 0.0467], + device='cuda:1'), in_proj_covar=tensor([0.0369, 0.0305, 0.0230, 0.0329, 0.0461, 0.0326, 0.0359, 0.0303], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:1') +2023-03-08 18:17:10,862 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.117e+02 4.913e+02 6.024e+02 7.201e+02 1.529e+03, threshold=1.205e+03, percent-clipped=5.0 +2023-03-08 18:17:45,355 INFO [train.py:898] (1/4) Epoch 4, batch 3350, loss[loss=0.2567, simple_loss=0.3258, pruned_loss=0.0938, over 17883.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3195, pruned_loss=0.08812, over 3572533.63 frames. 
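
The zipformer.py:625 lines track, for each encoder stack, a warmup window measured in batches (warmup_begin to warmup_end) and which of the stack's layers were randomly bypassed on this step (num_to_drop, layers_to_drop). Drops still occur occasionally at batch_count far beyond warmup_end, so the drop probability evidently never falls all the way to zero. A hedged sketch of that kind of schedule; the two rates are illustrative guesses, not the constants in zipformer.py.

import random
from typing import Set

def choose_layers_to_drop(batch_count: float, warmup_begin: float,
                          warmup_end: float, num_layers: int,
                          warmup_rate: float = 0.5,
                          base_rate: float = 0.075) -> Set[int]:
    # Inside the warmup window layers are bypassed aggressively to keep
    # early training stable; afterwards a small base rate remains, which
    # would explain the occasional num_to_drop=1 long after warmup_end.
    if batch_count < warmup_begin:
        rate = warmup_rate
    elif batch_count < warmup_end:
        frac = (warmup_end - batch_count) / (warmup_end - warmup_begin)
        rate = base_rate + (warmup_rate - base_rate) * frac
    else:
        rate = base_rate
    return {i for i in range(num_layers) if random.random() < rate}
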
], batch size: 70, lr: 2.51e-02, grad_scale: 8.0 +2023-03-08 18:18:01,243 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.62 vs. limit=2.0 +2023-03-08 18:18:04,385 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2406, 3.1955, 4.3899, 4.3047, 2.7584, 4.7202, 3.8657, 3.0032], + device='cuda:1'), covar=tensor([0.0198, 0.0723, 0.0084, 0.0133, 0.1274, 0.0083, 0.0420, 0.0801], + device='cuda:1'), in_proj_covar=tensor([0.0125, 0.0157, 0.0087, 0.0099, 0.0174, 0.0114, 0.0124, 0.0163], + device='cuda:1'), out_proj_covar=tensor([1.2138e-04, 1.5406e-04, 9.0414e-05, 9.5826e-05, 1.6691e-04, 1.0791e-04, + 1.2930e-04, 1.6349e-04], device='cuda:1') +2023-03-08 18:18:21,311 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-03-08 18:18:29,226 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.73 vs. limit=5.0 +2023-03-08 18:18:44,673 INFO [train.py:898] (1/4) Epoch 4, batch 3400, loss[loss=0.2264, simple_loss=0.3142, pruned_loss=0.06928, over 18560.00 frames. ], tot_loss[loss=0.2479, simple_loss=0.3196, pruned_loss=0.08815, over 3558800.58 frames. ], batch size: 54, lr: 2.50e-02, grad_scale: 8.0 +2023-03-08 18:18:56,487 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6368, 2.3811, 4.1903, 3.8118, 1.9340, 4.3435, 3.6716, 2.4032], + device='cuda:1'), covar=tensor([0.0303, 0.1103, 0.0080, 0.0200, 0.1707, 0.0079, 0.0300, 0.1054], + device='cuda:1'), in_proj_covar=tensor([0.0126, 0.0160, 0.0088, 0.0100, 0.0176, 0.0116, 0.0124, 0.0164], + device='cuda:1'), out_proj_covar=tensor([1.2258e-04, 1.5642e-04, 9.0864e-05, 9.7110e-05, 1.6872e-04, 1.1025e-04, + 1.2964e-04, 1.6471e-04], device='cuda:1') +2023-03-08 18:19:08,454 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.677e+02 5.094e+02 5.876e+02 7.439e+02 2.634e+03, threshold=1.175e+03, percent-clipped=7.0 +2023-03-08 18:19:39,313 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0783, 1.8710, 3.6593, 2.8691, 3.5054, 4.9261, 4.3466, 4.1882], + device='cuda:1'), covar=tensor([0.0328, 0.0879, 0.0469, 0.0563, 0.0927, 0.0028, 0.0189, 0.0157], + device='cuda:1'), in_proj_covar=tensor([0.0140, 0.0181, 0.0125, 0.0176, 0.0264, 0.0098, 0.0151, 0.0129], + device='cuda:1'), out_proj_covar=tensor([1.0958e-04, 1.4226e-04, 1.0730e-04, 1.2681e-04, 2.0328e-04, 7.0029e-05, + 1.1795e-04, 1.0049e-04], device='cuda:1') +2023-03-08 18:19:42,065 INFO [train.py:898] (1/4) Epoch 4, batch 3450, loss[loss=0.244, simple_loss=0.325, pruned_loss=0.08149, over 18362.00 frames. ], tot_loss[loss=0.2475, simple_loss=0.319, pruned_loss=0.08806, over 3567737.54 frames. ], batch size: 55, lr: 2.50e-02, grad_scale: 4.0 +2023-03-08 18:20:29,256 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14393.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:20:40,689 INFO [train.py:898] (1/4) Epoch 4, batch 3500, loss[loss=0.2085, simple_loss=0.2788, pruned_loss=0.06913, over 18372.00 frames. ], tot_loss[loss=0.2483, simple_loss=0.3196, pruned_loss=0.08851, over 3558463.30 frames. 
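
In every optim.py:369 line the reported threshold is exactly Clipping_scale times the median of the five grad-norm summary points (for the line just above, 2.0 x 5.876e+02 = 1.175e+03), and percent-clipped is the share of recent updates whose norm exceeded that threshold. A sketch of the summary statistic over a window of recent gradient norms; how the optimizer maintains that window is not shown in this log.

import torch

def summarize_grad_norms(grad_norms: torch.Tensor,
                         clipping_scale: float = 2.0):
    # grad_norms: 1-D tensor of total gradient norms from recent steps.
    q = torch.quantile(grad_norms,
                       torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0],
                                    dtype=grad_norms.dtype))
    threshold = clipping_scale * q[2]  # scale x median, as in the log lines
    percent_clipped = 100.0 * (grad_norms > threshold).float().mean()
    return q, threshold, percent_clipped
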
], batch size: 42, lr: 2.49e-02, grad_scale: 4.0 +2023-03-08 18:20:52,861 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14413.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:21:05,563 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.205e+02 4.879e+02 5.762e+02 7.140e+02 1.904e+03, threshold=1.152e+03, percent-clipped=3.0 +2023-03-08 18:21:09,070 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14428.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:21:22,860 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14441.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:21:27,196 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2728, 5.8024, 5.2417, 5.5996, 5.2302, 5.4647, 5.7938, 5.7366], + device='cuda:1'), covar=tensor([0.1104, 0.0556, 0.0460, 0.0687, 0.1379, 0.0573, 0.0503, 0.0579], + device='cuda:1'), in_proj_covar=tensor([0.0382, 0.0310, 0.0236, 0.0338, 0.0474, 0.0334, 0.0362, 0.0311], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:1') +2023-03-08 18:21:35,674 INFO [train.py:898] (1/4) Epoch 4, batch 3550, loss[loss=0.2206, simple_loss=0.2919, pruned_loss=0.07467, over 18584.00 frames. ], tot_loss[loss=0.2469, simple_loss=0.3183, pruned_loss=0.08772, over 3568443.17 frames. ], batch size: 45, lr: 2.49e-02, grad_scale: 4.0 +2023-03-08 18:21:42,313 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14459.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:21:58,335 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14474.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:22:00,287 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14476.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 18:22:01,534 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1841, 4.4067, 2.3957, 4.6398, 5.2223, 2.4520, 4.0527, 4.1625], + device='cuda:1'), covar=tensor([0.0069, 0.0817, 0.1564, 0.0395, 0.0043, 0.1383, 0.0562, 0.0673], + device='cuda:1'), in_proj_covar=tensor([0.0079, 0.0152, 0.0172, 0.0162, 0.0073, 0.0160, 0.0180, 0.0171], + device='cuda:1'), out_proj_covar=tensor([1.0916e-04, 2.1638e-04, 2.2069e-04, 2.1479e-04, 9.8716e-05, 2.1355e-04, + 2.3054e-04, 2.2863e-04], device='cuda:1') +2023-03-08 18:22:13,071 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14487.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:22:30,857 INFO [train.py:898] (1/4) Epoch 4, batch 3600, loss[loss=0.2241, simple_loss=0.294, pruned_loss=0.07713, over 18295.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.3176, pruned_loss=0.08733, over 3572862.95 frames. ], batch size: 49, lr: 2.49e-02, grad_scale: 8.0 +2023-03-08 18:22:53,557 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.017e+02 5.164e+02 6.546e+02 8.016e+02 1.916e+03, threshold=1.309e+03, percent-clipped=5.0 +2023-03-08 18:23:35,181 INFO [train.py:898] (1/4) Epoch 5, batch 0, loss[loss=0.309, simple_loss=0.3633, pruned_loss=0.1274, over 18364.00 frames. ], tot_loss[loss=0.309, simple_loss=0.3633, pruned_loss=0.1274, over 18364.00 frames. 
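
The "Computing validation loss" lines, like the one for epoch 5 just below, are each followed by a single summary over the same 944034.00 dev frames, so validation is one frame-weighted pass over a fixed dev set. A sketch under assumed interfaces; the per-batch call signature is illustrative, not the actual train.py code.

import torch

def compute_validation_loss(model, valid_loader) -> float:
    # Sum losses and frame counts over the whole dev set, then normalize;
    # that is why the report reads "over 944034.00 frames" every time.
    model.eval()
    total_loss, total_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss_sum, num_frames = model(batch)  # assumed: (summed loss, frames)
            total_loss += float(loss_sum)
            total_frames += float(num_frames)
    model.train()
    return total_loss / total_frames
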
], batch size: 56, lr: 2.31e-02, grad_scale: 8.0 +2023-03-08 18:23:35,182 INFO [train.py:923] (1/4) Computing validation loss +2023-03-08 18:23:46,757 INFO [train.py:932] (1/4) Epoch 5, validation: loss=0.1908, simple_loss=0.2926, pruned_loss=0.04454, over 944034.00 frames. +2023-03-08 18:23:46,758 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19608MB +2023-03-08 18:23:59,435 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14548.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:24:44,281 INFO [train.py:898] (1/4) Epoch 5, batch 50, loss[loss=0.2122, simple_loss=0.2898, pruned_loss=0.06731, over 18276.00 frames. ], tot_loss[loss=0.2541, simple_loss=0.3259, pruned_loss=0.09116, over 805998.31 frames. ], batch size: 47, lr: 2.31e-02, grad_scale: 8.0 +2023-03-08 18:25:29,119 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.345e+02 4.923e+02 5.805e+02 7.432e+02 1.503e+03, threshold=1.161e+03, percent-clipped=2.0 +2023-03-08 18:25:34,331 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-08 18:25:43,380 INFO [train.py:898] (1/4) Epoch 5, batch 100, loss[loss=0.248, simple_loss=0.3165, pruned_loss=0.08978, over 18135.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.3174, pruned_loss=0.08563, over 1435577.68 frames. ], batch size: 62, lr: 2.31e-02, grad_scale: 8.0 +2023-03-08 18:25:52,888 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([1.9906, 3.5971, 4.9200, 3.6354, 3.3387, 2.5367, 4.2124, 4.9457], + device='cuda:1'), covar=tensor([0.1116, 0.1051, 0.0065, 0.0385, 0.0796, 0.1158, 0.0287, 0.0053], + device='cuda:1'), in_proj_covar=tensor([0.0129, 0.0149, 0.0063, 0.0126, 0.0157, 0.0158, 0.0130, 0.0068], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0003, 0.0002, 0.0001], + device='cuda:1') +2023-03-08 18:26:07,477 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3185, 5.2700, 4.6918, 5.2487, 5.1740, 4.7355, 5.1713, 4.7229], + device='cuda:1'), covar=tensor([0.0333, 0.0385, 0.1766, 0.0567, 0.0426, 0.0387, 0.0362, 0.0863], + device='cuda:1'), in_proj_covar=tensor([0.0282, 0.0307, 0.0473, 0.0254, 0.0233, 0.0293, 0.0311, 0.0383], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0006, 0.0004, 0.0003, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-08 18:26:26,310 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=5.02 vs. limit=5.0 +2023-03-08 18:26:41,850 INFO [train.py:898] (1/4) Epoch 5, batch 150, loss[loss=0.291, simple_loss=0.355, pruned_loss=0.1135, over 18485.00 frames. ], tot_loss[loss=0.2425, simple_loss=0.316, pruned_loss=0.08453, over 1921850.66 frames. ], batch size: 59, lr: 2.30e-02, grad_scale: 8.0 +2023-03-08 18:26:50,216 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8005, 2.8634, 2.4808, 2.7896, 3.5728, 3.5157, 2.9370, 3.0318], + device='cuda:1'), covar=tensor([0.0331, 0.0323, 0.0969, 0.0499, 0.0292, 0.0340, 0.0516, 0.0383], + device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0074, 0.0134, 0.0100, 0.0075, 0.0058, 0.0092, 0.0093], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 18:27:26,624 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.78 vs. 
limit=2.0 +2023-03-08 18:27:26,968 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.743e+02 4.388e+02 5.420e+02 7.125e+02 1.692e+03, threshold=1.084e+03, percent-clipped=3.0 +2023-03-08 18:27:30,701 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14728.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:27:40,886 INFO [train.py:898] (1/4) Epoch 5, batch 200, loss[loss=0.2616, simple_loss=0.3374, pruned_loss=0.09291, over 18296.00 frames. ], tot_loss[loss=0.242, simple_loss=0.315, pruned_loss=0.08446, over 2290400.11 frames. ], batch size: 54, lr: 2.30e-02, grad_scale: 8.0 +2023-03-08 18:27:45,222 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14740.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:28:06,724 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14759.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:28:17,910 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14769.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:28:23,135 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8911, 4.5737, 4.7584, 4.6432, 4.5175, 4.7503, 5.0738, 4.9449], + device='cuda:1'), covar=tensor([0.0059, 0.0099, 0.0122, 0.0080, 0.0091, 0.0107, 0.0084, 0.0131], + device='cuda:1'), in_proj_covar=tensor([0.0060, 0.0047, 0.0045, 0.0058, 0.0050, 0.0066, 0.0055, 0.0054], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 18:28:26,995 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14776.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:28:27,187 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14776.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 18:28:39,575 INFO [train.py:898] (1/4) Epoch 5, batch 250, loss[loss=0.2653, simple_loss=0.3348, pruned_loss=0.09787, over 18591.00 frames. ], tot_loss[loss=0.242, simple_loss=0.315, pruned_loss=0.08446, over 2572742.82 frames. ], batch size: 54, lr: 2.30e-02, grad_scale: 8.0 +2023-03-08 18:28:56,736 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14801.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:29:03,303 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14807.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:29:04,660 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5512, 2.6948, 2.5143, 2.8361, 3.4527, 3.3765, 2.7861, 2.9205], + device='cuda:1'), covar=tensor([0.0259, 0.0245, 0.0837, 0.0332, 0.0182, 0.0215, 0.0346, 0.0321], + device='cuda:1'), in_proj_covar=tensor([0.0098, 0.0070, 0.0129, 0.0097, 0.0074, 0.0055, 0.0087, 0.0092], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 18:29:21,946 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14824.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 18:29:23,285 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.926e+02 4.780e+02 5.936e+02 7.141e+02 1.719e+03, threshold=1.187e+03, percent-clipped=5.0 +2023-03-08 18:29:33,925 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.69 vs. 
limit=5.0 +2023-03-08 18:29:37,969 INFO [train.py:898] (1/4) Epoch 5, batch 300, loss[loss=0.2511, simple_loss=0.3299, pruned_loss=0.08617, over 18284.00 frames. ], tot_loss[loss=0.2442, simple_loss=0.3167, pruned_loss=0.08586, over 2779050.30 frames. ], batch size: 57, lr: 2.29e-02, grad_scale: 8.0 +2023-03-08 18:29:45,112 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14843.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:30:23,657 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7014, 4.6802, 4.9122, 4.5163, 4.5049, 4.5781, 5.0767, 4.9770], + device='cuda:1'), covar=tensor([0.0068, 0.0078, 0.0090, 0.0105, 0.0081, 0.0122, 0.0075, 0.0114], + device='cuda:1'), in_proj_covar=tensor([0.0062, 0.0048, 0.0046, 0.0059, 0.0051, 0.0067, 0.0055, 0.0054], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 18:30:35,694 INFO [train.py:898] (1/4) Epoch 5, batch 350, loss[loss=0.2176, simple_loss=0.2987, pruned_loss=0.06823, over 18376.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.3153, pruned_loss=0.0846, over 2967810.20 frames. ], batch size: 50, lr: 2.29e-02, grad_scale: 8.0 +2023-03-08 18:31:17,010 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8805, 2.8414, 4.2076, 3.7911, 2.5005, 4.6457, 3.8173, 2.7563], + device='cuda:1'), covar=tensor([0.0288, 0.0951, 0.0097, 0.0248, 0.1497, 0.0080, 0.0266, 0.0917], + device='cuda:1'), in_proj_covar=tensor([0.0132, 0.0168, 0.0090, 0.0104, 0.0178, 0.0119, 0.0130, 0.0169], + device='cuda:1'), out_proj_covar=tensor([1.2837e-04, 1.6398e-04, 9.5190e-05, 9.9649e-05, 1.6999e-04, 1.1204e-04, + 1.3488e-04, 1.6943e-04], device='cuda:1') +2023-03-08 18:31:20,045 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.949e+02 4.422e+02 5.424e+02 6.713e+02 1.260e+03, threshold=1.085e+03, percent-clipped=2.0 +2023-03-08 18:31:34,506 INFO [train.py:898] (1/4) Epoch 5, batch 400, loss[loss=0.2023, simple_loss=0.2785, pruned_loss=0.06305, over 18504.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.3142, pruned_loss=0.08366, over 3100429.34 frames. ], batch size: 47, lr: 2.29e-02, grad_scale: 8.0 +2023-03-08 18:32:17,035 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0136, 5.3284, 2.7037, 5.0456, 4.9774, 5.3452, 5.2051, 2.3707], + device='cuda:1'), covar=tensor([0.0152, 0.0046, 0.0684, 0.0060, 0.0050, 0.0038, 0.0072, 0.1114], + device='cuda:1'), in_proj_covar=tensor([0.0063, 0.0049, 0.0078, 0.0059, 0.0056, 0.0047, 0.0062, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0002, 0.0003, 0.0004], + device='cuda:1') +2023-03-08 18:32:31,717 INFO [train.py:898] (1/4) Epoch 5, batch 450, loss[loss=0.2414, simple_loss=0.3183, pruned_loss=0.08226, over 18615.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3144, pruned_loss=0.08333, over 3208082.29 frames. ], batch size: 52, lr: 2.28e-02, grad_scale: 8.0 +2023-03-08 18:33:16,954 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.97 vs. limit=2.0 +2023-03-08 18:33:17,343 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.939e+02 4.662e+02 5.762e+02 7.845e+02 1.537e+03, threshold=1.152e+03, percent-clipped=9.0 +2023-03-08 18:33:30,767 INFO [train.py:898] (1/4) Epoch 5, batch 500, loss[loss=0.2634, simple_loss=0.344, pruned_loss=0.0914, over 17803.00 frames. ], tot_loss[loss=0.2409, simple_loss=0.3151, pruned_loss=0.08337, over 3301824.02 frames. 
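
The learning rate in these lines decays smoothly in both the batch index and the epoch (2.31e-02 at the top of epoch 5 down to 2.28e-02 by batch 500). icefall's Eden scheduler produces exactly this kind of double decay; a sketch of its formula follows, with the two time constants set to common recipe defaults that should be read as assumptions as far as this excerpt is concerned.

def eden_lr(base_lr: float, step: int, epoch: float,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    # Smooth inverse-fourth-root decay in the batch index and in the
    # epoch index; near step 0 and epoch 0 both factors are ~1, so the
    # schedule starts at roughly base_lr and never jumps.
    batch_factor = ((step ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor
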
], batch size: 70, lr: 2.28e-02, grad_scale: 8.0 +2023-03-08 18:34:05,337 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0108, 5.5517, 5.1405, 5.3328, 5.0445, 5.2339, 5.6360, 5.5259], + device='cuda:1'), covar=tensor([0.1205, 0.0673, 0.0495, 0.0659, 0.1636, 0.0569, 0.0490, 0.0648], + device='cuda:1'), in_proj_covar=tensor([0.0377, 0.0308, 0.0235, 0.0336, 0.0480, 0.0341, 0.0375, 0.0305], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:1') +2023-03-08 18:34:09,993 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15069.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:34:30,389 INFO [train.py:898] (1/4) Epoch 5, batch 550, loss[loss=0.205, simple_loss=0.2808, pruned_loss=0.06459, over 18264.00 frames. ], tot_loss[loss=0.2407, simple_loss=0.3149, pruned_loss=0.08323, over 3374901.11 frames. ], batch size: 45, lr: 2.28e-02, grad_scale: 8.0 +2023-03-08 18:34:41,261 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15096.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:35:06,918 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15117.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:35:15,841 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.759e+02 4.602e+02 5.527e+02 7.375e+02 1.442e+03, threshold=1.105e+03, percent-clipped=1.0 +2023-03-08 18:35:29,476 INFO [train.py:898] (1/4) Epoch 5, batch 600, loss[loss=0.2009, simple_loss=0.2742, pruned_loss=0.06377, over 17622.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.314, pruned_loss=0.08279, over 3417992.41 frames. ], batch size: 39, lr: 2.27e-02, grad_scale: 8.0 +2023-03-08 18:35:30,986 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1058, 3.0010, 4.1950, 4.0686, 2.6073, 4.5591, 3.8835, 3.0068], + device='cuda:1'), covar=tensor([0.0198, 0.0763, 0.0103, 0.0151, 0.1040, 0.0073, 0.0244, 0.0706], + device='cuda:1'), in_proj_covar=tensor([0.0130, 0.0164, 0.0091, 0.0099, 0.0174, 0.0117, 0.0129, 0.0166], + device='cuda:1'), out_proj_covar=tensor([1.2541e-04, 1.6030e-04, 9.5666e-05, 9.5412e-05, 1.6687e-04, 1.0967e-04, + 1.3413e-04, 1.6527e-04], device='cuda:1') +2023-03-08 18:35:36,509 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15143.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:36:28,272 INFO [train.py:898] (1/4) Epoch 5, batch 650, loss[loss=0.2079, simple_loss=0.293, pruned_loss=0.06138, over 18487.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.3131, pruned_loss=0.08255, over 3461541.86 frames. ], batch size: 51, lr: 2.27e-02, grad_scale: 8.0 +2023-03-08 18:36:32,826 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15191.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:36:37,729 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15195.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:37:13,515 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.710e+02 4.789e+02 6.054e+02 7.320e+02 2.200e+03, threshold=1.211e+03, percent-clipped=6.0 +2023-03-08 18:37:27,064 INFO [train.py:898] (1/4) Epoch 5, batch 700, loss[loss=0.2005, simple_loss=0.2725, pruned_loss=0.06422, over 17187.00 frames. ], tot_loss[loss=0.2384, simple_loss=0.3123, pruned_loss=0.08232, over 3488425.13 frames. 
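
grad_scale in the training lines moves between powers of two (4.0 and 8.0 across this stretch, e.g. dropping to 4.0 by epoch 5, batch 1300 below and back to 8.0 by batch 1600). That is how torch.cuda.amp.GradScaler behaves in mixed-precision training: the scale is halved when a step overflows and doubled after a run of clean steps. A minimal sketch of the loop; model and optimizer are placeholders.

import torch

scaler = torch.cuda.amp.GradScaler()    # owns the grad_scale printed above

def fp16_train_step(model, optimizer, batch):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():     # forward in mixed precision
        loss = model(batch)             # assumed to return a scalar loss
    scaler.scale(loss).backward()       # backward on the scaled loss
    scaler.step(optimizer)              # unscales; skips the step on inf/nan
    scaler.update()                     # halves or doubles the scale
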
], batch size: 38, lr: 2.27e-02, grad_scale: 8.0 +2023-03-08 18:37:27,345 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9235, 4.9028, 4.3309, 4.7955, 4.8311, 4.3030, 4.7531, 4.5177], + device='cuda:1'), covar=tensor([0.0387, 0.0399, 0.1655, 0.0675, 0.0443, 0.0492, 0.0388, 0.0707], + device='cuda:1'), in_proj_covar=tensor([0.0289, 0.0315, 0.0474, 0.0263, 0.0238, 0.0298, 0.0314, 0.0394], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0006, 0.0004, 0.0003, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-08 18:37:49,595 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15256.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 18:37:51,870 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.72 vs. limit=2.0 +2023-03-08 18:38:26,033 INFO [train.py:898] (1/4) Epoch 5, batch 750, loss[loss=0.192, simple_loss=0.2738, pruned_loss=0.05515, over 18416.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.3115, pruned_loss=0.08164, over 3515764.94 frames. ], batch size: 43, lr: 2.26e-02, grad_scale: 8.0 +2023-03-08 18:39:10,698 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.929e+02 4.559e+02 5.323e+02 7.039e+02 1.434e+03, threshold=1.065e+03, percent-clipped=2.0 +2023-03-08 18:39:24,947 INFO [train.py:898] (1/4) Epoch 5, batch 800, loss[loss=0.2172, simple_loss=0.3034, pruned_loss=0.0655, over 18381.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3119, pruned_loss=0.08195, over 3525548.25 frames. ], batch size: 50, lr: 2.26e-02, grad_scale: 8.0 +2023-03-08 18:39:54,317 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.90 vs. limit=2.0 +2023-03-08 18:40:24,763 INFO [train.py:898] (1/4) Epoch 5, batch 850, loss[loss=0.1868, simple_loss=0.2588, pruned_loss=0.05743, over 18394.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3112, pruned_loss=0.08146, over 3543232.34 frames. ], batch size: 42, lr: 2.26e-02, grad_scale: 8.0 +2023-03-08 18:40:35,210 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15396.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:40:44,636 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5379, 3.3761, 3.1701, 2.9688, 3.3865, 2.9294, 2.5783, 3.5904], + device='cuda:1'), covar=tensor([0.0034, 0.0070, 0.0074, 0.0110, 0.0053, 0.0135, 0.0169, 0.0057], + device='cuda:1'), in_proj_covar=tensor([0.0056, 0.0068, 0.0065, 0.0098, 0.0063, 0.0104, 0.0111, 0.0060], + device='cuda:1'), out_proj_covar=tensor([8.4663e-05, 1.0958e-04, 1.0669e-04, 1.6074e-04, 9.8239e-05, 1.6770e-04, + 1.7899e-04, 9.6004e-05], device='cuda:1') +2023-03-08 18:40:52,565 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.85 vs. 
limit=2.0 +2023-03-08 18:41:09,842 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.891e+02 4.807e+02 5.667e+02 6.596e+02 1.929e+03, threshold=1.133e+03, percent-clipped=7.0 +2023-03-08 18:41:17,236 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6293, 3.4479, 2.9582, 3.0433, 3.4836, 3.0567, 2.6059, 3.6822], + device='cuda:1'), covar=tensor([0.0046, 0.0081, 0.0132, 0.0099, 0.0093, 0.0161, 0.0194, 0.0063], + device='cuda:1'), in_proj_covar=tensor([0.0055, 0.0069, 0.0066, 0.0098, 0.0064, 0.0104, 0.0113, 0.0061], + device='cuda:1'), out_proj_covar=tensor([8.4161e-05, 1.0996e-04, 1.0776e-04, 1.6065e-04, 9.9567e-05, 1.6777e-04, + 1.8261e-04, 9.6508e-05], device='cuda:1') +2023-03-08 18:41:24,180 INFO [train.py:898] (1/4) Epoch 5, batch 900, loss[loss=0.2395, simple_loss=0.3136, pruned_loss=0.08274, over 18113.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3116, pruned_loss=0.08175, over 3565550.97 frames. ], batch size: 62, lr: 2.25e-02, grad_scale: 8.0 +2023-03-08 18:41:32,532 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15444.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:42:23,789 INFO [train.py:898] (1/4) Epoch 5, batch 950, loss[loss=0.2074, simple_loss=0.2831, pruned_loss=0.0658, over 18361.00 frames. ], tot_loss[loss=0.2373, simple_loss=0.3113, pruned_loss=0.08166, over 3559957.63 frames. ], batch size: 46, lr: 2.25e-02, grad_scale: 8.0 +2023-03-08 18:42:51,892 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5327, 3.4206, 3.1518, 2.8028, 3.2624, 2.6937, 2.3984, 3.4786], + device='cuda:1'), covar=tensor([0.0039, 0.0071, 0.0080, 0.0134, 0.0074, 0.0164, 0.0205, 0.0068], + device='cuda:1'), in_proj_covar=tensor([0.0054, 0.0069, 0.0065, 0.0097, 0.0064, 0.0103, 0.0112, 0.0061], + device='cuda:1'), out_proj_covar=tensor([8.1765e-05, 1.1029e-04, 1.0682e-04, 1.5867e-04, 1.0100e-04, 1.6599e-04, + 1.8058e-04, 9.6342e-05], device='cuda:1') +2023-03-08 18:43:09,482 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.487e+02 4.499e+02 5.437e+02 6.753e+02 3.373e+03, threshold=1.087e+03, percent-clipped=5.0 +2023-03-08 18:43:13,585 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-08 18:43:23,267 INFO [train.py:898] (1/4) Epoch 5, batch 1000, loss[loss=0.2355, simple_loss=0.3102, pruned_loss=0.08044, over 18477.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.3106, pruned_loss=0.08101, over 3575822.95 frames. ], batch size: 47, lr: 2.25e-02, grad_scale: 8.0 +2023-03-08 18:43:39,871 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15551.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 18:43:56,491 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5508, 6.1633, 5.2767, 5.9372, 5.5583, 5.7547, 6.1696, 6.1105], + device='cuda:1'), covar=tensor([0.1078, 0.0462, 0.0363, 0.0593, 0.1381, 0.0534, 0.0425, 0.0525], + device='cuda:1'), in_proj_covar=tensor([0.0386, 0.0314, 0.0245, 0.0339, 0.0480, 0.0343, 0.0387, 0.0317], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:1') +2023-03-08 18:44:23,075 INFO [train.py:898] (1/4) Epoch 5, batch 1050, loss[loss=0.2086, simple_loss=0.2789, pruned_loss=0.06914, over 18417.00 frames. ], tot_loss[loss=0.237, simple_loss=0.311, pruned_loss=0.08145, over 3572100.92 frames. 
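
The zipformer.py:1455 dumps are attention diagnostics: per-head entropies of the attention distributions, together with the covariance statistics used to smooth them. High entropy means a head attends diffusely; values near zero mean it has collapsed onto single keys. A sketch of the core statistic only, since the dump format above clearly carries more state than this.

import torch

def attention_entropy(attn_weights: torch.Tensor,
                      eps: float = 1e-20) -> torch.Tensor:
    # attn_weights: (num_heads, num_queries, num_keys), rows summing to 1.
    # Returns one mean entropy per head, comparable to the per-head
    # values dumped above.
    ent = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
    return ent.mean(dim=-1)
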
], batch size: 43, lr: 2.24e-02, grad_scale: 8.0 +2023-03-08 18:45:08,994 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.376e+02 4.268e+02 5.180e+02 6.022e+02 1.534e+03, threshold=1.036e+03, percent-clipped=2.0 +2023-03-08 18:45:22,602 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6775, 3.5833, 4.7528, 3.0052, 3.9445, 2.9723, 3.0987, 2.0675], + device='cuda:1'), covar=tensor([0.0664, 0.0496, 0.0072, 0.0428, 0.0462, 0.1297, 0.1298, 0.1314], + device='cuda:1'), in_proj_covar=tensor([0.0158, 0.0163, 0.0080, 0.0129, 0.0179, 0.0210, 0.0179, 0.0174], + device='cuda:1'), out_proj_covar=tensor([1.5446e-04, 1.6431e-04, 8.3513e-05, 1.2882e-04, 1.8021e-04, 2.0711e-04, + 1.8790e-04, 1.7461e-04], device='cuda:1') +2023-03-08 18:45:23,135 INFO [train.py:898] (1/4) Epoch 5, batch 1100, loss[loss=0.2695, simple_loss=0.3425, pruned_loss=0.09826, over 17740.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.312, pruned_loss=0.08148, over 3576304.77 frames. ], batch size: 70, lr: 2.24e-02, grad_scale: 8.0 +2023-03-08 18:46:00,728 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9684, 1.9356, 3.6177, 3.1789, 3.5525, 5.1881, 4.4502, 4.6293], + device='cuda:1'), covar=tensor([0.0318, 0.0840, 0.0519, 0.0471, 0.0873, 0.0025, 0.0195, 0.0105], + device='cuda:1'), in_proj_covar=tensor([0.0142, 0.0188, 0.0143, 0.0179, 0.0270, 0.0101, 0.0155, 0.0135], + device='cuda:1'), out_proj_covar=tensor([1.0924e-04, 1.4346e-04, 1.1926e-04, 1.2706e-04, 2.0455e-04, 7.1109e-05, + 1.1912e-04, 1.0308e-04], device='cuda:1') +2023-03-08 18:46:22,717 INFO [train.py:898] (1/4) Epoch 5, batch 1150, loss[loss=0.2334, simple_loss=0.3159, pruned_loss=0.07543, over 18637.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.3122, pruned_loss=0.08173, over 3579164.11 frames. ], batch size: 52, lr: 2.24e-02, grad_scale: 8.0 +2023-03-08 18:46:40,863 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9076, 3.3101, 3.4312, 2.8552, 2.9547, 3.0817, 2.4235, 2.1113], + device='cuda:1'), covar=tensor([0.0177, 0.0152, 0.0049, 0.0214, 0.0319, 0.0122, 0.0573, 0.0730], + device='cuda:1'), in_proj_covar=tensor([0.0038, 0.0037, 0.0030, 0.0044, 0.0061, 0.0035, 0.0062, 0.0068], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0004, 0.0004], + device='cuda:1') +2023-03-08 18:46:49,835 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5598, 3.3254, 1.6788, 4.4323, 3.0273, 4.5894, 1.7909, 4.1875], + device='cuda:1'), covar=tensor([0.0468, 0.0770, 0.1472, 0.0308, 0.0823, 0.0116, 0.1270, 0.0192], + device='cuda:1'), in_proj_covar=tensor([0.0149, 0.0186, 0.0162, 0.0154, 0.0159, 0.0111, 0.0165, 0.0151], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 18:47:06,231 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.304e+02 5.042e+02 5.995e+02 7.754e+02 1.495e+03, threshold=1.199e+03, percent-clipped=7.0 +2023-03-08 18:47:21,490 INFO [train.py:898] (1/4) Epoch 5, batch 1200, loss[loss=0.213, simple_loss=0.271, pruned_loss=0.07752, over 18438.00 frames. ], tot_loss[loss=0.2377, simple_loss=0.3118, pruned_loss=0.08185, over 3578654.66 frames. ], batch size: 43, lr: 2.23e-02, grad_scale: 8.0 +2023-03-08 18:48:19,536 INFO [train.py:898] (1/4) Epoch 5, batch 1250, loss[loss=0.2073, simple_loss=0.2746, pruned_loss=0.06998, over 18147.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3109, pruned_loss=0.08117, over 3583133.14 frames. 
], batch size: 44, lr: 2.23e-02, grad_scale: 8.0 +2023-03-08 18:49:03,916 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.909e+02 4.343e+02 5.272e+02 6.723e+02 1.264e+03, threshold=1.054e+03, percent-clipped=1.0 +2023-03-08 18:49:18,507 INFO [train.py:898] (1/4) Epoch 5, batch 1300, loss[loss=0.2316, simple_loss=0.3129, pruned_loss=0.07513, over 18471.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3108, pruned_loss=0.08124, over 3580343.03 frames. ], batch size: 59, lr: 2.23e-02, grad_scale: 4.0 +2023-03-08 18:49:35,337 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15851.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:50:12,064 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15883.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:50:16,767 INFO [train.py:898] (1/4) Epoch 5, batch 1350, loss[loss=0.2478, simple_loss=0.3198, pruned_loss=0.08788, over 18408.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.3104, pruned_loss=0.08097, over 3582493.08 frames. ], batch size: 48, lr: 2.22e-02, grad_scale: 4.0 +2023-03-08 18:50:31,443 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15899.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:51:00,800 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.58 vs. limit=2.0 +2023-03-08 18:51:02,179 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.900e+02 4.519e+02 5.491e+02 6.869e+02 1.432e+03, threshold=1.098e+03, percent-clipped=4.0 +2023-03-08 18:51:15,442 INFO [train.py:898] (1/4) Epoch 5, batch 1400, loss[loss=0.2278, simple_loss=0.3134, pruned_loss=0.07111, over 18363.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.3113, pruned_loss=0.08123, over 3593455.39 frames. ], batch size: 55, lr: 2.22e-02, grad_scale: 4.0 +2023-03-08 18:51:15,758 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.4621, 5.0917, 5.0951, 4.9053, 4.6678, 4.8773, 4.1898, 4.8380], + device='cuda:1'), covar=tensor([0.0254, 0.0255, 0.0213, 0.0266, 0.0398, 0.0236, 0.1322, 0.0300], + device='cuda:1'), in_proj_covar=tensor([0.0125, 0.0165, 0.0148, 0.0142, 0.0160, 0.0166, 0.0233, 0.0146], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006, 0.0004], + device='cuda:1') +2023-03-08 18:51:24,403 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15944.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:52:13,988 INFO [train.py:898] (1/4) Epoch 5, batch 1450, loss[loss=0.1965, simple_loss=0.2667, pruned_loss=0.0632, over 18581.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3095, pruned_loss=0.08053, over 3592371.32 frames. 
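
Throughout this log the printed loss is exactly 0.5 x simple_loss + pruned_loss (for batch 1300 just above: 0.5 x 0.3129 + 0.07513 = 0.2316). These are the two terms of the pruned-transducer objective: a cheap "simple" loss whose alignments define the pruning window, and the exact transducer loss evaluated only inside that window, both computed by k2. A sketch of just the scalar combination; the 0.5 weight is inferred from the numbers above.

import torch

def combine_transducer_losses(simple_loss: torch.Tensor,
                              pruned_loss: torch.Tensor,
                              simple_loss_scale: float = 0.5) -> torch.Tensor:
    # The simple loss stabilizes early training and bounds the pruned
    # lattice; the pruned loss is the main training signal.
    return simple_loss_scale * simple_loss + pruned_loss
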
], batch size: 45, lr: 2.22e-02, grad_scale: 4.0 +2023-03-08 18:52:36,585 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16001.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:53:04,646 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.050e+02 4.332e+02 5.250e+02 6.313e+02 1.611e+03, threshold=1.050e+03, percent-clipped=2.0 +2023-03-08 18:53:07,305 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0351, 3.0080, 4.1901, 4.0016, 2.2418, 4.5672, 3.6279, 2.9123], + device='cuda:1'), covar=tensor([0.0204, 0.0878, 0.0131, 0.0152, 0.1437, 0.0092, 0.0378, 0.0805], + device='cuda:1'), in_proj_covar=tensor([0.0138, 0.0173, 0.0095, 0.0102, 0.0182, 0.0127, 0.0138, 0.0171], + device='cuda:1'), out_proj_covar=tensor([1.3312e-04, 1.6785e-04, 1.0059e-04, 9.7986e-05, 1.7461e-04, 1.1973e-04, + 1.4205e-04, 1.7057e-04], device='cuda:1') +2023-03-08 18:53:16,949 INFO [train.py:898] (1/4) Epoch 5, batch 1500, loss[loss=0.2407, simple_loss=0.3018, pruned_loss=0.08977, over 18500.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3101, pruned_loss=0.08064, over 3585359.11 frames. ], batch size: 44, lr: 2.21e-02, grad_scale: 4.0 +2023-03-08 18:53:40,748 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1382, 2.3769, 2.2592, 2.4966, 3.0629, 3.1582, 2.5959, 2.8668], + device='cuda:1'), covar=tensor([0.0251, 0.0422, 0.0757, 0.0387, 0.0227, 0.0177, 0.0392, 0.0292], + device='cuda:1'), in_proj_covar=tensor([0.0098, 0.0075, 0.0131, 0.0097, 0.0073, 0.0054, 0.0091, 0.0095], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 18:53:47,599 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16062.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:54:14,626 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16085.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:54:16,407 INFO [train.py:898] (1/4) Epoch 5, batch 1550, loss[loss=0.2392, simple_loss=0.314, pruned_loss=0.08217, over 18516.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.311, pruned_loss=0.08142, over 3567458.01 frames. ], batch size: 49, lr: 2.21e-02, grad_scale: 4.0 +2023-03-08 18:55:02,225 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.998e+02 4.560e+02 5.884e+02 7.024e+02 2.005e+03, threshold=1.177e+03, percent-clipped=3.0 +2023-03-08 18:55:14,721 INFO [train.py:898] (1/4) Epoch 5, batch 1600, loss[loss=0.1921, simple_loss=0.2634, pruned_loss=0.06038, over 18157.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.3103, pruned_loss=0.0812, over 3574970.91 frames. ], batch size: 44, lr: 2.21e-02, grad_scale: 8.0 +2023-03-08 18:55:26,036 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16146.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:55:29,654 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16149.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:56:14,000 INFO [train.py:898] (1/4) Epoch 5, batch 1650, loss[loss=0.2277, simple_loss=0.3032, pruned_loss=0.07609, over 18377.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.3111, pruned_loss=0.08132, over 3577899.50 frames. 
], batch size: 50, lr: 2.20e-02, grad_scale: 8.0 +2023-03-08 18:56:17,792 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8125, 2.2297, 2.0678, 2.2334, 2.7474, 2.7677, 2.3916, 2.5122], + device='cuda:1'), covar=tensor([0.0247, 0.0332, 0.0830, 0.0379, 0.0299, 0.0190, 0.0530, 0.0314], + device='cuda:1'), in_proj_covar=tensor([0.0098, 0.0076, 0.0131, 0.0099, 0.0073, 0.0054, 0.0091, 0.0097], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 18:56:42,350 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16210.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:56:54,905 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2169, 5.5248, 3.3132, 5.2224, 5.1439, 5.5416, 5.3669, 2.7402], + device='cuda:1'), covar=tensor([0.0131, 0.0025, 0.0504, 0.0053, 0.0043, 0.0032, 0.0059, 0.0885], + device='cuda:1'), in_proj_covar=tensor([0.0062, 0.0049, 0.0079, 0.0061, 0.0058, 0.0047, 0.0061, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-08 18:57:00,821 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.794e+02 4.512e+02 5.424e+02 6.724e+02 1.524e+03, threshold=1.085e+03, percent-clipped=4.0 +2023-03-08 18:57:06,090 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-03-08 18:57:13,384 INFO [train.py:898] (1/4) Epoch 5, batch 1700, loss[loss=0.2249, simple_loss=0.3008, pruned_loss=0.07448, over 18365.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.3112, pruned_loss=0.08087, over 3584998.13 frames. ], batch size: 50, lr: 2.20e-02, grad_scale: 8.0 +2023-03-08 18:57:15,879 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16239.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:57:46,465 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6350, 4.4362, 4.5103, 3.8166, 3.4191, 3.4772, 2.1999, 2.2601], + device='cuda:1'), covar=tensor([0.0184, 0.0152, 0.0047, 0.0168, 0.0378, 0.0168, 0.0800, 0.0918], + device='cuda:1'), in_proj_covar=tensor([0.0039, 0.0037, 0.0031, 0.0044, 0.0061, 0.0036, 0.0061, 0.0067], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0004, 0.0004], + device='cuda:1') +2023-03-08 18:58:13,016 INFO [train.py:898] (1/4) Epoch 5, batch 1750, loss[loss=0.2333, simple_loss=0.3079, pruned_loss=0.07935, over 18621.00 frames. ], tot_loss[loss=0.236, simple_loss=0.3109, pruned_loss=0.08055, over 3585973.83 frames. ], batch size: 52, lr: 2.20e-02, grad_scale: 8.0 +2023-03-08 18:58:28,688 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4278, 5.1851, 5.5628, 5.2014, 5.3068, 6.1613, 5.7897, 5.5932], + device='cuda:1'), covar=tensor([0.0760, 0.0546, 0.0584, 0.0636, 0.1301, 0.0663, 0.0490, 0.1482], + device='cuda:1'), in_proj_covar=tensor([0.0233, 0.0177, 0.0181, 0.0175, 0.0219, 0.0264, 0.0171, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-08 18:58:58,897 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.349e+02 4.407e+02 5.169e+02 6.479e+02 1.420e+03, threshold=1.034e+03, percent-clipped=4.0 +2023-03-08 18:59:11,657 INFO [train.py:898] (1/4) Epoch 5, batch 1800, loss[loss=0.289, simple_loss=0.3431, pruned_loss=0.1174, over 12627.00 frames. 
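
The scaling.py:679 lines compare a per-group "whitening" metric against a limit: 2.0 for the num_groups=8, num_channels=96 checks and 5.0 for the single-group 384-channel ones. A natural statistic with the logged behaviour is d * trace(C @ C) / trace(C)**2 over a group covariance C of dimension d: it equals 1.0 exactly when C is a multiple of the identity (fully white features) and grows as the spectrum skews. A sketch of that statistic; whether scaling.py uses this precise formula is an assumption.

import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    # x: (num_frames, num_channels); channels split into equal groups.
    n, c = x.shape
    d = c // num_groups
    xg = x.reshape(n, num_groups, d).transpose(0, 1)   # (groups, n, d)
    cov = xg.transpose(1, 2) @ xg / n                  # (groups, d, d)
    num = (cov * cov).sum(dim=(1, 2)) * d              # trace(C @ C) * d
    den = torch.diagonal(cov, dim1=1, dim2=2).sum(dim=1) ** 2  # trace(C)**2
    return (num / den).mean()  # 1.0 iff every group covariance is isotropic
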
], tot_loss[loss=0.2369, simple_loss=0.3116, pruned_loss=0.08113, over 3578020.47 frames. ], batch size: 129, lr: 2.19e-02, grad_scale: 8.0 +2023-03-08 18:59:35,065 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16357.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:59:52,223 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16371.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:59:52,373 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.2964, 3.4803, 4.7716, 4.1573, 3.1643, 2.7885, 4.3097, 4.7837], + device='cuda:1'), covar=tensor([0.0864, 0.1199, 0.0052, 0.0218, 0.0759, 0.0978, 0.0240, 0.0068], + device='cuda:1'), in_proj_covar=tensor([0.0130, 0.0168, 0.0065, 0.0134, 0.0159, 0.0162, 0.0134, 0.0076], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0002, 0.0003, 0.0002, 0.0001], + device='cuda:1') +2023-03-08 19:00:10,475 INFO [train.py:898] (1/4) Epoch 5, batch 1850, loss[loss=0.2595, simple_loss=0.3333, pruned_loss=0.09282, over 18351.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.3121, pruned_loss=0.08172, over 3572162.86 frames. ], batch size: 56, lr: 2.19e-02, grad_scale: 8.0 +2023-03-08 19:00:55,917 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 4.944e+02 5.980e+02 8.174e+02 1.619e+03, threshold=1.196e+03, percent-clipped=7.0 +2023-03-08 19:01:03,104 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16432.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:01:09,064 INFO [train.py:898] (1/4) Epoch 5, batch 1900, loss[loss=0.2297, simple_loss=0.3142, pruned_loss=0.07263, over 18621.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.3125, pruned_loss=0.08194, over 3569080.46 frames. ], batch size: 52, lr: 2.19e-02, grad_scale: 8.0 +2023-03-08 19:01:13,693 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16441.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:02:07,352 INFO [train.py:898] (1/4) Epoch 5, batch 1950, loss[loss=0.2395, simple_loss=0.3262, pruned_loss=0.07637, over 18116.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3117, pruned_loss=0.08121, over 3572357.79 frames. ], batch size: 62, lr: 2.19e-02, grad_scale: 8.0 +2023-03-08 19:02:28,045 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16505.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:02:53,792 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.181e+02 4.721e+02 5.735e+02 6.721e+02 1.938e+03, threshold=1.147e+03, percent-clipped=5.0 +2023-03-08 19:03:06,024 INFO [train.py:898] (1/4) Epoch 5, batch 2000, loss[loss=0.2337, simple_loss=0.2971, pruned_loss=0.08517, over 18484.00 frames. ], tot_loss[loss=0.2387, simple_loss=0.3133, pruned_loss=0.08209, over 3565645.40 frames. ], batch size: 44, lr: 2.18e-02, grad_scale: 8.0 +2023-03-08 19:03:09,147 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16539.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:04:04,802 INFO [train.py:898] (1/4) Epoch 5, batch 2050, loss[loss=0.2609, simple_loss=0.3454, pruned_loss=0.08824, over 18291.00 frames. ], tot_loss[loss=0.2369, simple_loss=0.3119, pruned_loss=0.08099, over 3587258.51 frames. 
], batch size: 57, lr: 2.18e-02, grad_scale: 8.0 +2023-03-08 19:04:04,998 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16587.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:04:32,867 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. limit=2.0 +2023-03-08 19:04:51,606 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.823e+02 4.363e+02 5.469e+02 6.595e+02 1.217e+03, threshold=1.094e+03, percent-clipped=2.0 +2023-03-08 19:05:04,769 INFO [train.py:898] (1/4) Epoch 5, batch 2100, loss[loss=0.2618, simple_loss=0.3381, pruned_loss=0.09278, over 18288.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3122, pruned_loss=0.08091, over 3588671.98 frames. ], batch size: 57, lr: 2.18e-02, grad_scale: 8.0 +2023-03-08 19:05:28,367 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16657.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:05:38,161 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.74 vs. limit=2.0 +2023-03-08 19:06:04,036 INFO [train.py:898] (1/4) Epoch 5, batch 2150, loss[loss=0.2655, simple_loss=0.3429, pruned_loss=0.09402, over 18106.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.3124, pruned_loss=0.0809, over 3587528.51 frames. ], batch size: 62, lr: 2.17e-02, grad_scale: 8.0 +2023-03-08 19:06:24,753 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16705.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:06:30,762 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6256, 1.6178, 3.2171, 2.4912, 3.3046, 4.9207, 4.2817, 4.0407], + device='cuda:1'), covar=tensor([0.0435, 0.1054, 0.0756, 0.0703, 0.1005, 0.0028, 0.0203, 0.0165], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0197, 0.0158, 0.0188, 0.0280, 0.0105, 0.0163, 0.0140], + device='cuda:1'), out_proj_covar=tensor([1.1271e-04, 1.4802e-04, 1.2896e-04, 1.3147e-04, 2.0846e-04, 7.3148e-05, + 1.2245e-04, 1.0547e-04], device='cuda:1') +2023-03-08 19:06:49,446 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.208e+02 4.846e+02 5.504e+02 7.182e+02 1.365e+03, threshold=1.101e+03, percent-clipped=1.0 +2023-03-08 19:06:50,859 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16727.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:07:02,272 INFO [train.py:898] (1/4) Epoch 5, batch 2200, loss[loss=0.208, simple_loss=0.2875, pruned_loss=0.06424, over 18481.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.3134, pruned_loss=0.08186, over 3564914.40 frames. ], batch size: 47, lr: 2.17e-02, grad_scale: 8.0 +2023-03-08 19:07:06,999 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16741.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:07:18,193 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.86 vs. limit=2.0 +2023-03-08 19:08:01,121 INFO [train.py:898] (1/4) Epoch 5, batch 2250, loss[loss=0.2294, simple_loss=0.3016, pruned_loss=0.07859, over 18502.00 frames. ], tot_loss[loss=0.238, simple_loss=0.3126, pruned_loss=0.08169, over 3565620.07 frames. ], batch size: 47, lr: 2.17e-02, grad_scale: 8.0 +2023-03-08 19:08:03,429 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16789.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:08:03,985 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.72 vs. 
limit=2.0 +2023-03-08 19:08:22,367 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16805.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:08:40,210 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.00 vs. limit=5.0 +2023-03-08 19:08:46,495 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 4.310e+02 5.501e+02 7.389e+02 1.420e+03, threshold=1.100e+03, percent-clipped=2.0 +2023-03-08 19:09:00,059 INFO [train.py:898] (1/4) Epoch 5, batch 2300, loss[loss=0.2654, simple_loss=0.3291, pruned_loss=0.1008, over 18385.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.3107, pruned_loss=0.08105, over 3571400.06 frames. ], batch size: 50, lr: 2.16e-02, grad_scale: 8.0 +2023-03-08 19:09:17,950 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16853.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:09:35,506 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16868.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 19:09:57,302 INFO [train.py:898] (1/4) Epoch 5, batch 2350, loss[loss=0.2243, simple_loss=0.2961, pruned_loss=0.07623, over 18251.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.3105, pruned_loss=0.08097, over 3578301.63 frames. ], batch size: 47, lr: 2.16e-02, grad_scale: 8.0 +2023-03-08 19:10:19,757 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16906.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:10:31,648 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5047, 3.2580, 1.8571, 4.1299, 2.6275, 4.4258, 2.0661, 3.7260], + device='cuda:1'), covar=tensor([0.0509, 0.0846, 0.1396, 0.0335, 0.1044, 0.0116, 0.1231, 0.0333], + device='cuda:1'), in_proj_covar=tensor([0.0157, 0.0193, 0.0168, 0.0166, 0.0167, 0.0122, 0.0171, 0.0163], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 19:10:42,832 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.402e+02 4.892e+02 6.073e+02 7.229e+02 1.276e+03, threshold=1.215e+03, percent-clipped=6.0 +2023-03-08 19:10:47,123 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16929.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 19:10:56,193 INFO [train.py:898] (1/4) Epoch 5, batch 2400, loss[loss=0.2511, simple_loss=0.3261, pruned_loss=0.08809, over 18122.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.3107, pruned_loss=0.08106, over 3570003.34 frames. ], batch size: 62, lr: 2.16e-02, grad_scale: 8.0 +2023-03-08 19:11:31,388 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16967.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 19:11:54,536 INFO [train.py:898] (1/4) Epoch 5, batch 2450, loss[loss=0.2472, simple_loss=0.3205, pruned_loss=0.08692, over 18505.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3097, pruned_loss=0.08081, over 3575086.94 frames. 
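
tot_loss is a frame-weighted running average of the per-batch losses, and the frame count it is taken over hovers near 3.6e6 instead of growing without bound, which suggests the average is restarted on a fixed cadence (a few hundred batches at roughly 18k frames each). A sketch of that bookkeeping; the reset cadence here is an assumption, since this excerpt only shows its effect.

class RunningLoss:
    # Frame-weighted running average behind "tot_loss[... over N frames.]".
    def __init__(self, reset_interval: int = 200):  # assumed cadence
        self.reset_interval = reset_interval
        self.loss_sum = 0.0
        self.frames = 0.0
        self.batches = 0

    def update(self, batch_loss: float, batch_frames: float) -> float:
        if self.batches > 0 and self.batches % self.reset_interval == 0:
            self.loss_sum, self.frames = 0.0, 0.0  # start a fresh window
        self.loss_sum += batch_loss * batch_frames
        self.frames += batch_frames
        self.batches += 1
        return self.loss_sum / self.frames
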
], batch size: 51, lr: 2.16e-02, grad_scale: 8.0 +2023-03-08 19:12:13,220 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3979, 5.1066, 5.4901, 5.4455, 5.2780, 6.1319, 5.5472, 5.5859], + device='cuda:1'), covar=tensor([0.0899, 0.0659, 0.0724, 0.0498, 0.1336, 0.0691, 0.0602, 0.1553], + device='cuda:1'), in_proj_covar=tensor([0.0243, 0.0184, 0.0193, 0.0185, 0.0232, 0.0268, 0.0176, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-08 19:12:39,639 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.725e+02 4.440e+02 5.461e+02 7.873e+02 1.788e+03, threshold=1.092e+03, percent-clipped=5.0 +2023-03-08 19:12:41,119 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17027.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:12:53,116 INFO [train.py:898] (1/4) Epoch 5, batch 2500, loss[loss=0.1899, simple_loss=0.2659, pruned_loss=0.05702, over 17730.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3098, pruned_loss=0.08046, over 3584713.02 frames. ], batch size: 39, lr: 2.15e-02, grad_scale: 8.0 +2023-03-08 19:12:55,273 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1689, 2.4115, 3.2662, 3.1736, 2.4047, 3.3229, 3.2034, 2.3826], + device='cuda:1'), covar=tensor([0.0234, 0.0801, 0.0125, 0.0141, 0.0879, 0.0158, 0.0282, 0.0741], + device='cuda:1'), in_proj_covar=tensor([0.0141, 0.0182, 0.0098, 0.0103, 0.0178, 0.0130, 0.0144, 0.0172], + device='cuda:1'), out_proj_covar=tensor([1.3659e-04, 1.7578e-04, 1.0256e-04, 9.8918e-05, 1.6922e-04, 1.2359e-04, + 1.4688e-04, 1.7139e-04], device='cuda:1') +2023-03-08 19:13:00,926 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.2370, 2.4453, 2.1726, 2.4413, 3.2587, 3.2521, 2.5992, 2.9193], + device='cuda:1'), covar=tensor([0.0403, 0.0474, 0.1059, 0.0580, 0.0317, 0.0191, 0.0626, 0.0420], + device='cuda:1'), in_proj_covar=tensor([0.0098, 0.0075, 0.0131, 0.0102, 0.0074, 0.0057, 0.0092, 0.0100], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 19:13:37,599 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17075.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:13:50,994 INFO [train.py:898] (1/4) Epoch 5, batch 2550, loss[loss=0.2223, simple_loss=0.31, pruned_loss=0.06726, over 18487.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3088, pruned_loss=0.07964, over 3590460.86 frames. ], batch size: 51, lr: 2.15e-02, grad_scale: 8.0 +2023-03-08 19:14:36,797 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.128e+02 4.504e+02 5.454e+02 6.580e+02 1.151e+03, threshold=1.091e+03, percent-clipped=2.0 +2023-03-08 19:14:49,106 INFO [train.py:898] (1/4) Epoch 5, batch 2600, loss[loss=0.2502, simple_loss=0.325, pruned_loss=0.08772, over 18576.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3088, pruned_loss=0.07969, over 3589245.76 frames. ], batch size: 54, lr: 2.15e-02, grad_scale: 8.0 +2023-03-08 19:15:47,084 INFO [train.py:898] (1/4) Epoch 5, batch 2650, loss[loss=0.2202, simple_loss=0.2964, pruned_loss=0.07201, over 18247.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.309, pruned_loss=0.08002, over 3577345.61 frames. 
], batch size: 45, lr: 2.14e-02, grad_scale: 8.0 +2023-03-08 19:15:49,557 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17189.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:15:53,103 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.91 vs. limit=5.0 +2023-03-08 19:16:26,454 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6612, 5.4194, 5.3687, 5.2086, 4.9270, 5.2863, 4.5264, 5.1377], + device='cuda:1'), covar=tensor([0.0214, 0.0222, 0.0185, 0.0229, 0.0321, 0.0185, 0.1063, 0.0242], + device='cuda:1'), in_proj_covar=tensor([0.0129, 0.0172, 0.0158, 0.0149, 0.0166, 0.0170, 0.0241, 0.0154], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005], + device='cuda:1') +2023-03-08 19:16:31,331 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17224.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 19:16:33,374 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.843e+02 4.713e+02 5.523e+02 7.197e+02 1.239e+03, threshold=1.105e+03, percent-clipped=3.0 +2023-03-08 19:16:45,605 INFO [train.py:898] (1/4) Epoch 5, batch 2700, loss[loss=0.2046, simple_loss=0.2828, pruned_loss=0.0632, over 18497.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3087, pruned_loss=0.07949, over 3593444.48 frames. ], batch size: 47, lr: 2.14e-02, grad_scale: 8.0 +2023-03-08 19:17:01,416 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17250.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:17:15,254 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17262.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 19:17:43,514 INFO [train.py:898] (1/4) Epoch 5, batch 2750, loss[loss=0.2187, simple_loss=0.3024, pruned_loss=0.06753, over 18484.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.3084, pruned_loss=0.07922, over 3594164.95 frames. ], batch size: 51, lr: 2.14e-02, grad_scale: 8.0 +2023-03-08 19:17:46,075 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5211, 4.9959, 5.6527, 5.4772, 5.2768, 6.1690, 5.7571, 5.7421], + device='cuda:1'), covar=tensor([0.0776, 0.0515, 0.0586, 0.0473, 0.1262, 0.0630, 0.0500, 0.1247], + device='cuda:1'), in_proj_covar=tensor([0.0250, 0.0189, 0.0195, 0.0186, 0.0238, 0.0279, 0.0179, 0.0273], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-08 19:17:53,128 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5197, 3.7319, 5.2277, 4.4076, 2.9035, 2.5752, 4.3256, 5.1432], + device='cuda:1'), covar=tensor([0.0984, 0.1327, 0.0052, 0.0270, 0.0960, 0.1199, 0.0335, 0.0079], + device='cuda:1'), in_proj_covar=tensor([0.0131, 0.0173, 0.0066, 0.0136, 0.0160, 0.0161, 0.0135, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0002, 0.0003, 0.0002, 0.0001], + device='cuda:1') +2023-03-08 19:18:29,774 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.562e+02 4.313e+02 5.357e+02 6.708e+02 1.255e+03, threshold=1.071e+03, percent-clipped=3.0 +2023-03-08 19:18:42,601 INFO [train.py:898] (1/4) Epoch 5, batch 2800, loss[loss=0.2435, simple_loss=0.3205, pruned_loss=0.08321, over 17832.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3083, pruned_loss=0.07902, over 3592996.10 frames. 
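
The batch size field counts utterances and varies roughly inversely with their length: 129 short cuts over only 12394.00 frames at epoch 5, batch 2900 just below (and over 12627.00 at batch 1800 earlier), versus 57 longer cuts over 18288.00 frames at batch 2100. That is the signature of duration-budgeted batching, where each batch is filled up to a total-duration cap rather than to a fixed count; lhotse's bucketing samplers implement a more sophisticated version of this. A toy sketch of the policy, with an arbitrary budget.

from typing import Iterable, Iterator, List

def duration_batches(durations: Iterable[float],
                     max_duration: float = 600.0) -> Iterator[List[int]]:
    # Group cut indices so each batch's total duration stays under the
    # budget; a single cut longer than the budget still gets a batch of
    # its own. Real samplers also bucket by length and shuffle.
    batch, total = [], 0.0
    for idx, dur in enumerate(durations):
        if batch and total + dur > max_duration:
            yield batch
            batch, total = [], 0.0
        batch.append(idx)
        total += dur
    if batch:
        yield batch
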
], batch size: 70, lr: 2.14e-02, grad_scale: 8.0 +2023-03-08 19:18:44,734 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.67 vs. limit=2.0 +2023-03-08 19:19:12,125 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17362.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:19:36,986 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17383.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 19:19:41,207 INFO [train.py:898] (1/4) Epoch 5, batch 2850, loss[loss=0.2421, simple_loss=0.3221, pruned_loss=0.08107, over 17758.00 frames. ], tot_loss[loss=0.234, simple_loss=0.3093, pruned_loss=0.07934, over 3599607.37 frames. ], batch size: 70, lr: 2.13e-02, grad_scale: 8.0 +2023-03-08 19:20:04,318 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.73 vs. limit=2.0 +2023-03-08 19:20:24,474 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17423.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:20:27,505 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.643e+02 4.399e+02 5.133e+02 6.375e+02 2.228e+03, threshold=1.027e+03, percent-clipped=3.0 +2023-03-08 19:20:40,826 INFO [train.py:898] (1/4) Epoch 5, batch 2900, loss[loss=0.2744, simple_loss=0.3396, pruned_loss=0.1046, over 12394.00 frames. ], tot_loss[loss=0.2348, simple_loss=0.3097, pruned_loss=0.07995, over 3582394.00 frames. ], batch size: 129, lr: 2.13e-02, grad_scale: 8.0 +2023-03-08 19:20:49,165 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17444.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 19:21:05,741 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0705, 2.0285, 3.5210, 3.1156, 3.7328, 5.1883, 4.3513, 4.5465], + device='cuda:1'), covar=tensor([0.0390, 0.0928, 0.0749, 0.0542, 0.0885, 0.0024, 0.0242, 0.0144], + device='cuda:1'), in_proj_covar=tensor([0.0155, 0.0202, 0.0168, 0.0191, 0.0286, 0.0110, 0.0172, 0.0145], + device='cuda:1'), out_proj_covar=tensor([1.1520e-04, 1.5069e-04, 1.3566e-04, 1.3071e-04, 2.1158e-04, 7.5069e-05, + 1.2610e-04, 1.0719e-04], device='cuda:1') +2023-03-08 19:21:38,382 INFO [train.py:898] (1/4) Epoch 5, batch 2950, loss[loss=0.2532, simple_loss=0.3332, pruned_loss=0.08664, over 18299.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.3094, pruned_loss=0.07986, over 3582776.21 frames. ], batch size: 57, lr: 2.13e-02, grad_scale: 8.0 +2023-03-08 19:22:13,592 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.64 vs. limit=5.0 +2023-03-08 19:22:16,603 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8420, 5.4604, 5.5094, 5.3390, 5.0226, 5.3511, 4.6983, 5.2482], + device='cuda:1'), covar=tensor([0.0202, 0.0282, 0.0149, 0.0192, 0.0311, 0.0205, 0.0990, 0.0274], + device='cuda:1'), in_proj_covar=tensor([0.0128, 0.0172, 0.0157, 0.0150, 0.0164, 0.0172, 0.0241, 0.0154], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005], + device='cuda:1') +2023-03-08 19:22:22,599 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17524.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 19:22:24,447 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.921e+02 4.474e+02 5.609e+02 7.240e+02 1.382e+03, threshold=1.122e+03, percent-clipped=5.0 +2023-03-08 19:22:27,336 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. 
limit=2.0 +2023-03-08 19:22:36,713 INFO [train.py:898] (1/4) Epoch 5, batch 3000, loss[loss=0.282, simple_loss=0.3449, pruned_loss=0.1096, over 18497.00 frames. ], tot_loss[loss=0.2339, simple_loss=0.3089, pruned_loss=0.07948, over 3597949.49 frames. ], batch size: 53, lr: 2.12e-02, grad_scale: 8.0 +2023-03-08 19:22:36,714 INFO [train.py:923] (1/4) Computing validation loss +2023-03-08 19:22:48,699 INFO [train.py:932] (1/4) Epoch 5, validation: loss=0.1806, simple_loss=0.2829, pruned_loss=0.03918, over 944034.00 frames. +2023-03-08 19:22:48,700 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19608MB +2023-03-08 19:22:54,822 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5628, 5.1318, 5.2386, 5.0880, 4.6757, 5.0467, 4.2438, 4.9357], + device='cuda:1'), covar=tensor([0.0211, 0.0360, 0.0205, 0.0230, 0.0397, 0.0216, 0.1252, 0.0279], + device='cuda:1'), in_proj_covar=tensor([0.0128, 0.0173, 0.0156, 0.0150, 0.0164, 0.0170, 0.0240, 0.0153], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005], + device='cuda:1') +2023-03-08 19:22:58,802 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17545.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:23:19,495 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17562.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 19:23:30,587 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17572.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 19:23:47,563 INFO [train.py:898] (1/4) Epoch 5, batch 3050, loss[loss=0.2424, simple_loss=0.3251, pruned_loss=0.07983, over 18081.00 frames. ], tot_loss[loss=0.2353, simple_loss=0.3099, pruned_loss=0.0803, over 3583264.99 frames. ], batch size: 62, lr: 2.12e-02, grad_scale: 8.0 +2023-03-08 19:23:53,478 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2089, 4.3115, 2.2997, 4.3662, 5.1416, 2.4421, 3.8866, 3.9293], + device='cuda:1'), covar=tensor([0.0096, 0.0957, 0.1507, 0.0489, 0.0044, 0.1280, 0.0544, 0.0655], + device='cuda:1'), in_proj_covar=tensor([0.0082, 0.0163, 0.0175, 0.0170, 0.0072, 0.0163, 0.0183, 0.0181], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 19:24:15,396 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17610.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:24:33,683 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.113e+02 4.467e+02 5.746e+02 6.739e+02 1.721e+03, threshold=1.149e+03, percent-clipped=3.0 +2023-03-08 19:24:34,459 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-08 19:24:46,749 INFO [train.py:898] (1/4) Epoch 5, batch 3100, loss[loss=0.2671, simple_loss=0.3381, pruned_loss=0.09804, over 17951.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.309, pruned_loss=0.07976, over 3581521.82 frames. 
], batch size: 65, lr: 2.12e-02, grad_scale: 8.0 +2023-03-08 19:25:02,605 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7327, 2.6540, 3.8697, 3.8229, 1.5622, 4.1766, 3.3435, 2.5860], + device='cuda:1'), covar=tensor([0.0359, 0.1500, 0.0191, 0.0239, 0.2279, 0.0201, 0.0558, 0.1213], + device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0185, 0.0102, 0.0112, 0.0188, 0.0140, 0.0152, 0.0173], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 19:25:10,487 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4476, 5.0508, 5.4954, 5.4694, 5.2620, 6.1264, 5.7311, 5.6223], + device='cuda:1'), covar=tensor([0.0672, 0.0562, 0.0636, 0.0468, 0.1248, 0.0652, 0.0451, 0.1182], + device='cuda:1'), in_proj_covar=tensor([0.0242, 0.0185, 0.0190, 0.0179, 0.0227, 0.0271, 0.0176, 0.0257], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-08 19:25:45,587 INFO [train.py:898] (1/4) Epoch 5, batch 3150, loss[loss=0.2595, simple_loss=0.3348, pruned_loss=0.09213, over 18333.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3082, pruned_loss=0.0789, over 3586315.10 frames. ], batch size: 57, lr: 2.12e-02, grad_scale: 8.0 +2023-03-08 19:26:00,881 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17700.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:26:22,155 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17718.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:26:31,147 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.328e+02 4.460e+02 5.383e+02 6.414e+02 1.196e+03, threshold=1.077e+03, percent-clipped=2.0 +2023-03-08 19:26:35,932 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17730.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:26:37,027 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7446, 5.3541, 5.3509, 5.2767, 4.9913, 5.2660, 4.5651, 5.1522], + device='cuda:1'), covar=tensor([0.0214, 0.0227, 0.0167, 0.0177, 0.0306, 0.0192, 0.1124, 0.0243], + device='cuda:1'), in_proj_covar=tensor([0.0128, 0.0173, 0.0159, 0.0153, 0.0167, 0.0171, 0.0241, 0.0154], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005], + device='cuda:1') +2023-03-08 19:26:44,014 INFO [train.py:898] (1/4) Epoch 5, batch 3200, loss[loss=0.2069, simple_loss=0.2895, pruned_loss=0.06213, over 18565.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.3093, pruned_loss=0.07939, over 3591994.39 frames. ], batch size: 45, lr: 2.11e-02, grad_scale: 8.0 +2023-03-08 19:26:46,686 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17739.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 19:27:12,028 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17761.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:27:42,483 INFO [train.py:898] (1/4) Epoch 5, batch 3250, loss[loss=0.2047, simple_loss=0.2787, pruned_loss=0.06536, over 18477.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.3082, pruned_loss=0.0788, over 3601771.53 frames. 
], batch size: 44, lr: 2.11e-02, grad_scale: 8.0 +2023-03-08 19:27:47,468 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17791.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:28:07,611 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3735, 5.2388, 4.7404, 5.2462, 5.1938, 4.6658, 5.2249, 4.8077], + device='cuda:1'), covar=tensor([0.0323, 0.0471, 0.1460, 0.0604, 0.0518, 0.0375, 0.0271, 0.0791], + device='cuda:1'), in_proj_covar=tensor([0.0308, 0.0345, 0.0503, 0.0270, 0.0255, 0.0323, 0.0331, 0.0436], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-08 19:28:28,463 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.849e+02 4.466e+02 5.558e+02 6.730e+02 1.712e+03, threshold=1.112e+03, percent-clipped=5.0 +2023-03-08 19:28:40,499 INFO [train.py:898] (1/4) Epoch 5, batch 3300, loss[loss=0.2391, simple_loss=0.3214, pruned_loss=0.0784, over 18562.00 frames. ], tot_loss[loss=0.233, simple_loss=0.3085, pruned_loss=0.07878, over 3609709.68 frames. ], batch size: 54, lr: 2.11e-02, grad_scale: 16.0 +2023-03-08 19:28:50,401 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17845.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:29:26,043 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5709, 5.2461, 5.3257, 5.1330, 4.8321, 5.1000, 4.3677, 5.0766], + device='cuda:1'), covar=tensor([0.0205, 0.0302, 0.0195, 0.0244, 0.0374, 0.0223, 0.1369, 0.0252], + device='cuda:1'), in_proj_covar=tensor([0.0125, 0.0172, 0.0157, 0.0154, 0.0165, 0.0170, 0.0242, 0.0152], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005], + device='cuda:1') +2023-03-08 19:29:38,681 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. limit=2.0 +2023-03-08 19:29:39,236 INFO [train.py:898] (1/4) Epoch 5, batch 3350, loss[loss=0.3061, simple_loss=0.3619, pruned_loss=0.1252, over 12442.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.3085, pruned_loss=0.07833, over 3606165.46 frames. 
], batch size: 130, lr: 2.11e-02, grad_scale: 16.0 +2023-03-08 19:29:46,126 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17893.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:30:25,126 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.611e+02 4.192e+02 5.439e+02 6.827e+02 1.413e+03, threshold=1.088e+03, percent-clipped=2.0 +2023-03-08 19:30:30,549 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4906, 2.6143, 3.5320, 3.5767, 2.3869, 3.8900, 3.5965, 2.5436], + device='cuda:1'), covar=tensor([0.0265, 0.1039, 0.0178, 0.0160, 0.1282, 0.0128, 0.0391, 0.0824], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0186, 0.0102, 0.0111, 0.0186, 0.0141, 0.0149, 0.0174], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 19:30:31,615 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0293, 4.9674, 4.5725, 4.9509, 4.9225, 4.4710, 4.9126, 4.6100], + device='cuda:1'), covar=tensor([0.0420, 0.0484, 0.1365, 0.0662, 0.0477, 0.0436, 0.0323, 0.0736], + device='cuda:1'), in_proj_covar=tensor([0.0310, 0.0347, 0.0503, 0.0271, 0.0257, 0.0323, 0.0331, 0.0431], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-08 19:30:38,006 INFO [train.py:898] (1/4) Epoch 5, batch 3400, loss[loss=0.2543, simple_loss=0.3306, pruned_loss=0.08899, over 18632.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3079, pruned_loss=0.07827, over 3599618.63 frames. ], batch size: 52, lr: 2.10e-02, grad_scale: 16.0 +2023-03-08 19:31:02,928 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.03 vs. limit=2.0 +2023-03-08 19:31:04,959 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17960.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:31:36,930 INFO [train.py:898] (1/4) Epoch 5, batch 3450, loss[loss=0.2374, simple_loss=0.3119, pruned_loss=0.08143, over 18402.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.308, pruned_loss=0.07826, over 3597113.55 frames. ], batch size: 50, lr: 2.10e-02, grad_scale: 16.0 +2023-03-08 19:31:44,626 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7707, 4.5603, 4.8137, 4.4175, 4.5918, 4.6433, 4.9748, 4.9628], + device='cuda:1'), covar=tensor([0.0069, 0.0113, 0.0085, 0.0116, 0.0093, 0.0129, 0.0103, 0.0144], + device='cuda:1'), in_proj_covar=tensor([0.0063, 0.0048, 0.0048, 0.0061, 0.0054, 0.0070, 0.0058, 0.0059], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 19:32:17,505 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18018.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:32:20,888 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18021.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:32:26,801 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.873e+02 4.562e+02 5.412e+02 6.904e+02 1.746e+03, threshold=1.082e+03, percent-clipped=5.0 +2023-03-08 19:32:39,619 INFO [train.py:898] (1/4) Epoch 5, batch 3500, loss[loss=0.2336, simple_loss=0.314, pruned_loss=0.0766, over 18626.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.3093, pruned_loss=0.07912, over 3581508.45 frames. 
], batch size: 52, lr: 2.10e-02, grad_scale: 16.0 +2023-03-08 19:32:42,178 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18039.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 19:33:01,677 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18056.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:33:12,385 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18066.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:33:33,599 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18086.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:33:34,497 INFO [train.py:898] (1/4) Epoch 5, batch 3550, loss[loss=0.2279, simple_loss=0.3097, pruned_loss=0.073, over 18301.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.3087, pruned_loss=0.07874, over 3592014.54 frames. ], batch size: 54, lr: 2.09e-02, grad_scale: 16.0 +2023-03-08 19:33:34,630 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18087.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 19:33:35,133 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-08 19:34:07,565 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-03-08 19:34:17,502 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.924e+02 4.145e+02 5.139e+02 6.642e+02 1.282e+03, threshold=1.028e+03, percent-clipped=2.0 +2023-03-08 19:34:29,570 INFO [train.py:898] (1/4) Epoch 5, batch 3600, loss[loss=0.2091, simple_loss=0.291, pruned_loss=0.06366, over 18507.00 frames. ], tot_loss[loss=0.2323, simple_loss=0.3083, pruned_loss=0.07818, over 3600024.92 frames. ], batch size: 51, lr: 2.09e-02, grad_scale: 16.0 +2023-03-08 19:35:34,914 INFO [train.py:898] (1/4) Epoch 6, batch 0, loss[loss=0.2525, simple_loss=0.3299, pruned_loss=0.08758, over 18301.00 frames. ], tot_loss[loss=0.2525, simple_loss=0.3299, pruned_loss=0.08758, over 18301.00 frames. ], batch size: 57, lr: 1.95e-02, grad_scale: 16.0 +2023-03-08 19:35:34,914 INFO [train.py:923] (1/4) Computing validation loss +2023-03-08 19:35:45,381 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3869, 4.1440, 4.2759, 4.0585, 4.1838, 4.1322, 4.4819, 4.4494], + device='cuda:1'), covar=tensor([0.0058, 0.0091, 0.0075, 0.0092, 0.0071, 0.0104, 0.0081, 0.0095], + device='cuda:1'), in_proj_covar=tensor([0.0061, 0.0046, 0.0047, 0.0058, 0.0052, 0.0068, 0.0057, 0.0056], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 19:35:46,592 INFO [train.py:932] (1/4) Epoch 6, validation: loss=0.1816, simple_loss=0.2843, pruned_loss=0.0395, over 944034.00 frames. +2023-03-08 19:35:46,593 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19608MB +2023-03-08 19:35:50,319 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18174.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:35:51,435 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18175.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:36:04,262 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-03-08 19:36:16,579 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. 
limit=2.0 +2023-03-08 19:36:19,197 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.93 vs. limit=2.0 +2023-03-08 19:36:35,229 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-08 19:36:44,909 INFO [train.py:898] (1/4) Epoch 6, batch 50, loss[loss=0.1737, simple_loss=0.2511, pruned_loss=0.04815, over 18498.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3032, pruned_loss=0.07591, over 813974.70 frames. ], batch size: 44, lr: 1.95e-02, grad_scale: 8.0 +2023-03-08 19:36:50,533 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7486, 1.7378, 2.9436, 2.6885, 3.6213, 5.0233, 4.2717, 4.4456], + device='cuda:1'), covar=tensor([0.0478, 0.1151, 0.1077, 0.0774, 0.1077, 0.0034, 0.0260, 0.0148], + device='cuda:1'), in_proj_covar=tensor([0.0158, 0.0207, 0.0181, 0.0197, 0.0286, 0.0112, 0.0175, 0.0146], + device='cuda:1'), out_proj_covar=tensor([1.1491e-04, 1.5133e-04, 1.4201e-04, 1.3344e-04, 2.0950e-04, 7.5402e-05, + 1.2705e-04, 1.0646e-04], device='cuda:1') +2023-03-08 19:36:52,258 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.724e+02 4.768e+02 5.691e+02 6.790e+02 1.877e+03, threshold=1.138e+03, percent-clipped=9.0 +2023-03-08 19:37:01,547 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18235.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:37:02,642 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18236.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:37:43,156 INFO [train.py:898] (1/4) Epoch 6, batch 100, loss[loss=0.2395, simple_loss=0.3181, pruned_loss=0.08044, over 16313.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3016, pruned_loss=0.075, over 1433375.61 frames. ], batch size: 94, lr: 1.95e-02, grad_scale: 8.0 +2023-03-08 19:38:03,225 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4399, 3.1194, 4.1782, 3.9423, 2.8646, 4.5937, 4.0815, 2.5446], + device='cuda:1'), covar=tensor([0.0392, 0.0879, 0.0138, 0.0193, 0.1146, 0.0100, 0.0277, 0.1035], + device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0190, 0.0101, 0.0113, 0.0186, 0.0142, 0.0152, 0.0178], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 19:38:36,454 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18316.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:38:41,335 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=2.02 vs. limit=2.0 +2023-03-08 19:38:41,805 INFO [train.py:898] (1/4) Epoch 6, batch 150, loss[loss=0.3006, simple_loss=0.3511, pruned_loss=0.125, over 12320.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.3036, pruned_loss=0.07611, over 1904064.56 frames. ], batch size: 130, lr: 1.94e-02, grad_scale: 8.0 +2023-03-08 19:38:48,541 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.997e+02 4.126e+02 4.935e+02 6.131e+02 1.362e+03, threshold=9.869e+02, percent-clipped=2.0 +2023-03-08 19:39:22,903 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18356.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:39:40,016 INFO [train.py:898] (1/4) Epoch 6, batch 200, loss[loss=0.2435, simple_loss=0.3237, pruned_loss=0.0816, over 16171.00 frames. ], tot_loss[loss=0.2288, simple_loss=0.3043, pruned_loss=0.07667, over 2271104.99 frames. 
], batch size: 95, lr: 1.94e-02, grad_scale: 8.0 +2023-03-08 19:39:46,057 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7269, 1.9328, 2.7726, 2.5285, 3.2055, 4.4096, 3.9391, 3.7700], + device='cuda:1'), covar=tensor([0.0454, 0.1048, 0.0955, 0.0739, 0.1057, 0.0048, 0.0233, 0.0182], + device='cuda:1'), in_proj_covar=tensor([0.0159, 0.0207, 0.0183, 0.0193, 0.0286, 0.0113, 0.0177, 0.0146], + device='cuda:1'), out_proj_covar=tensor([1.1532e-04, 1.5116e-04, 1.4238e-04, 1.3110e-04, 2.0928e-04, 7.6113e-05, + 1.2777e-04, 1.0618e-04], device='cuda:1') +2023-03-08 19:39:57,722 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18386.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:40:08,981 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5614, 3.3322, 1.6850, 4.2505, 2.9119, 4.5296, 1.9429, 4.0424], + device='cuda:1'), covar=tensor([0.0438, 0.0814, 0.1570, 0.0302, 0.0920, 0.0117, 0.1284, 0.0283], + device='cuda:1'), in_proj_covar=tensor([0.0156, 0.0191, 0.0162, 0.0169, 0.0169, 0.0128, 0.0170, 0.0158], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 19:40:19,048 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18404.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:40:39,270 INFO [train.py:898] (1/4) Epoch 6, batch 250, loss[loss=0.1898, simple_loss=0.2645, pruned_loss=0.05753, over 18433.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.3023, pruned_loss=0.07537, over 2565325.30 frames. ], batch size: 43, lr: 1.94e-02, grad_scale: 8.0 +2023-03-08 19:40:46,044 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.187e+02 4.523e+02 5.806e+02 6.957e+02 1.437e+03, threshold=1.161e+03, percent-clipped=4.0 +2023-03-08 19:40:54,354 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18434.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:41:17,252 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-03-08 19:41:38,853 INFO [train.py:898] (1/4) Epoch 6, batch 300, loss[loss=0.2587, simple_loss=0.3337, pruned_loss=0.0919, over 16908.00 frames. ], tot_loss[loss=0.2257, simple_loss=0.3017, pruned_loss=0.07488, over 2785422.27 frames. ], batch size: 78, lr: 1.94e-02, grad_scale: 8.0 +2023-03-08 19:42:01,366 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18490.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:42:21,883 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18508.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 19:42:36,902 INFO [train.py:898] (1/4) Epoch 6, batch 350, loss[loss=0.2959, simple_loss=0.35, pruned_loss=0.1209, over 12713.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3016, pruned_loss=0.07456, over 2972651.27 frames. 
], batch size: 131, lr: 1.93e-02, grad_scale: 8.0 +2023-03-08 19:42:44,091 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.993e+02 4.095e+02 4.916e+02 6.743e+02 1.094e+03, threshold=9.831e+02, percent-clipped=0.0 +2023-03-08 19:42:47,459 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18530.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:42:48,606 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18531.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:42:58,786 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0793, 4.6097, 4.6347, 4.6273, 4.2744, 4.5278, 3.6713, 4.4228], + device='cuda:1'), covar=tensor([0.0265, 0.0391, 0.0311, 0.0272, 0.0399, 0.0293, 0.1619, 0.0352], + device='cuda:1'), in_proj_covar=tensor([0.0128, 0.0175, 0.0159, 0.0157, 0.0166, 0.0174, 0.0242, 0.0155], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005], + device='cuda:1') +2023-03-08 19:43:11,113 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18551.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:43:31,844 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18569.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 19:43:33,754 INFO [train.py:898] (1/4) Epoch 6, batch 400, loss[loss=0.2279, simple_loss=0.3027, pruned_loss=0.07656, over 18345.00 frames. ], tot_loss[loss=0.227, simple_loss=0.3031, pruned_loss=0.07548, over 3103811.72 frames. ], batch size: 46, lr: 1.93e-02, grad_scale: 8.0 +2023-03-08 19:44:26,345 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18616.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:44:31,603 INFO [train.py:898] (1/4) Epoch 6, batch 450, loss[loss=0.2148, simple_loss=0.2929, pruned_loss=0.06828, over 18383.00 frames. ], tot_loss[loss=0.2279, simple_loss=0.3038, pruned_loss=0.07595, over 3209634.85 frames. 
], batch size: 50, lr: 1.93e-02, grad_scale: 8.0 +2023-03-08 19:44:38,640 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.446e+02 4.323e+02 5.172e+02 6.972e+02 1.405e+03, threshold=1.034e+03, percent-clipped=7.0 +2023-03-08 19:44:51,258 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8150, 3.5304, 3.4966, 3.1070, 3.5098, 2.6441, 2.3906, 3.8815], + device='cuda:1'), covar=tensor([0.0025, 0.0075, 0.0057, 0.0119, 0.0056, 0.0170, 0.0242, 0.0039], + device='cuda:1'), in_proj_covar=tensor([0.0058, 0.0073, 0.0068, 0.0106, 0.0069, 0.0110, 0.0115, 0.0062], + device='cuda:1'), out_proj_covar=tensor([8.3941e-05, 1.1630e-04, 1.0611e-04, 1.7235e-04, 1.0512e-04, 1.7591e-04, + 1.8007e-04, 9.5523e-05], device='cuda:1') +2023-03-08 19:44:58,911 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7247, 4.7477, 4.7589, 4.5205, 4.7155, 4.5896, 5.0182, 4.9414], + device='cuda:1'), covar=tensor([0.0068, 0.0108, 0.0069, 0.0098, 0.0064, 0.0112, 0.0081, 0.0096], + device='cuda:1'), in_proj_covar=tensor([0.0064, 0.0049, 0.0048, 0.0061, 0.0053, 0.0070, 0.0060, 0.0059], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 19:44:58,958 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8349, 4.3876, 4.6166, 3.2429, 3.4549, 3.5075, 2.3341, 1.9083], + device='cuda:1'), covar=tensor([0.0158, 0.0207, 0.0035, 0.0301, 0.0359, 0.0139, 0.0748, 0.1060], + device='cuda:1'), in_proj_covar=tensor([0.0041, 0.0039, 0.0033, 0.0048, 0.0064, 0.0041, 0.0061, 0.0068], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004], + device='cuda:1') +2023-03-08 19:45:20,699 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18664.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:45:29,549 INFO [train.py:898] (1/4) Epoch 6, batch 500, loss[loss=0.1904, simple_loss=0.2661, pruned_loss=0.05735, over 18351.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.3037, pruned_loss=0.07591, over 3292953.91 frames. ], batch size: 46, lr: 1.93e-02, grad_scale: 8.0 +2023-03-08 19:45:33,103 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18674.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:46:00,126 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4114, 5.3118, 4.8693, 5.3148, 5.3392, 4.6723, 5.3016, 4.9563], + device='cuda:1'), covar=tensor([0.0335, 0.0369, 0.1355, 0.0685, 0.0398, 0.0448, 0.0289, 0.0758], + device='cuda:1'), in_proj_covar=tensor([0.0316, 0.0359, 0.0507, 0.0284, 0.0258, 0.0330, 0.0338, 0.0441], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-08 19:46:23,173 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.69 vs. 
limit=5.0 +2023-03-08 19:46:27,731 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5054, 5.1647, 5.2422, 5.2440, 4.6915, 5.0385, 4.0268, 4.9243], + device='cuda:1'), covar=tensor([0.0284, 0.0372, 0.0274, 0.0217, 0.0455, 0.0306, 0.1790, 0.0398], + device='cuda:1'), in_proj_covar=tensor([0.0131, 0.0175, 0.0161, 0.0159, 0.0168, 0.0174, 0.0242, 0.0156], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005], + device='cuda:1') +2023-03-08 19:46:28,531 INFO [train.py:898] (1/4) Epoch 6, batch 550, loss[loss=0.2126, simple_loss=0.2862, pruned_loss=0.06945, over 18248.00 frames. ], tot_loss[loss=0.2275, simple_loss=0.3034, pruned_loss=0.07576, over 3353715.89 frames. ], batch size: 47, lr: 1.92e-02, grad_scale: 8.0 +2023-03-08 19:46:35,267 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.775e+02 4.304e+02 5.425e+02 6.689e+02 1.717e+03, threshold=1.085e+03, percent-clipped=5.0 +2023-03-08 19:46:45,267 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18735.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:46:46,886 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.60 vs. limit=2.0 +2023-03-08 19:46:59,266 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.96 vs. limit=2.0 +2023-03-08 19:47:12,068 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.38 vs. limit=5.0 +2023-03-08 19:47:26,400 INFO [train.py:898] (1/4) Epoch 6, batch 600, loss[loss=0.2329, simple_loss=0.3139, pruned_loss=0.07594, over 17116.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3029, pruned_loss=0.07464, over 3406252.55 frames. ], batch size: 78, lr: 1.92e-02, grad_scale: 8.0 +2023-03-08 19:47:35,647 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-08 19:48:14,943 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5900, 1.9255, 3.0133, 2.7673, 3.3450, 4.8060, 4.1138, 3.5306], + device='cuda:1'), covar=tensor([0.0669, 0.1478, 0.1234, 0.0847, 0.1470, 0.0044, 0.0359, 0.0353], + device='cuda:1'), in_proj_covar=tensor([0.0165, 0.0214, 0.0195, 0.0201, 0.0297, 0.0119, 0.0185, 0.0150], + device='cuda:1'), out_proj_covar=tensor([1.1937e-04, 1.5561e-04, 1.5045e-04, 1.3624e-04, 2.1535e-04, 7.9538e-05, + 1.3399e-04, 1.0862e-04], device='cuda:1') +2023-03-08 19:48:25,654 INFO [train.py:898] (1/4) Epoch 6, batch 650, loss[loss=0.2004, simple_loss=0.2835, pruned_loss=0.05865, over 18405.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3027, pruned_loss=0.07422, over 3455547.17 frames. ], batch size: 50, lr: 1.92e-02, grad_scale: 8.0 +2023-03-08 19:48:33,825 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.489e+02 4.300e+02 5.176e+02 5.986e+02 1.905e+03, threshold=1.035e+03, percent-clipped=4.0 +2023-03-08 19:48:37,420 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18830.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:48:38,581 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18831.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:48:39,127 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. 
limit=2.0 +2023-03-08 19:48:53,918 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.2356, 2.4072, 2.2882, 2.7698, 3.3139, 3.2360, 2.6250, 2.9575], + device='cuda:1'), covar=tensor([0.0175, 0.0373, 0.0770, 0.0350, 0.0203, 0.0125, 0.0468, 0.0326], + device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0076, 0.0135, 0.0108, 0.0082, 0.0061, 0.0101, 0.0107], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 19:48:56,013 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18846.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:49:17,267 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18864.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 19:49:17,577 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9052, 1.9045, 3.1395, 2.9663, 3.8496, 5.1626, 4.6100, 4.5957], + device='cuda:1'), covar=tensor([0.0467, 0.1117, 0.0995, 0.0676, 0.0876, 0.0034, 0.0225, 0.0163], + device='cuda:1'), in_proj_covar=tensor([0.0165, 0.0213, 0.0194, 0.0200, 0.0295, 0.0120, 0.0184, 0.0151], + device='cuda:1'), out_proj_covar=tensor([1.1932e-04, 1.5496e-04, 1.4980e-04, 1.3518e-04, 2.1397e-04, 8.0544e-05, + 1.3267e-04, 1.0870e-04], device='cuda:1') +2023-03-08 19:49:20,872 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8426, 2.9360, 4.1224, 4.1455, 2.6456, 4.6649, 4.0288, 2.7987], + device='cuda:1'), covar=tensor([0.0298, 0.0968, 0.0184, 0.0174, 0.1309, 0.0110, 0.0276, 0.0973], + device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0189, 0.0106, 0.0116, 0.0191, 0.0149, 0.0154, 0.0178], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 19:49:25,151 INFO [train.py:898] (1/4) Epoch 6, batch 700, loss[loss=0.2333, simple_loss=0.3053, pruned_loss=0.0807, over 18625.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3026, pruned_loss=0.0743, over 3485646.05 frames. ], batch size: 52, lr: 1.92e-02, grad_scale: 8.0 +2023-03-08 19:49:32,533 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0400, 4.9833, 5.1055, 5.0126, 4.9003, 5.6515, 5.0913, 5.0930], + device='cuda:1'), covar=tensor([0.0849, 0.0697, 0.0610, 0.0529, 0.1219, 0.0754, 0.0570, 0.1497], + device='cuda:1'), in_proj_covar=tensor([0.0258, 0.0191, 0.0201, 0.0192, 0.0240, 0.0284, 0.0185, 0.0276], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-08 19:49:33,683 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18878.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:49:34,639 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18879.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:50:23,144 INFO [train.py:898] (1/4) Epoch 6, batch 750, loss[loss=0.2497, simple_loss=0.3318, pruned_loss=0.0838, over 16027.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.3031, pruned_loss=0.07404, over 3512393.95 frames. 
], batch size: 94, lr: 1.91e-02, grad_scale: 8.0 +2023-03-08 19:50:29,884 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.737e+02 4.561e+02 5.480e+02 6.846e+02 1.883e+03, threshold=1.096e+03, percent-clipped=6.0 +2023-03-08 19:50:33,977 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18930.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:51:11,150 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7243, 5.3982, 5.3411, 5.2711, 4.9011, 5.1508, 4.5572, 5.1486], + device='cuda:1'), covar=tensor([0.0190, 0.0202, 0.0165, 0.0179, 0.0338, 0.0196, 0.1050, 0.0213], + device='cuda:1'), in_proj_covar=tensor([0.0132, 0.0179, 0.0164, 0.0163, 0.0171, 0.0178, 0.0248, 0.0158], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005], + device='cuda:1') +2023-03-08 19:51:21,251 INFO [train.py:898] (1/4) Epoch 6, batch 800, loss[loss=0.2837, simple_loss=0.3441, pruned_loss=0.1117, over 12187.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3016, pruned_loss=0.07359, over 3535633.01 frames. ], batch size: 130, lr: 1.91e-02, grad_scale: 8.0 +2023-03-08 19:51:46,395 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18991.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:52:20,914 INFO [train.py:898] (1/4) Epoch 6, batch 850, loss[loss=0.2386, simple_loss=0.3171, pruned_loss=0.08006, over 18407.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.302, pruned_loss=0.07423, over 3535026.38 frames. ], batch size: 52, lr: 1.91e-02, grad_scale: 8.0 +2023-03-08 19:52:28,233 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.682e+02 3.950e+02 4.754e+02 5.949e+02 2.076e+03, threshold=9.508e+02, percent-clipped=3.0 +2023-03-08 19:52:31,927 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19030.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:53:19,106 INFO [train.py:898] (1/4) Epoch 6, batch 900, loss[loss=0.232, simple_loss=0.3091, pruned_loss=0.07743, over 18270.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.301, pruned_loss=0.07357, over 3558556.14 frames. ], batch size: 60, lr: 1.91e-02, grad_scale: 8.0 +2023-03-08 19:54:17,492 INFO [train.py:898] (1/4) Epoch 6, batch 950, loss[loss=0.2175, simple_loss=0.3013, pruned_loss=0.06686, over 18642.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.302, pruned_loss=0.07374, over 3564136.38 frames. ], batch size: 52, lr: 1.90e-02, grad_scale: 8.0 +2023-03-08 19:54:24,267 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.702e+02 4.325e+02 5.195e+02 6.173e+02 1.123e+03, threshold=1.039e+03, percent-clipped=4.0 +2023-03-08 19:54:26,854 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19129.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 19:54:47,342 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19146.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:55:08,093 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19164.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 19:55:15,682 INFO [train.py:898] (1/4) Epoch 6, batch 1000, loss[loss=0.2199, simple_loss=0.2861, pruned_loss=0.07683, over 18251.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.3015, pruned_loss=0.0737, over 3562962.81 frames. 
], batch size: 45, lr: 1.90e-02, grad_scale: 8.0 +2023-03-08 19:55:38,229 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19190.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 19:55:40,895 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.57 vs. limit=2.0 +2023-03-08 19:55:42,535 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19194.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:55:42,739 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8489, 2.5093, 4.2046, 4.3015, 2.4916, 4.5378, 3.7560, 2.6786], + device='cuda:1'), covar=tensor([0.0282, 0.1188, 0.0105, 0.0131, 0.1325, 0.0128, 0.0346, 0.0925], + device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0188, 0.0108, 0.0116, 0.0191, 0.0152, 0.0159, 0.0179], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 19:56:05,321 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19212.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 19:56:09,772 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8784, 4.8213, 4.8842, 4.6795, 4.7485, 4.7070, 5.1317, 5.0814], + device='cuda:1'), covar=tensor([0.0055, 0.0061, 0.0062, 0.0083, 0.0051, 0.0089, 0.0065, 0.0077], + device='cuda:1'), in_proj_covar=tensor([0.0067, 0.0049, 0.0049, 0.0063, 0.0054, 0.0073, 0.0062, 0.0060], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 19:56:15,306 INFO [train.py:898] (1/4) Epoch 6, batch 1050, loss[loss=0.2526, simple_loss=0.3309, pruned_loss=0.08712, over 16065.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3016, pruned_loss=0.07388, over 3557951.44 frames. ], batch size: 95, lr: 1.90e-02, grad_scale: 8.0 +2023-03-08 19:56:21,992 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.826e+02 4.167e+02 5.198e+02 6.760e+02 1.282e+03, threshold=1.040e+03, percent-clipped=3.0 +2023-03-08 19:57:14,184 INFO [train.py:898] (1/4) Epoch 6, batch 1100, loss[loss=0.2543, simple_loss=0.3284, pruned_loss=0.09012, over 18090.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.302, pruned_loss=0.07406, over 3567041.49 frames. ], batch size: 62, lr: 1.90e-02, grad_scale: 8.0 +2023-03-08 19:57:31,195 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19286.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:57:31,574 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.28 vs. limit=5.0 +2023-03-08 19:58:13,170 INFO [train.py:898] (1/4) Epoch 6, batch 1150, loss[loss=0.1959, simple_loss=0.2667, pruned_loss=0.06253, over 18485.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3011, pruned_loss=0.0739, over 3568414.83 frames. 
], batch size: 44, lr: 1.90e-02, grad_scale: 8.0 +2023-03-08 19:58:15,876 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7389, 4.2314, 2.6863, 3.9072, 4.0336, 4.2159, 4.0712, 2.5881], + device='cuda:1'), covar=tensor([0.0124, 0.0055, 0.0604, 0.0200, 0.0070, 0.0058, 0.0095, 0.0829], + device='cuda:1'), in_proj_covar=tensor([0.0064, 0.0053, 0.0083, 0.0066, 0.0062, 0.0052, 0.0066, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-08 19:58:20,059 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.317e+02 3.874e+02 4.905e+02 6.066e+02 2.100e+03, threshold=9.811e+02, percent-clipped=2.0 +2023-03-08 19:58:23,674 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19330.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:58:49,314 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0189, 1.9551, 3.1075, 3.0189, 4.0498, 5.3472, 4.5789, 4.5074], + device='cuda:1'), covar=tensor([0.0459, 0.1107, 0.1058, 0.0685, 0.0841, 0.0026, 0.0235, 0.0165], + device='cuda:1'), in_proj_covar=tensor([0.0167, 0.0215, 0.0200, 0.0204, 0.0299, 0.0122, 0.0186, 0.0151], + device='cuda:1'), out_proj_covar=tensor([1.1903e-04, 1.5460e-04, 1.5257e-04, 1.3675e-04, 2.1581e-04, 8.1791e-05, + 1.3220e-04, 1.0791e-04], device='cuda:1') +2023-03-08 19:59:11,798 INFO [train.py:898] (1/4) Epoch 6, batch 1200, loss[loss=0.2469, simple_loss=0.324, pruned_loss=0.0849, over 18370.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3014, pruned_loss=0.07401, over 3572528.40 frames. ], batch size: 56, lr: 1.89e-02, grad_scale: 8.0 +2023-03-08 19:59:20,001 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19378.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:59:33,493 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7462, 2.8223, 4.2176, 4.2573, 2.3388, 4.5220, 3.9682, 2.7255], + device='cuda:1'), covar=tensor([0.0274, 0.1039, 0.0123, 0.0141, 0.1426, 0.0130, 0.0259, 0.1026], + device='cuda:1'), in_proj_covar=tensor([0.0153, 0.0192, 0.0108, 0.0118, 0.0194, 0.0151, 0.0156, 0.0181], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 19:59:37,941 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19394.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:00:05,791 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-03-08 20:00:10,014 INFO [train.py:898] (1/4) Epoch 6, batch 1250, loss[loss=0.2284, simple_loss=0.3061, pruned_loss=0.07538, over 18497.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3013, pruned_loss=0.07388, over 3585124.67 frames. 
], batch size: 51, lr: 1.89e-02, grad_scale: 8.0 +2023-03-08 20:00:16,826 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.768e+02 4.335e+02 5.425e+02 6.798e+02 1.467e+03, threshold=1.085e+03, percent-clipped=6.0 +2023-03-08 20:00:34,452 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4615, 4.5423, 2.5335, 4.2419, 5.3485, 2.4744, 4.2237, 3.9150], + device='cuda:1'), covar=tensor([0.0049, 0.0765, 0.1367, 0.0478, 0.0040, 0.1293, 0.0466, 0.0681], + device='cuda:1'), in_proj_covar=tensor([0.0087, 0.0171, 0.0171, 0.0169, 0.0075, 0.0158, 0.0181, 0.0182], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 20:00:49,610 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19455.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:01:08,436 INFO [train.py:898] (1/4) Epoch 6, batch 1300, loss[loss=0.2045, simple_loss=0.2819, pruned_loss=0.06361, over 18403.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3018, pruned_loss=0.07399, over 3578219.89 frames. ], batch size: 48, lr: 1.89e-02, grad_scale: 8.0 +2023-03-08 20:01:25,223 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19485.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 20:01:46,219 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.3689, 3.5753, 5.0715, 4.0142, 3.3034, 2.8530, 4.3399, 5.1689], + device='cuda:1'), covar=tensor([0.0931, 0.1432, 0.0048, 0.0339, 0.0705, 0.1045, 0.0288, 0.0076], + device='cuda:1'), in_proj_covar=tensor([0.0134, 0.0190, 0.0072, 0.0143, 0.0160, 0.0165, 0.0144, 0.0091], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 20:01:48,876 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.96 vs. limit=5.0 +2023-03-08 20:02:07,618 INFO [train.py:898] (1/4) Epoch 6, batch 1350, loss[loss=0.2011, simple_loss=0.272, pruned_loss=0.06506, over 17682.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3026, pruned_loss=0.07415, over 3588549.45 frames. ], batch size: 39, lr: 1.89e-02, grad_scale: 8.0 +2023-03-08 20:02:15,079 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.530e+02 3.979e+02 4.823e+02 6.493e+02 1.049e+03, threshold=9.645e+02, percent-clipped=0.0 +2023-03-08 20:02:36,368 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7889, 4.7903, 4.8189, 4.5615, 4.6068, 4.6087, 5.0786, 5.0423], + device='cuda:1'), covar=tensor([0.0065, 0.0077, 0.0076, 0.0099, 0.0073, 0.0113, 0.0090, 0.0099], + device='cuda:1'), in_proj_covar=tensor([0.0065, 0.0050, 0.0049, 0.0063, 0.0054, 0.0072, 0.0061, 0.0059], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 20:03:00,544 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9716, 4.6137, 4.6752, 3.4923, 3.9769, 3.8237, 2.5438, 1.9939], + device='cuda:1'), covar=tensor([0.0145, 0.0127, 0.0055, 0.0220, 0.0214, 0.0169, 0.0676, 0.1000], + device='cuda:1'), in_proj_covar=tensor([0.0044, 0.0042, 0.0035, 0.0049, 0.0068, 0.0043, 0.0066, 0.0071], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004], + device='cuda:1') +2023-03-08 20:03:05,923 INFO [train.py:898] (1/4) Epoch 6, batch 1400, loss[loss=0.2422, simple_loss=0.3182, pruned_loss=0.08312, over 18343.00 frames. 
], tot_loss[loss=0.2247, simple_loss=0.3018, pruned_loss=0.07385, over 3604294.49 frames. ], batch size: 56, lr: 1.88e-02, grad_scale: 8.0 +2023-03-08 20:03:23,781 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19586.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:03:54,216 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.1548, 3.5635, 4.9485, 4.1167, 3.0198, 2.6475, 4.2302, 5.0575], + device='cuda:1'), covar=tensor([0.0948, 0.1209, 0.0055, 0.0271, 0.0761, 0.1085, 0.0299, 0.0073], + device='cuda:1'), in_proj_covar=tensor([0.0133, 0.0189, 0.0072, 0.0140, 0.0160, 0.0165, 0.0144, 0.0091], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 20:04:01,596 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19618.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:04:04,687 INFO [train.py:898] (1/4) Epoch 6, batch 1450, loss[loss=0.2631, simple_loss=0.3172, pruned_loss=0.1045, over 12784.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3025, pruned_loss=0.07365, over 3601037.92 frames. ], batch size: 129, lr: 1.88e-02, grad_scale: 8.0 +2023-03-08 20:04:11,636 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.837e+02 4.168e+02 4.993e+02 6.128e+02 1.216e+03, threshold=9.987e+02, percent-clipped=6.0 +2023-03-08 20:04:20,344 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19634.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:05:03,126 INFO [train.py:898] (1/4) Epoch 6, batch 1500, loss[loss=0.1995, simple_loss=0.2762, pruned_loss=0.06142, over 18457.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3021, pruned_loss=0.07377, over 3599624.44 frames. ], batch size: 43, lr: 1.88e-02, grad_scale: 8.0 +2023-03-08 20:05:06,899 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0616, 4.2733, 2.2154, 4.4234, 5.0921, 2.3112, 3.7470, 3.9214], + device='cuda:1'), covar=tensor([0.0089, 0.0709, 0.1688, 0.0438, 0.0042, 0.1401, 0.0680, 0.0604], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0174, 0.0177, 0.0170, 0.0075, 0.0161, 0.0183, 0.0183], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 20:05:12,448 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19679.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:05:21,837 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1701, 2.5908, 2.2402, 2.6567, 3.2792, 3.3565, 2.8110, 2.7077], + device='cuda:1'), covar=tensor([0.0297, 0.0225, 0.0784, 0.0385, 0.0225, 0.0152, 0.0394, 0.0414], + device='cuda:1'), in_proj_covar=tensor([0.0107, 0.0076, 0.0138, 0.0115, 0.0083, 0.0062, 0.0105, 0.0109], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 20:06:01,664 INFO [train.py:898] (1/4) Epoch 6, batch 1550, loss[loss=0.1882, simple_loss=0.2645, pruned_loss=0.05591, over 18441.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3021, pruned_loss=0.07366, over 3588477.55 frames. 
], batch size: 43, lr: 1.88e-02, grad_scale: 8.0 +2023-03-08 20:06:09,095 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.820e+02 4.112e+02 5.168e+02 6.335e+02 1.578e+03, threshold=1.034e+03, percent-clipped=4.0 +2023-03-08 20:06:36,319 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19750.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:07:01,084 INFO [train.py:898] (1/4) Epoch 6, batch 1600, loss[loss=0.2038, simple_loss=0.2817, pruned_loss=0.06293, over 18357.00 frames. ], tot_loss[loss=0.2261, simple_loss=0.3031, pruned_loss=0.07452, over 3584139.87 frames. ], batch size: 46, lr: 1.87e-02, grad_scale: 8.0 +2023-03-08 20:07:17,743 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19785.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 20:07:58,558 INFO [train.py:898] (1/4) Epoch 6, batch 1650, loss[loss=0.2373, simple_loss=0.3197, pruned_loss=0.07752, over 17960.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3025, pruned_loss=0.07461, over 3578978.80 frames. ], batch size: 65, lr: 1.87e-02, grad_scale: 8.0 +2023-03-08 20:08:06,542 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.131e+02 4.738e+02 5.632e+02 7.460e+02 1.782e+03, threshold=1.126e+03, percent-clipped=8.0 +2023-03-08 20:08:13,473 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19833.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 20:08:23,284 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-08 20:08:48,840 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.2068, 2.4951, 2.3565, 2.6713, 3.3480, 3.3322, 2.7835, 2.9475], + device='cuda:1'), covar=tensor([0.0340, 0.0324, 0.0899, 0.0376, 0.0267, 0.0153, 0.0405, 0.0343], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0075, 0.0133, 0.0108, 0.0079, 0.0060, 0.0100, 0.0104], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 20:08:56,801 INFO [train.py:898] (1/4) Epoch 6, batch 1700, loss[loss=0.2371, simple_loss=0.3166, pruned_loss=0.07882, over 18348.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3029, pruned_loss=0.07479, over 3580715.70 frames. ], batch size: 56, lr: 1.87e-02, grad_scale: 8.0 +2023-03-08 20:09:55,326 INFO [train.py:898] (1/4) Epoch 6, batch 1750, loss[loss=0.2032, simple_loss=0.2863, pruned_loss=0.05998, over 18370.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3017, pruned_loss=0.07405, over 3577678.43 frames. ], batch size: 50, lr: 1.87e-02, grad_scale: 8.0 +2023-03-08 20:10:02,904 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.466e+02 3.770e+02 4.789e+02 6.100e+02 1.481e+03, threshold=9.579e+02, percent-clipped=1.0 +2023-03-08 20:10:41,800 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.1674, 3.4195, 4.7234, 3.9980, 3.0806, 2.7234, 4.0753, 4.7218], + device='cuda:1'), covar=tensor([0.0899, 0.1322, 0.0057, 0.0298, 0.0808, 0.1068, 0.0285, 0.0216], + device='cuda:1'), in_proj_covar=tensor([0.0134, 0.0198, 0.0073, 0.0144, 0.0165, 0.0169, 0.0147, 0.0097], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 20:10:53,737 INFO [train.py:898] (1/4) Epoch 6, batch 1800, loss[loss=0.2356, simple_loss=0.3146, pruned_loss=0.07827, over 18219.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3023, pruned_loss=0.07385, over 3578569.36 frames. 
], batch size: 60, lr: 1.87e-02, grad_scale: 8.0 +2023-03-08 20:10:57,321 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19974.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:11:53,070 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-03-08 20:11:55,829 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20020.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:11:56,582 INFO [train.py:898] (1/4) Epoch 6, batch 1850, loss[loss=0.2037, simple_loss=0.2718, pruned_loss=0.06779, over 18438.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.3021, pruned_loss=0.07403, over 3568768.21 frames. ], batch size: 43, lr: 1.86e-02, grad_scale: 8.0 +2023-03-08 20:12:03,174 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.737e+02 4.293e+02 5.407e+02 6.655e+02 1.124e+03, threshold=1.081e+03, percent-clipped=3.0 +2023-03-08 20:12:31,044 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20050.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:12:55,179 INFO [train.py:898] (1/4) Epoch 6, batch 1900, loss[loss=0.2229, simple_loss=0.2949, pruned_loss=0.07541, over 18272.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3025, pruned_loss=0.07422, over 3570537.30 frames. ], batch size: 47, lr: 1.86e-02, grad_scale: 8.0 +2023-03-08 20:13:07,096 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20081.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:13:10,264 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-08 20:13:17,189 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20089.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:13:27,669 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20098.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:13:44,390 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20112.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:13:54,354 INFO [train.py:898] (1/4) Epoch 6, batch 1950, loss[loss=0.2861, simple_loss=0.3405, pruned_loss=0.1158, over 12295.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3018, pruned_loss=0.07384, over 3579592.07 frames. ], batch size: 129, lr: 1.86e-02, grad_scale: 8.0 +2023-03-08 20:13:55,062 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-08 20:14:01,197 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.918e+02 4.095e+02 5.447e+02 6.778e+02 1.959e+03, threshold=1.089e+03, percent-clipped=6.0 +2023-03-08 20:14:28,743 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20150.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:14:33,550 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-08 20:14:52,815 INFO [train.py:898] (1/4) Epoch 6, batch 2000, loss[loss=0.2512, simple_loss=0.3291, pruned_loss=0.0867, over 18477.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3015, pruned_loss=0.07386, over 3576716.81 frames. 
], batch size: 59, lr: 1.86e-02, grad_scale: 8.0 +2023-03-08 20:14:53,374 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8355, 3.6701, 3.4594, 3.2587, 3.3733, 2.9282, 2.9301, 3.7936], + device='cuda:1'), covar=tensor([0.0030, 0.0060, 0.0059, 0.0079, 0.0073, 0.0129, 0.0138, 0.0038], + device='cuda:1'), in_proj_covar=tensor([0.0060, 0.0077, 0.0069, 0.0110, 0.0070, 0.0113, 0.0118, 0.0063], + device='cuda:1'), out_proj_covar=tensor([8.5774e-05, 1.2034e-04, 1.0716e-04, 1.7583e-04, 1.0568e-04, 1.7773e-04, + 1.8346e-04, 9.4549e-05], device='cuda:1') +2023-03-08 20:14:55,664 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20173.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:14:57,061 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.18 vs. limit=5.0 +2023-03-08 20:15:32,131 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20204.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:15:46,663 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-03-08 20:15:51,542 INFO [train.py:898] (1/4) Epoch 6, batch 2050, loss[loss=0.2468, simple_loss=0.3138, pruned_loss=0.0899, over 16404.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3013, pruned_loss=0.07387, over 3579580.89 frames. ], batch size: 94, lr: 1.86e-02, grad_scale: 4.0 +2023-03-08 20:15:59,260 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.572e+02 4.223e+02 5.010e+02 6.268e+02 1.616e+03, threshold=1.002e+03, percent-clipped=2.0 +2023-03-08 20:16:40,606 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3923, 2.6954, 3.8638, 4.1514, 2.4485, 4.2767, 3.9602, 2.5051], + device='cuda:1'), covar=tensor([0.0471, 0.1270, 0.0194, 0.0154, 0.1586, 0.0182, 0.0299, 0.1093], + device='cuda:1'), in_proj_covar=tensor([0.0157, 0.0191, 0.0108, 0.0114, 0.0193, 0.0150, 0.0157, 0.0177], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 20:16:43,470 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20265.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:16:49,850 INFO [train.py:898] (1/4) Epoch 6, batch 2100, loss[loss=0.2003, simple_loss=0.2718, pruned_loss=0.06438, over 18566.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3013, pruned_loss=0.07397, over 3590098.53 frames. ], batch size: 45, lr: 1.85e-02, grad_scale: 4.0 +2023-03-08 20:16:53,543 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20274.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:17:49,017 INFO [train.py:898] (1/4) Epoch 6, batch 2150, loss[loss=0.2328, simple_loss=0.3135, pruned_loss=0.07605, over 18405.00 frames. ], tot_loss[loss=0.2239, simple_loss=0.3009, pruned_loss=0.07349, over 3589543.19 frames. ], batch size: 52, lr: 1.85e-02, grad_scale: 4.0 +2023-03-08 20:17:50,332 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20322.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:17:54,094 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.20 vs. 
limit=5.0 +2023-03-08 20:17:56,867 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.681e+02 4.510e+02 5.215e+02 6.724e+02 1.670e+03, threshold=1.043e+03, percent-clipped=8.0 +2023-03-08 20:18:47,273 INFO [train.py:898] (1/4) Epoch 6, batch 2200, loss[loss=0.2165, simple_loss=0.2956, pruned_loss=0.06874, over 18003.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.3011, pruned_loss=0.07361, over 3587054.50 frames. ], batch size: 65, lr: 1.85e-02, grad_scale: 4.0 +2023-03-08 20:18:53,243 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20376.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:19:07,122 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20388.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:19:42,895 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20418.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:19:46,526 INFO [train.py:898] (1/4) Epoch 6, batch 2250, loss[loss=0.2726, simple_loss=0.3348, pruned_loss=0.1052, over 12598.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.3003, pruned_loss=0.07319, over 3571727.86 frames. ], batch size: 129, lr: 1.85e-02, grad_scale: 4.0 +2023-03-08 20:19:54,695 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.728e+02 4.214e+02 4.964e+02 6.088e+02 1.302e+03, threshold=9.929e+02, percent-clipped=3.0 +2023-03-08 20:19:57,098 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6028, 3.3916, 2.1601, 4.2919, 3.1552, 4.5176, 2.0569, 3.7708], + device='cuda:1'), covar=tensor([0.0475, 0.0787, 0.1349, 0.0432, 0.0764, 0.0204, 0.1292, 0.0409], + device='cuda:1'), in_proj_covar=tensor([0.0163, 0.0195, 0.0165, 0.0180, 0.0167, 0.0149, 0.0173, 0.0165], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 20:20:13,855 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20445.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:20:18,816 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20449.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:20:33,212 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-03-08 20:20:41,599 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20468.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:20:44,847 INFO [train.py:898] (1/4) Epoch 6, batch 2300, loss[loss=0.2459, simple_loss=0.3274, pruned_loss=0.08222, over 18396.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.301, pruned_loss=0.0736, over 3569307.72 frames. 
], batch size: 52, lr: 1.84e-02, grad_scale: 4.0 +2023-03-08 20:20:54,749 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20479.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:21:13,731 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7686, 3.5506, 3.5090, 3.1326, 3.5099, 2.7701, 2.7218, 3.8240], + device='cuda:1'), covar=tensor([0.0026, 0.0062, 0.0058, 0.0091, 0.0054, 0.0133, 0.0141, 0.0033], + device='cuda:1'), in_proj_covar=tensor([0.0060, 0.0079, 0.0072, 0.0112, 0.0072, 0.0114, 0.0120, 0.0064], + device='cuda:1'), out_proj_covar=tensor([8.4352e-05, 1.2305e-04, 1.1121e-04, 1.8015e-04, 1.0897e-04, 1.8017e-04, + 1.8815e-04, 9.5767e-05], device='cuda:1') +2023-03-08 20:21:14,823 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0932, 2.5820, 2.1141, 2.7027, 3.3489, 3.2076, 2.7923, 2.7974], + device='cuda:1'), covar=tensor([0.0210, 0.0294, 0.0772, 0.0283, 0.0167, 0.0137, 0.0383, 0.0322], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0076, 0.0134, 0.0106, 0.0077, 0.0062, 0.0100, 0.0103], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 20:21:43,769 INFO [train.py:898] (1/4) Epoch 6, batch 2350, loss[loss=0.2428, simple_loss=0.3218, pruned_loss=0.08189, over 17901.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.3009, pruned_loss=0.07318, over 3570229.13 frames. ], batch size: 70, lr: 1.84e-02, grad_scale: 4.0 +2023-03-08 20:21:52,093 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.562e+02 3.728e+02 4.764e+02 5.894e+02 1.500e+03, threshold=9.528e+02, percent-clipped=4.0 +2023-03-08 20:22:29,306 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20560.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:22:42,386 INFO [train.py:898] (1/4) Epoch 6, batch 2400, loss[loss=0.2227, simple_loss=0.2925, pruned_loss=0.07649, over 18401.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3008, pruned_loss=0.07276, over 3587122.14 frames. ], batch size: 48, lr: 1.84e-02, grad_scale: 8.0 +2023-03-08 20:23:02,841 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20588.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:23:41,479 INFO [train.py:898] (1/4) Epoch 6, batch 2450, loss[loss=0.234, simple_loss=0.3128, pruned_loss=0.07763, over 18201.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.3002, pruned_loss=0.0727, over 3589527.66 frames. ], batch size: 60, lr: 1.84e-02, grad_scale: 8.0 +2023-03-08 20:23:49,391 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.965e+02 4.013e+02 4.826e+02 5.800e+02 1.376e+03, threshold=9.653e+02, percent-clipped=2.0 +2023-03-08 20:24:13,858 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20649.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:24:38,630 INFO [train.py:898] (1/4) Epoch 6, batch 2500, loss[loss=0.1891, simple_loss=0.2682, pruned_loss=0.05494, over 18484.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.301, pruned_loss=0.07321, over 3586510.92 frames. ], batch size: 44, lr: 1.84e-02, grad_scale: 8.0 +2023-03-08 20:24:44,504 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20676.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:25:17,053 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.69 vs. 
limit=5.0 +2023-03-08 20:25:27,068 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.95 vs. limit=2.0 +2023-03-08 20:25:35,730 INFO [train.py:898] (1/4) Epoch 6, batch 2550, loss[loss=0.2035, simple_loss=0.2689, pruned_loss=0.06906, over 17592.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3019, pruned_loss=0.07398, over 3575980.10 frames. ], batch size: 39, lr: 1.83e-02, grad_scale: 4.0 +2023-03-08 20:25:40,323 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20724.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:25:45,641 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.233e+02 4.696e+02 5.623e+02 7.673e+02 1.890e+03, threshold=1.125e+03, percent-clipped=13.0 +2023-03-08 20:25:48,623 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.82 vs. limit=5.0 +2023-03-08 20:26:00,144 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7792, 4.4702, 4.5956, 3.4874, 3.5577, 3.2619, 2.4221, 1.9894], + device='cuda:1'), covar=tensor([0.0186, 0.0162, 0.0055, 0.0217, 0.0270, 0.0190, 0.0773, 0.0892], + device='cuda:1'), in_proj_covar=tensor([0.0043, 0.0041, 0.0037, 0.0050, 0.0068, 0.0045, 0.0066, 0.0070], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004], + device='cuda:1') +2023-03-08 20:26:03,315 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20744.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:26:04,531 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20745.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:26:19,184 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2040, 4.3424, 2.4296, 4.4248, 5.2167, 2.4552, 3.8080, 3.8394], + device='cuda:1'), covar=tensor([0.0100, 0.1128, 0.1733, 0.0520, 0.0068, 0.1588, 0.0713, 0.0841], + device='cuda:1'), in_proj_covar=tensor([0.0089, 0.0176, 0.0180, 0.0174, 0.0076, 0.0167, 0.0188, 0.0182], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 20:26:30,185 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20768.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:26:34,036 INFO [train.py:898] (1/4) Epoch 6, batch 2600, loss[loss=0.1785, simple_loss=0.263, pruned_loss=0.04703, over 18277.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3021, pruned_loss=0.07374, over 3576106.10 frames. 
], batch size: 49, lr: 1.83e-02, grad_scale: 4.0 +2023-03-08 20:26:38,093 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20774.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:27:00,486 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20793.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:27:14,764 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0171, 3.9887, 5.1977, 3.2974, 4.3920, 2.7830, 3.1005, 2.3810], + device='cuda:1'), covar=tensor([0.0677, 0.0528, 0.0045, 0.0441, 0.0458, 0.1818, 0.1744, 0.1377], + device='cuda:1'), in_proj_covar=tensor([0.0176, 0.0188, 0.0091, 0.0147, 0.0198, 0.0235, 0.0224, 0.0190], + device='cuda:1'), out_proj_covar=tensor([1.6467e-04, 1.8313e-04, 9.0041e-05, 1.4175e-04, 1.9165e-04, 2.2578e-04, + 2.2051e-04, 1.8511e-04], device='cuda:1') +2023-03-08 20:27:27,134 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20816.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:27:32,635 INFO [train.py:898] (1/4) Epoch 6, batch 2650, loss[loss=0.1904, simple_loss=0.2686, pruned_loss=0.05607, over 18285.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.3018, pruned_loss=0.07335, over 3571530.48 frames. ], batch size: 47, lr: 1.83e-02, grad_scale: 4.0 +2023-03-08 20:27:43,393 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.822e+02 3.946e+02 4.764e+02 5.553e+02 1.236e+03, threshold=9.528e+02, percent-clipped=1.0 +2023-03-08 20:27:45,986 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.2801, 2.5180, 2.3590, 2.7875, 3.2727, 3.3842, 2.8747, 3.0457], + device='cuda:1'), covar=tensor([0.0246, 0.0309, 0.0719, 0.0369, 0.0222, 0.0156, 0.0357, 0.0303], + device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0076, 0.0131, 0.0109, 0.0077, 0.0063, 0.0097, 0.0100], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 20:28:19,128 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20860.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:28:31,289 INFO [train.py:898] (1/4) Epoch 6, batch 2700, loss[loss=0.1978, simple_loss=0.2777, pruned_loss=0.05891, over 18279.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3024, pruned_loss=0.07357, over 3561837.50 frames. ], batch size: 47, lr: 1.83e-02, grad_scale: 4.0 +2023-03-08 20:28:42,567 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7918, 5.2784, 5.3000, 5.3440, 4.9241, 5.1888, 4.4076, 5.2046], + device='cuda:1'), covar=tensor([0.0198, 0.0319, 0.0197, 0.0224, 0.0315, 0.0220, 0.1174, 0.0226], + device='cuda:1'), in_proj_covar=tensor([0.0138, 0.0181, 0.0166, 0.0167, 0.0169, 0.0180, 0.0246, 0.0160], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005], + device='cuda:1') +2023-03-08 20:29:14,534 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20908.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:29:28,984 INFO [train.py:898] (1/4) Epoch 6, batch 2750, loss[loss=0.2225, simple_loss=0.3006, pruned_loss=0.07224, over 18620.00 frames. ], tot_loss[loss=0.2256, simple_loss=0.303, pruned_loss=0.07406, over 3570980.41 frames. 
], batch size: 52, lr: 1.83e-02, grad_scale: 4.0 +2023-03-08 20:29:38,695 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.441e+02 4.086e+02 5.349e+02 6.220e+02 9.692e+02, threshold=1.070e+03, percent-clipped=2.0 +2023-03-08 20:29:42,968 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0098, 2.4484, 2.2326, 2.5634, 3.2118, 3.2387, 2.7079, 2.9134], + device='cuda:1'), covar=tensor([0.0283, 0.0279, 0.0754, 0.0418, 0.0254, 0.0178, 0.0417, 0.0311], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0081, 0.0137, 0.0113, 0.0082, 0.0065, 0.0102, 0.0102], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 20:29:57,135 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20944.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:30:27,633 INFO [train.py:898] (1/4) Epoch 6, batch 2800, loss[loss=0.2527, simple_loss=0.329, pruned_loss=0.08821, over 15927.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3019, pruned_loss=0.07313, over 3585969.84 frames. ], batch size: 94, lr: 1.82e-02, grad_scale: 8.0 +2023-03-08 20:31:22,748 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8393, 4.7983, 4.8593, 4.6485, 4.5575, 4.6682, 5.0492, 4.9853], + device='cuda:1'), covar=tensor([0.0065, 0.0081, 0.0064, 0.0086, 0.0076, 0.0106, 0.0077, 0.0081], + device='cuda:1'), in_proj_covar=tensor([0.0066, 0.0050, 0.0049, 0.0063, 0.0054, 0.0073, 0.0061, 0.0061], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 20:31:26,856 INFO [train.py:898] (1/4) Epoch 6, batch 2850, loss[loss=0.2127, simple_loss=0.2908, pruned_loss=0.06732, over 18374.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.3014, pruned_loss=0.07299, over 3591030.51 frames. ], batch size: 50, lr: 1.82e-02, grad_scale: 4.0 +2023-03-08 20:31:37,652 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.172e+02 4.192e+02 4.961e+02 6.331e+02 1.118e+03, threshold=9.922e+02, percent-clipped=2.0 +2023-03-08 20:31:50,317 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-08 20:31:54,447 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21044.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:32:03,983 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21052.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:32:24,753 INFO [train.py:898] (1/4) Epoch 6, batch 2900, loss[loss=0.2202, simple_loss=0.2918, pruned_loss=0.07432, over 18304.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.3009, pruned_loss=0.07312, over 3581048.89 frames. 
], batch size: 49, lr: 1.82e-02, grad_scale: 4.0 +2023-03-08 20:32:28,181 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21074.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:32:30,613 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5678, 1.9106, 2.9470, 2.7749, 3.6745, 5.1560, 4.3517, 4.3578], + device='cuda:1'), covar=tensor([0.0574, 0.1209, 0.1117, 0.0787, 0.1036, 0.0036, 0.0285, 0.0187], + device='cuda:1'), in_proj_covar=tensor([0.0179, 0.0229, 0.0216, 0.0211, 0.0305, 0.0130, 0.0199, 0.0159], + device='cuda:1'), out_proj_covar=tensor([1.2464e-04, 1.6081e-04, 1.5933e-04, 1.3703e-04, 2.1588e-04, 8.6813e-05, + 1.3629e-04, 1.1074e-04], device='cuda:1') +2023-03-08 20:32:49,272 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21092.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:33:14,763 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21113.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:33:20,403 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21118.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:33:23,484 INFO [train.py:898] (1/4) Epoch 6, batch 2950, loss[loss=0.2086, simple_loss=0.2878, pruned_loss=0.06475, over 18386.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.3011, pruned_loss=0.07293, over 3584814.98 frames. ], batch size: 46, lr: 1.82e-02, grad_scale: 4.0 +2023-03-08 20:33:24,830 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21122.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:33:32,717 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5317, 2.9168, 2.5198, 2.9169, 3.5769, 3.5264, 2.9895, 3.0590], + device='cuda:1'), covar=tensor([0.0182, 0.0173, 0.0727, 0.0265, 0.0184, 0.0115, 0.0278, 0.0243], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0080, 0.0135, 0.0112, 0.0082, 0.0065, 0.0103, 0.0102], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 20:33:33,500 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.673e+02 4.312e+02 5.615e+02 7.522e+02 2.010e+03, threshold=1.123e+03, percent-clipped=9.0 +2023-03-08 20:33:40,477 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1648, 2.4238, 3.2686, 3.2570, 2.4061, 3.4217, 3.3381, 2.4811], + device='cuda:1'), covar=tensor([0.0282, 0.0887, 0.0191, 0.0138, 0.1002, 0.0152, 0.0326, 0.0673], + device='cuda:1'), in_proj_covar=tensor([0.0158, 0.0200, 0.0110, 0.0119, 0.0196, 0.0154, 0.0163, 0.0180], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 20:34:22,616 INFO [train.py:898] (1/4) Epoch 6, batch 3000, loss[loss=0.2298, simple_loss=0.3097, pruned_loss=0.07495, over 18393.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.3009, pruned_loss=0.07261, over 3587543.41 frames. 
], batch size: 55, lr: 1.82e-02, grad_scale: 4.0 +2023-03-08 20:34:22,617 INFO [train.py:923] (1/4) Computing validation loss +2023-03-08 20:34:34,072 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1707, 4.3297, 5.2964, 3.6855, 4.4944, 3.0683, 3.1518, 2.6130], + device='cuda:1'), covar=tensor([0.0649, 0.0494, 0.0053, 0.0387, 0.0464, 0.1608, 0.1833, 0.1268], + device='cuda:1'), in_proj_covar=tensor([0.0168, 0.0186, 0.0090, 0.0143, 0.0194, 0.0224, 0.0219, 0.0183], + device='cuda:1'), out_proj_covar=tensor([1.5728e-04, 1.8008e-04, 8.8304e-05, 1.3771e-04, 1.8662e-04, 2.1559e-04, + 2.1528e-04, 1.7829e-04], device='cuda:1') +2023-03-08 20:34:34,666 INFO [train.py:932] (1/4) Epoch 6, validation: loss=0.1727, simple_loss=0.276, pruned_loss=0.03476, over 944034.00 frames. +2023-03-08 20:34:34,667 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-08 20:34:36,844 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7642, 4.7127, 4.8024, 3.4137, 3.7596, 3.4043, 2.3956, 2.2665], + device='cuda:1'), covar=tensor([0.0219, 0.0138, 0.0049, 0.0267, 0.0308, 0.0192, 0.0823, 0.0921], + device='cuda:1'), in_proj_covar=tensor([0.0045, 0.0042, 0.0038, 0.0051, 0.0071, 0.0047, 0.0069, 0.0073], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0005, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-08 20:34:41,525 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7053, 1.8239, 2.8166, 2.9574, 3.7178, 5.2917, 4.5483, 4.1791], + device='cuda:1'), covar=tensor([0.0631, 0.1386, 0.1313, 0.0837, 0.1195, 0.0033, 0.0283, 0.0275], + device='cuda:1'), in_proj_covar=tensor([0.0182, 0.0232, 0.0221, 0.0214, 0.0314, 0.0132, 0.0203, 0.0162], + device='cuda:1'), out_proj_covar=tensor([1.2654e-04, 1.6301e-04, 1.6308e-04, 1.3881e-04, 2.2181e-04, 8.8010e-05, + 1.3861e-04, 1.1312e-04], device='cuda:1') +2023-03-08 20:34:45,192 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21179.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:35:04,421 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6566, 3.4921, 3.2277, 2.9539, 3.2774, 2.5902, 2.6818, 3.5584], + device='cuda:1'), covar=tensor([0.0030, 0.0061, 0.0072, 0.0098, 0.0073, 0.0153, 0.0150, 0.0046], + device='cuda:1'), in_proj_covar=tensor([0.0062, 0.0082, 0.0075, 0.0116, 0.0075, 0.0119, 0.0125, 0.0066], + device='cuda:1'), out_proj_covar=tensor([8.7164e-05, 1.2688e-04, 1.1485e-04, 1.8504e-04, 1.1421e-04, 1.8623e-04, + 1.9547e-04, 9.8552e-05], device='cuda:1') +2023-03-08 20:35:33,721 INFO [train.py:898] (1/4) Epoch 6, batch 3050, loss[loss=0.2193, simple_loss=0.2992, pruned_loss=0.0697, over 18342.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2997, pruned_loss=0.07212, over 3590695.44 frames. 
], batch size: 56, lr: 1.81e-02, grad_scale: 4.0 +2023-03-08 20:35:45,055 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.742e+02 3.885e+02 4.643e+02 5.808e+02 1.137e+03, threshold=9.287e+02, percent-clipped=1.0 +2023-03-08 20:35:53,491 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7853, 4.4117, 4.5308, 3.0624, 3.6447, 3.5292, 2.6949, 2.2497], + device='cuda:1'), covar=tensor([0.0168, 0.0151, 0.0049, 0.0345, 0.0279, 0.0161, 0.0729, 0.0966], + device='cuda:1'), in_proj_covar=tensor([0.0045, 0.0043, 0.0038, 0.0051, 0.0072, 0.0048, 0.0069, 0.0074], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-08 20:36:02,192 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21244.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:36:32,477 INFO [train.py:898] (1/4) Epoch 6, batch 3100, loss[loss=0.1746, simple_loss=0.2516, pruned_loss=0.04883, over 17640.00 frames. ], tot_loss[loss=0.2218, simple_loss=0.2996, pruned_loss=0.07206, over 3589543.26 frames. ], batch size: 39, lr: 1.81e-02, grad_scale: 4.0 +2023-03-08 20:36:58,419 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21292.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:37:10,490 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6632, 3.6645, 5.2351, 4.4618, 3.3307, 2.9770, 4.5518, 5.2906], + device='cuda:1'), covar=tensor([0.0836, 0.1686, 0.0055, 0.0240, 0.0724, 0.1032, 0.0254, 0.0134], + device='cuda:1'), in_proj_covar=tensor([0.0130, 0.0191, 0.0071, 0.0142, 0.0160, 0.0163, 0.0144, 0.0099], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 20:37:21,534 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21312.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:37:27,911 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.76 vs. limit=5.0 +2023-03-08 20:37:31,451 INFO [train.py:898] (1/4) Epoch 6, batch 3150, loss[loss=0.1918, simple_loss=0.2683, pruned_loss=0.05763, over 18263.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.299, pruned_loss=0.07185, over 3574960.82 frames. ], batch size: 45, lr: 1.81e-02, grad_scale: 4.0 +2023-03-08 20:37:35,278 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7685, 1.8569, 2.8285, 3.0635, 3.6665, 5.1653, 4.5283, 4.3341], + device='cuda:1'), covar=tensor([0.0619, 0.1310, 0.1265, 0.0725, 0.1121, 0.0041, 0.0244, 0.0233], + device='cuda:1'), in_proj_covar=tensor([0.0177, 0.0225, 0.0214, 0.0208, 0.0305, 0.0130, 0.0197, 0.0157], + device='cuda:1'), out_proj_covar=tensor([1.2260e-04, 1.5784e-04, 1.5773e-04, 1.3433e-04, 2.1419e-04, 8.7235e-05, + 1.3445e-04, 1.0962e-04], device='cuda:1') +2023-03-08 20:37:39,628 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21328.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:37:41,468 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.840e+02 4.055e+02 4.775e+02 6.175e+02 1.308e+03, threshold=9.551e+02, percent-clipped=5.0 +2023-03-08 20:38:29,899 INFO [train.py:898] (1/4) Epoch 6, batch 3200, loss[loss=0.2157, simple_loss=0.2981, pruned_loss=0.06659, over 18489.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2985, pruned_loss=0.07187, over 3582029.45 frames. 
], batch size: 51, lr: 1.81e-02, grad_scale: 8.0 +2023-03-08 20:38:32,505 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21373.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:38:52,116 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21389.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:39:14,518 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21408.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:39:28,827 INFO [train.py:898] (1/4) Epoch 6, batch 3250, loss[loss=0.2006, simple_loss=0.2754, pruned_loss=0.06291, over 18150.00 frames. ], tot_loss[loss=0.221, simple_loss=0.2981, pruned_loss=0.07189, over 3583106.12 frames. ], batch size: 44, lr: 1.81e-02, grad_scale: 8.0 +2023-03-08 20:39:34,813 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7835, 4.8070, 4.9262, 4.7206, 4.5984, 4.6382, 5.1116, 5.1192], + device='cuda:1'), covar=tensor([0.0062, 0.0074, 0.0062, 0.0073, 0.0070, 0.0102, 0.0060, 0.0063], + device='cuda:1'), in_proj_covar=tensor([0.0068, 0.0049, 0.0050, 0.0064, 0.0054, 0.0073, 0.0061, 0.0061], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 20:39:39,005 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.706e+02 4.124e+02 5.141e+02 6.520e+02 1.245e+03, threshold=1.028e+03, percent-clipped=2.0 +2023-03-08 20:40:28,085 INFO [train.py:898] (1/4) Epoch 6, batch 3300, loss[loss=0.2113, simple_loss=0.2952, pruned_loss=0.06369, over 18255.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2985, pruned_loss=0.07165, over 3593362.52 frames. ], batch size: 47, lr: 1.80e-02, grad_scale: 8.0 +2023-03-08 20:40:31,898 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21474.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:40:34,628 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7974, 1.9723, 2.9513, 2.9247, 3.6956, 5.2625, 4.5003, 4.6348], + device='cuda:1'), covar=tensor([0.0550, 0.1174, 0.1241, 0.0754, 0.1101, 0.0033, 0.0257, 0.0153], + device='cuda:1'), in_proj_covar=tensor([0.0180, 0.0230, 0.0220, 0.0213, 0.0313, 0.0133, 0.0203, 0.0160], + device='cuda:1'), out_proj_covar=tensor([1.2476e-04, 1.6040e-04, 1.6172e-04, 1.3755e-04, 2.1891e-04, 8.9035e-05, + 1.3825e-04, 1.1151e-04], device='cuda:1') +2023-03-08 20:41:00,111 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5072, 5.0942, 5.5584, 5.3583, 5.2607, 6.1575, 5.7718, 5.5276], + device='cuda:1'), covar=tensor([0.0869, 0.0600, 0.0751, 0.0596, 0.1428, 0.0646, 0.0529, 0.1283], + device='cuda:1'), in_proj_covar=tensor([0.0256, 0.0195, 0.0205, 0.0198, 0.0240, 0.0287, 0.0191, 0.0278], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-08 20:41:27,334 INFO [train.py:898] (1/4) Epoch 6, batch 3350, loss[loss=0.2246, simple_loss=0.3095, pruned_loss=0.06985, over 18482.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2988, pruned_loss=0.07165, over 3593604.07 frames. ], batch size: 51, lr: 1.80e-02, grad_scale: 8.0 +2023-03-08 20:41:37,025 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. 
limit=2.0 +2023-03-08 20:41:37,413 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.459e+02 4.317e+02 5.194e+02 7.020e+02 1.247e+03, threshold=1.039e+03, percent-clipped=7.0 +2023-03-08 20:42:17,182 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7608, 3.6887, 5.2119, 2.9325, 4.3749, 2.8631, 2.9561, 2.1741], + device='cuda:1'), covar=tensor([0.0726, 0.0641, 0.0052, 0.0508, 0.0463, 0.1633, 0.1888, 0.1380], + device='cuda:1'), in_proj_covar=tensor([0.0176, 0.0192, 0.0091, 0.0147, 0.0207, 0.0233, 0.0229, 0.0191], + device='cuda:1'), out_proj_covar=tensor([1.6453e-04, 1.8573e-04, 9.0449e-05, 1.4098e-04, 1.9706e-04, 2.2351e-04, + 2.2386e-04, 1.8493e-04], device='cuda:1') +2023-03-08 20:42:25,861 INFO [train.py:898] (1/4) Epoch 6, batch 3400, loss[loss=0.2125, simple_loss=0.2827, pruned_loss=0.07115, over 18239.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2978, pruned_loss=0.07106, over 3599890.63 frames. ], batch size: 45, lr: 1.80e-02, grad_scale: 4.0 +2023-03-08 20:42:32,200 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.94 vs. limit=2.0 +2023-03-08 20:42:53,910 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21595.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:43:00,473 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2898, 4.3413, 2.4501, 4.5309, 5.3111, 2.6699, 3.8397, 3.7745], + device='cuda:1'), covar=tensor([0.0066, 0.1059, 0.1531, 0.0435, 0.0040, 0.1249, 0.0646, 0.0680], + device='cuda:1'), in_proj_covar=tensor([0.0089, 0.0187, 0.0182, 0.0177, 0.0078, 0.0170, 0.0192, 0.0187], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 20:43:15,651 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21613.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:43:24,528 INFO [train.py:898] (1/4) Epoch 6, batch 3450, loss[loss=0.2472, simple_loss=0.3278, pruned_loss=0.08334, over 17000.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2978, pruned_loss=0.07126, over 3605790.79 frames. ], batch size: 78, lr: 1.80e-02, grad_scale: 4.0 +2023-03-08 20:43:35,800 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.934e+02 4.048e+02 5.156e+02 6.271e+02 2.369e+03, threshold=1.031e+03, percent-clipped=5.0 +2023-03-08 20:44:05,761 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21656.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:44:20,618 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21668.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:44:21,960 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5959, 3.3980, 1.9456, 4.4161, 2.8898, 4.5362, 2.4106, 4.0152], + device='cuda:1'), covar=tensor([0.0452, 0.0706, 0.1380, 0.0278, 0.0888, 0.0176, 0.1010, 0.0309], + device='cuda:1'), in_proj_covar=tensor([0.0165, 0.0197, 0.0169, 0.0185, 0.0171, 0.0163, 0.0177, 0.0167], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 20:44:23,905 INFO [train.py:898] (1/4) Epoch 6, batch 3500, loss[loss=0.2028, simple_loss=0.273, pruned_loss=0.06634, over 17758.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2962, pruned_loss=0.07065, over 3605760.11 frames. 
], batch size: 39, lr: 1.80e-02, grad_scale: 4.0 +2023-03-08 20:44:27,536 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21674.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:44:27,877 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-03-08 20:44:38,486 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21684.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:45:04,847 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21708.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:45:08,309 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5018, 1.7119, 2.8939, 2.5283, 3.5187, 5.0401, 4.4512, 4.4389], + device='cuda:1'), covar=tensor([0.0642, 0.1417, 0.1265, 0.0948, 0.1148, 0.0043, 0.0252, 0.0179], + device='cuda:1'), in_proj_covar=tensor([0.0176, 0.0227, 0.0218, 0.0209, 0.0306, 0.0131, 0.0200, 0.0158], + device='cuda:1'), out_proj_covar=tensor([1.2188e-04, 1.5817e-04, 1.5926e-04, 1.3427e-04, 2.1390e-04, 8.7960e-05, + 1.3521e-04, 1.0956e-04], device='cuda:1') +2023-03-08 20:45:18,375 INFO [train.py:898] (1/4) Epoch 6, batch 3550, loss[loss=0.2681, simple_loss=0.3422, pruned_loss=0.09705, over 17213.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2973, pruned_loss=0.07093, over 3605112.77 frames. ], batch size: 78, lr: 1.79e-02, grad_scale: 4.0 +2023-03-08 20:45:24,792 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2019, 5.8242, 5.3293, 5.5290, 5.3218, 5.2660, 5.8429, 5.7958], + device='cuda:1'), covar=tensor([0.0998, 0.0579, 0.0492, 0.0589, 0.1418, 0.0659, 0.0501, 0.0531], + device='cuda:1'), in_proj_covar=tensor([0.0432, 0.0352, 0.0277, 0.0389, 0.0533, 0.0386, 0.0468, 0.0357], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0003], + device='cuda:1') +2023-03-08 20:45:28,806 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.680e+02 4.052e+02 4.672e+02 6.032e+02 1.745e+03, threshold=9.344e+02, percent-clipped=2.0 +2023-03-08 20:45:40,817 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8055, 2.8570, 4.4843, 4.3098, 2.6691, 4.6751, 4.1145, 3.1045], + device='cuda:1'), covar=tensor([0.0342, 0.1032, 0.0101, 0.0168, 0.1236, 0.0117, 0.0249, 0.0808], + device='cuda:1'), in_proj_covar=tensor([0.0161, 0.0202, 0.0113, 0.0118, 0.0198, 0.0154, 0.0163, 0.0182], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 20:45:54,979 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0547, 3.9777, 5.3579, 3.4649, 4.4637, 2.9668, 3.1794, 2.2868], + device='cuda:1'), covar=tensor([0.0620, 0.0529, 0.0038, 0.0407, 0.0433, 0.1594, 0.1738, 0.1327], + device='cuda:1'), in_proj_covar=tensor([0.0168, 0.0183, 0.0090, 0.0141, 0.0197, 0.0225, 0.0220, 0.0184], + device='cuda:1'), out_proj_covar=tensor([1.5645e-04, 1.7532e-04, 8.8358e-05, 1.3467e-04, 1.8836e-04, 2.1614e-04, + 2.1466e-04, 1.7764e-04], device='cuda:1') +2023-03-08 20:45:56,743 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21756.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:45:59,559 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.90 vs. 
limit=2.0 +2023-03-08 20:46:04,475 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21763.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:46:12,835 INFO [train.py:898] (1/4) Epoch 6, batch 3600, loss[loss=0.2005, simple_loss=0.2761, pruned_loss=0.06247, over 18498.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2968, pruned_loss=0.07071, over 3597325.72 frames. ], batch size: 47, lr: 1.79e-02, grad_scale: 8.0 +2023-03-08 20:46:16,444 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7284, 5.2681, 4.9030, 4.9989, 4.8348, 4.8401, 5.3196, 5.2590], + device='cuda:1'), covar=tensor([0.0930, 0.0548, 0.0766, 0.0663, 0.1318, 0.0598, 0.0480, 0.0540], + device='cuda:1'), in_proj_covar=tensor([0.0440, 0.0353, 0.0280, 0.0393, 0.0543, 0.0390, 0.0474, 0.0364], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-08 20:46:16,536 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21774.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:46:40,469 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0220, 3.3886, 3.4752, 2.8188, 2.9676, 2.8458, 2.3848, 2.1609], + device='cuda:1'), covar=tensor([0.0191, 0.0137, 0.0077, 0.0289, 0.0357, 0.0223, 0.0658, 0.0814], + device='cuda:1'), in_proj_covar=tensor([0.0045, 0.0042, 0.0037, 0.0051, 0.0073, 0.0047, 0.0069, 0.0073], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0005, 0.0003, 0.0004, 0.0004], + device='cuda:1') +2023-03-08 20:47:18,367 INFO [train.py:898] (1/4) Epoch 7, batch 0, loss[loss=0.2268, simple_loss=0.2972, pruned_loss=0.07825, over 18287.00 frames. ], tot_loss[loss=0.2268, simple_loss=0.2972, pruned_loss=0.07825, over 18287.00 frames. ], batch size: 49, lr: 1.68e-02, grad_scale: 8.0 +2023-03-08 20:47:18,368 INFO [train.py:923] (1/4) Computing validation loss +2023-03-08 20:47:30,267 INFO [train.py:932] (1/4) Epoch 7, validation: loss=0.175, simple_loss=0.2779, pruned_loss=0.0361, over 944034.00 frames. +2023-03-08 20:47:30,267 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-08 20:47:50,716 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21822.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:47:53,855 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21824.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:48:01,611 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.548e+02 4.151e+02 4.797e+02 6.024e+02 1.150e+03, threshold=9.595e+02, percent-clipped=4.0 +2023-03-08 20:48:03,190 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6350, 2.5044, 2.4600, 2.3510, 2.4894, 2.1287, 2.2687, 2.6255], + device='cuda:1'), covar=tensor([0.0029, 0.0050, 0.0060, 0.0071, 0.0065, 0.0125, 0.0107, 0.0040], + device='cuda:1'), in_proj_covar=tensor([0.0061, 0.0079, 0.0074, 0.0115, 0.0076, 0.0117, 0.0123, 0.0066], + device='cuda:1'), out_proj_covar=tensor([8.6422e-05, 1.2226e-04, 1.1288e-04, 1.8258e-04, 1.1474e-04, 1.8218e-04, + 1.9220e-04, 9.7033e-05], device='cuda:1') +2023-03-08 20:48:23,657 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21850.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:48:28,869 INFO [train.py:898] (1/4) Epoch 7, batch 50, loss[loss=0.2091, simple_loss=0.2946, pruned_loss=0.06178, over 18493.00 frames. 
], tot_loss[loss=0.2209, simple_loss=0.2991, pruned_loss=0.0714, over 808171.87 frames. ], batch size: 51, lr: 1.68e-02, grad_scale: 8.0 +2023-03-08 20:48:40,255 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9241, 4.9216, 4.8906, 4.7285, 4.5846, 4.8457, 5.1370, 5.1036], + device='cuda:1'), covar=tensor([0.0052, 0.0058, 0.0069, 0.0083, 0.0068, 0.0085, 0.0070, 0.0086], + device='cuda:1'), in_proj_covar=tensor([0.0064, 0.0048, 0.0048, 0.0062, 0.0052, 0.0072, 0.0061, 0.0060], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 20:49:27,624 INFO [train.py:898] (1/4) Epoch 7, batch 100, loss[loss=0.187, simple_loss=0.2657, pruned_loss=0.05411, over 18265.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2974, pruned_loss=0.07, over 1429450.58 frames. ], batch size: 45, lr: 1.67e-02, grad_scale: 8.0 +2023-03-08 20:49:34,982 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21911.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:49:58,912 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.330e+02 3.869e+02 4.841e+02 5.741e+02 1.343e+03, threshold=9.682e+02, percent-clipped=1.0 +2023-03-08 20:50:22,292 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21951.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:50:26,553 INFO [train.py:898] (1/4) Epoch 7, batch 150, loss[loss=0.2216, simple_loss=0.3059, pruned_loss=0.06862, over 18502.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2954, pruned_loss=0.069, over 1914126.28 frames. ], batch size: 53, lr: 1.67e-02, grad_scale: 8.0 +2023-03-08 20:50:41,210 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21968.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:50:42,102 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21969.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:50:43,543 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21970.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:51:01,831 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21984.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:51:30,410 INFO [train.py:898] (1/4) Epoch 7, batch 200, loss[loss=0.1805, simple_loss=0.26, pruned_loss=0.05051, over 18548.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2957, pruned_loss=0.06806, over 2299753.33 frames. ], batch size: 45, lr: 1.67e-02, grad_scale: 8.0 +2023-03-08 20:51:43,120 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22016.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:51:59,829 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.533e+02 3.799e+02 4.724e+02 5.611e+02 1.174e+03, threshold=9.448e+02, percent-clipped=1.0 +2023-03-08 20:52:00,299 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22031.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:52:01,876 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22032.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:52:28,977 INFO [train.py:898] (1/4) Epoch 7, batch 250, loss[loss=0.2144, simple_loss=0.3029, pruned_loss=0.06298, over 18345.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2946, pruned_loss=0.06751, over 2593334.64 frames. 
], batch size: 55, lr: 1.67e-02, grad_scale: 8.0 +2023-03-08 20:53:28,160 INFO [train.py:898] (1/4) Epoch 7, batch 300, loss[loss=0.1805, simple_loss=0.2662, pruned_loss=0.04745, over 18515.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2946, pruned_loss=0.0677, over 2804094.74 frames. ], batch size: 47, lr: 1.67e-02, grad_scale: 8.0 +2023-03-08 20:53:44,073 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22119.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:53:56,230 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22130.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:53:56,971 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.650e+02 3.954e+02 4.582e+02 6.003e+02 1.434e+03, threshold=9.164e+02, percent-clipped=5.0 +2023-03-08 20:54:04,222 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-08 20:54:26,849 INFO [train.py:898] (1/4) Epoch 7, batch 350, loss[loss=0.2041, simple_loss=0.2893, pruned_loss=0.05951, over 18356.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2958, pruned_loss=0.06801, over 2980128.84 frames. ], batch size: 46, lr: 1.67e-02, grad_scale: 8.0 +2023-03-08 20:54:50,714 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22176.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:55:08,645 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22191.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:55:25,936 INFO [train.py:898] (1/4) Epoch 7, batch 400, loss[loss=0.2156, simple_loss=0.2888, pruned_loss=0.07119, over 18349.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2965, pruned_loss=0.06847, over 3107263.85 frames. ], batch size: 46, lr: 1.66e-02, grad_scale: 8.0 +2023-03-08 20:55:27,303 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22206.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:55:35,538 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-08 20:55:37,476 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22215.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:55:55,641 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.341e+02 3.724e+02 4.646e+02 6.354e+02 1.977e+03, threshold=9.292e+02, percent-clipped=7.0 +2023-03-08 20:55:57,133 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.4971, 5.0816, 5.0363, 5.0360, 4.7131, 4.9320, 4.2991, 4.9057], + device='cuda:1'), covar=tensor([0.0242, 0.0277, 0.0210, 0.0238, 0.0315, 0.0221, 0.1139, 0.0273], + device='cuda:1'), in_proj_covar=tensor([0.0141, 0.0183, 0.0169, 0.0178, 0.0175, 0.0183, 0.0256, 0.0168], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005], + device='cuda:1') +2023-03-08 20:56:02,959 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22237.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:56:02,979 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22237.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:56:20,911 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22251.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:56:25,017 INFO [train.py:898] (1/4) Epoch 7, batch 450, loss[loss=0.2489, simple_loss=0.3221, pruned_loss=0.0878, over 17131.00 frames. 
], tot_loss[loss=0.2165, simple_loss=0.296, pruned_loss=0.06853, over 3208794.26 frames. ], batch size: 78, lr: 1.66e-02, grad_scale: 8.0 +2023-03-08 20:56:34,439 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0268, 4.9831, 5.0678, 4.8935, 4.9125, 4.9552, 5.2086, 5.1969], + device='cuda:1'), covar=tensor([0.0049, 0.0070, 0.0056, 0.0078, 0.0053, 0.0082, 0.0143, 0.0085], + device='cuda:1'), in_proj_covar=tensor([0.0066, 0.0048, 0.0050, 0.0064, 0.0054, 0.0073, 0.0062, 0.0062], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 20:56:41,317 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22269.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:56:49,392 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22276.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:56:51,666 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4580, 2.6327, 4.3087, 3.9019, 2.2313, 4.4986, 3.7878, 2.6211], + device='cuda:1'), covar=tensor([0.0416, 0.1240, 0.0122, 0.0217, 0.1379, 0.0120, 0.0304, 0.0951], + device='cuda:1'), in_proj_covar=tensor([0.0160, 0.0206, 0.0113, 0.0120, 0.0199, 0.0154, 0.0166, 0.0183], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 20:57:14,965 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22298.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:57:16,448 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22299.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:57:23,806 INFO [train.py:898] (1/4) Epoch 7, batch 500, loss[loss=0.2201, simple_loss=0.3015, pruned_loss=0.06933, over 18333.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2942, pruned_loss=0.06783, over 3309273.14 frames. ], batch size: 56, lr: 1.66e-02, grad_scale: 8.0 +2023-03-08 20:57:30,205 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-08 20:57:37,448 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22317.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:57:47,841 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22326.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:57:53,235 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.810e+02 3.764e+02 4.516e+02 5.475e+02 8.626e+02, threshold=9.031e+02, percent-clipped=0.0 +2023-03-08 20:58:23,009 INFO [train.py:898] (1/4) Epoch 7, batch 550, loss[loss=0.2186, simple_loss=0.3071, pruned_loss=0.06509, over 18296.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2954, pruned_loss=0.0689, over 3367045.83 frames. ], batch size: 54, lr: 1.66e-02, grad_scale: 8.0 +2023-03-08 20:58:57,613 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22385.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:59:21,869 INFO [train.py:898] (1/4) Epoch 7, batch 600, loss[loss=0.1997, simple_loss=0.2749, pruned_loss=0.06231, over 18478.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2974, pruned_loss=0.07013, over 3400070.17 frames. 
], batch size: 44, lr: 1.66e-02, grad_scale: 8.0 +2023-03-08 20:59:29,762 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2148, 5.3469, 2.6349, 5.0826, 5.0219, 5.3844, 5.1306, 2.6716], + device='cuda:1'), covar=tensor([0.0140, 0.0054, 0.0786, 0.0081, 0.0075, 0.0053, 0.0096, 0.0931], + device='cuda:1'), in_proj_covar=tensor([0.0065, 0.0053, 0.0082, 0.0067, 0.0063, 0.0054, 0.0067, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-08 20:59:38,732 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22419.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:59:52,063 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.568e+02 3.797e+02 4.706e+02 5.910e+02 1.335e+03, threshold=9.411e+02, percent-clipped=3.0 +2023-03-08 21:00:02,587 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5109, 3.7669, 3.9671, 3.0020, 3.3154, 3.1584, 2.3119, 1.8720], + device='cuda:1'), covar=tensor([0.0175, 0.0212, 0.0072, 0.0273, 0.0313, 0.0213, 0.0841, 0.0990], + device='cuda:1'), in_proj_covar=tensor([0.0044, 0.0041, 0.0037, 0.0051, 0.0070, 0.0046, 0.0068, 0.0072], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0005, 0.0003, 0.0004, 0.0004], + device='cuda:1') +2023-03-08 21:00:09,468 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22446.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:00:19,242 INFO [train.py:898] (1/4) Epoch 7, batch 650, loss[loss=0.1861, simple_loss=0.2657, pruned_loss=0.05328, over 18504.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2954, pruned_loss=0.06918, over 3443855.30 frames. ], batch size: 47, lr: 1.65e-02, grad_scale: 8.0 +2023-03-08 21:00:35,194 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22467.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:00:43,273 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3682, 5.9808, 5.4018, 5.6943, 5.4372, 5.4360, 6.0498, 5.9760], + device='cuda:1'), covar=tensor([0.1156, 0.0549, 0.0410, 0.0611, 0.1406, 0.0609, 0.0413, 0.0491], + device='cuda:1'), in_proj_covar=tensor([0.0443, 0.0364, 0.0282, 0.0396, 0.0539, 0.0396, 0.0480, 0.0369], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-08 21:00:46,786 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0243, 4.9096, 2.5624, 4.7218, 4.6664, 4.8840, 4.6996, 2.5697], + device='cuda:1'), covar=tensor([0.0147, 0.0066, 0.0779, 0.0083, 0.0068, 0.0076, 0.0105, 0.0980], + device='cuda:1'), in_proj_covar=tensor([0.0065, 0.0054, 0.0083, 0.0068, 0.0063, 0.0054, 0.0068, 0.0087], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-08 21:00:56,991 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22486.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:01:18,261 INFO [train.py:898] (1/4) Epoch 7, batch 700, loss[loss=0.2147, simple_loss=0.3011, pruned_loss=0.06415, over 18569.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2956, pruned_loss=0.06891, over 3479482.06 frames. 
], batch size: 54, lr: 1.65e-02, grad_scale: 8.0 +2023-03-08 21:01:19,517 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22506.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:01:49,202 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.755e+02 3.999e+02 4.789e+02 5.664e+02 1.125e+03, threshold=9.578e+02, percent-clipped=2.0 +2023-03-08 21:01:50,709 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22532.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:02:15,569 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22554.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:02:16,496 INFO [train.py:898] (1/4) Epoch 7, batch 750, loss[loss=0.3041, simple_loss=0.3526, pruned_loss=0.1278, over 12219.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2947, pruned_loss=0.06843, over 3509072.75 frames. ], batch size: 130, lr: 1.65e-02, grad_scale: 8.0 +2023-03-08 21:02:37,341 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22571.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:02:48,378 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3577, 5.0316, 5.4764, 5.3316, 5.2204, 6.0000, 5.7677, 5.4306], + device='cuda:1'), covar=tensor([0.0812, 0.0583, 0.0590, 0.0539, 0.1279, 0.0772, 0.0460, 0.1508], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0198, 0.0209, 0.0204, 0.0247, 0.0300, 0.0194, 0.0293], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-08 21:03:01,764 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22593.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:03:15,338 INFO [train.py:898] (1/4) Epoch 7, batch 800, loss[loss=0.1867, simple_loss=0.2746, pruned_loss=0.04936, over 18550.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2947, pruned_loss=0.0684, over 3529152.84 frames. ], batch size: 49, lr: 1.65e-02, grad_scale: 8.0 +2023-03-08 21:03:41,301 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22626.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:03:46,427 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.314e+02 4.023e+02 4.944e+02 6.162e+02 1.524e+03, threshold=9.887e+02, percent-clipped=2.0 +2023-03-08 21:03:55,289 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22639.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:04:13,335 INFO [train.py:898] (1/4) Epoch 7, batch 850, loss[loss=0.1963, simple_loss=0.278, pruned_loss=0.05734, over 18269.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.295, pruned_loss=0.06858, over 3542856.21 frames. ], batch size: 45, lr: 1.65e-02, grad_scale: 8.0 +2023-03-08 21:04:36,639 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22674.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:05:07,142 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22700.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 21:05:12,415 INFO [train.py:898] (1/4) Epoch 7, batch 900, loss[loss=0.2, simple_loss=0.2662, pruned_loss=0.06685, over 18401.00 frames. ], tot_loss[loss=0.215, simple_loss=0.294, pruned_loss=0.06798, over 3558028.45 frames. 
], batch size: 42, lr: 1.65e-02, grad_scale: 8.0 +2023-03-08 21:05:44,110 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.332e+02 3.928e+02 4.679e+02 5.681e+02 1.388e+03, threshold=9.358e+02, percent-clipped=3.0 +2023-03-08 21:05:55,319 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22741.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:05:55,617 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9751, 3.9870, 5.2138, 2.8781, 4.3984, 2.6521, 2.8838, 2.1573], + device='cuda:1'), covar=tensor([0.0719, 0.0541, 0.0047, 0.0582, 0.0493, 0.1963, 0.2014, 0.1500], + device='cuda:1'), in_proj_covar=tensor([0.0177, 0.0192, 0.0089, 0.0147, 0.0204, 0.0232, 0.0233, 0.0192], + device='cuda:1'), out_proj_covar=tensor([1.6384e-04, 1.8286e-04, 8.7166e-05, 1.3996e-04, 1.9370e-04, 2.2089e-04, + 2.2431e-04, 1.8461e-04], device='cuda:1') +2023-03-08 21:06:11,244 INFO [train.py:898] (1/4) Epoch 7, batch 950, loss[loss=0.2413, simple_loss=0.3204, pruned_loss=0.08115, over 18148.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2939, pruned_loss=0.06781, over 3565331.41 frames. ], batch size: 62, lr: 1.64e-02, grad_scale: 8.0 +2023-03-08 21:06:49,074 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22786.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:06:58,134 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22794.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:07:10,655 INFO [train.py:898] (1/4) Epoch 7, batch 1000, loss[loss=0.2504, simple_loss=0.3186, pruned_loss=0.09109, over 12681.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2943, pruned_loss=0.06809, over 3555715.87 frames. ], batch size: 129, lr: 1.64e-02, grad_scale: 8.0 +2023-03-08 21:07:33,440 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22825.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:07:41,078 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.326e+02 3.793e+02 5.006e+02 6.046e+02 1.509e+03, threshold=1.001e+03, percent-clipped=4.0 +2023-03-08 21:07:42,900 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22832.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:07:45,169 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22834.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:08:09,936 INFO [train.py:898] (1/4) Epoch 7, batch 1050, loss[loss=0.2144, simple_loss=0.292, pruned_loss=0.06845, over 18273.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2951, pruned_loss=0.06812, over 3560665.63 frames. 
], batch size: 47, lr: 1.64e-02, grad_scale: 8.0 +2023-03-08 21:08:10,403 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22855.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:08:26,314 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9222, 4.8940, 5.0420, 4.7341, 4.6358, 4.7687, 5.1460, 5.2075], + device='cuda:1'), covar=tensor([0.0056, 0.0067, 0.0059, 0.0089, 0.0066, 0.0090, 0.0065, 0.0086], + device='cuda:1'), in_proj_covar=tensor([0.0069, 0.0051, 0.0052, 0.0066, 0.0056, 0.0077, 0.0066, 0.0065], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 21:08:28,489 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22871.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:08:33,582 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-08 21:08:38,431 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22880.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:08:47,246 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22886.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:08:55,767 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22893.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:09:01,929 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-03-08 21:09:09,172 INFO [train.py:898] (1/4) Epoch 7, batch 1100, loss[loss=0.2149, simple_loss=0.2968, pruned_loss=0.06653, over 18372.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2937, pruned_loss=0.0679, over 3559722.38 frames. ], batch size: 50, lr: 1.64e-02, grad_scale: 8.0 +2023-03-08 21:09:25,080 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22919.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:09:38,077 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.302e+02 3.634e+02 4.460e+02 5.357e+02 1.645e+03, threshold=8.921e+02, percent-clipped=3.0 +2023-03-08 21:09:51,090 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22941.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:09:55,619 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.58 vs. limit=2.0 +2023-03-08 21:10:07,845 INFO [train.py:898] (1/4) Epoch 7, batch 1150, loss[loss=0.2329, simple_loss=0.3103, pruned_loss=0.07777, over 18282.00 frames. ], tot_loss[loss=0.2165, simple_loss=0.2956, pruned_loss=0.06872, over 3552535.31 frames. ], batch size: 57, lr: 1.64e-02, grad_scale: 8.0 +2023-03-08 21:10:46,739 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.69 vs. limit=5.0 +2023-03-08 21:10:55,033 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22995.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 21:11:06,265 INFO [train.py:898] (1/4) Epoch 7, batch 1200, loss[loss=0.2028, simple_loss=0.2893, pruned_loss=0.05817, over 18490.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2955, pruned_loss=0.06845, over 3564581.56 frames. ], batch size: 51, lr: 1.64e-02, grad_scale: 8.0 +2023-03-08 21:11:09,154 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. 
limit=2.0 +2023-03-08 21:11:35,899 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.453e+02 4.180e+02 4.856e+02 6.104e+02 1.411e+03, threshold=9.713e+02, percent-clipped=4.0 +2023-03-08 21:11:40,884 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2581, 5.3279, 2.8293, 5.2204, 5.0602, 5.4371, 5.0872, 2.5793], + device='cuda:1'), covar=tensor([0.0133, 0.0054, 0.0737, 0.0053, 0.0064, 0.0045, 0.0111, 0.1094], + device='cuda:1'), in_proj_covar=tensor([0.0067, 0.0055, 0.0084, 0.0069, 0.0065, 0.0054, 0.0069, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-08 21:11:49,440 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23041.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:12:05,439 INFO [train.py:898] (1/4) Epoch 7, batch 1250, loss[loss=0.1855, simple_loss=0.2574, pruned_loss=0.05677, over 18361.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2961, pruned_loss=0.06876, over 3562120.33 frames. ], batch size: 42, lr: 1.63e-02, grad_scale: 8.0 +2023-03-08 21:12:44,841 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23089.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:13:04,077 INFO [train.py:898] (1/4) Epoch 7, batch 1300, loss[loss=0.1867, simple_loss=0.2632, pruned_loss=0.05511, over 18447.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2961, pruned_loss=0.06857, over 3561555.15 frames. ], batch size: 43, lr: 1.63e-02, grad_scale: 8.0 +2023-03-08 21:13:07,858 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23108.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:13:25,208 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.4083, 1.7987, 2.6417, 2.7217, 3.2907, 5.0971, 4.3745, 3.8589], + device='cuda:1'), covar=tensor([0.0995, 0.2110, 0.2090, 0.1187, 0.2032, 0.0063, 0.0381, 0.0425], + device='cuda:1'), in_proj_covar=tensor([0.0190, 0.0241, 0.0237, 0.0218, 0.0319, 0.0139, 0.0212, 0.0169], + device='cuda:1'), out_proj_covar=tensor([1.2930e-04, 1.6431e-04, 1.6950e-04, 1.3818e-04, 2.1880e-04, 9.1736e-05, + 1.4074e-04, 1.1510e-04], device='cuda:1') +2023-03-08 21:13:33,526 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.650e+02 3.767e+02 4.521e+02 6.073e+02 1.537e+03, threshold=9.042e+02, percent-clipped=6.0 +2023-03-08 21:13:42,154 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-08 21:13:57,460 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23150.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:14:03,016 INFO [train.py:898] (1/4) Epoch 7, batch 1350, loss[loss=0.2437, simple_loss=0.315, pruned_loss=0.0862, over 12993.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.295, pruned_loss=0.06786, over 3578205.11 frames. 
], batch size: 129, lr: 1.63e-02, grad_scale: 8.0 +2023-03-08 21:14:16,991 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4246, 4.4389, 2.7650, 4.5239, 5.4568, 2.5663, 4.2618, 4.3575], + device='cuda:1'), covar=tensor([0.0055, 0.0986, 0.1354, 0.0457, 0.0035, 0.1288, 0.0503, 0.0537], + device='cuda:1'), in_proj_covar=tensor([0.0089, 0.0191, 0.0177, 0.0176, 0.0076, 0.0165, 0.0189, 0.0185], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 21:14:19,165 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23169.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:14:32,503 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23181.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:14:40,682 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9467, 4.8405, 4.9106, 4.8198, 4.7756, 4.8238, 5.1619, 5.2128], + device='cuda:1'), covar=tensor([0.0059, 0.0077, 0.0067, 0.0080, 0.0057, 0.0088, 0.0115, 0.0099], + device='cuda:1'), in_proj_covar=tensor([0.0067, 0.0049, 0.0050, 0.0065, 0.0054, 0.0074, 0.0063, 0.0062], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 21:15:02,205 INFO [train.py:898] (1/4) Epoch 7, batch 1400, loss[loss=0.1892, simple_loss=0.2651, pruned_loss=0.05661, over 18436.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2934, pruned_loss=0.06759, over 3576868.34 frames. ], batch size: 43, lr: 1.63e-02, grad_scale: 8.0 +2023-03-08 21:15:16,092 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23217.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:15:18,687 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.18 vs. limit=5.0 +2023-03-08 21:15:29,745 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23229.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:15:31,619 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.272e+02 3.620e+02 4.462e+02 5.791e+02 9.661e+02, threshold=8.924e+02, percent-clipped=4.0 +2023-03-08 21:16:00,132 INFO [train.py:898] (1/4) Epoch 7, batch 1450, loss[loss=0.2204, simple_loss=0.3048, pruned_loss=0.06805, over 18493.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.294, pruned_loss=0.06781, over 3576078.96 frames. ], batch size: 53, lr: 1.63e-02, grad_scale: 8.0 +2023-03-08 21:16:02,600 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-08 21:16:26,941 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23278.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:16:40,576 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23290.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:16:46,275 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23295.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:16:57,593 INFO [train.py:898] (1/4) Epoch 7, batch 1500, loss[loss=0.1997, simple_loss=0.271, pruned_loss=0.06423, over 18395.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2937, pruned_loss=0.0673, over 3590253.73 frames. 
], batch size: 42, lr: 1.63e-02, grad_scale: 8.0 +2023-03-08 21:17:23,570 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3435, 5.9614, 5.3955, 5.6686, 5.3917, 5.5013, 5.9931, 5.9370], + device='cuda:1'), covar=tensor([0.1213, 0.0593, 0.0469, 0.0683, 0.1504, 0.0647, 0.0542, 0.0594], + device='cuda:1'), in_proj_covar=tensor([0.0455, 0.0368, 0.0283, 0.0404, 0.0550, 0.0405, 0.0487, 0.0385], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-08 21:17:27,943 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.706e+02 4.019e+02 4.944e+02 6.041e+02 1.007e+03, threshold=9.887e+02, percent-clipped=2.0 +2023-03-08 21:17:41,545 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23343.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:17:55,129 INFO [train.py:898] (1/4) Epoch 7, batch 1550, loss[loss=0.2267, simple_loss=0.3049, pruned_loss=0.07427, over 18301.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.294, pruned_loss=0.06744, over 3592446.26 frames. ], batch size: 57, lr: 1.62e-02, grad_scale: 8.0 +2023-03-08 21:18:29,648 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-08 21:18:53,715 INFO [train.py:898] (1/4) Epoch 7, batch 1600, loss[loss=0.2061, simple_loss=0.2887, pruned_loss=0.06177, over 18479.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2939, pruned_loss=0.06714, over 3596244.13 frames. ], batch size: 51, lr: 1.62e-02, grad_scale: 8.0 +2023-03-08 21:19:04,731 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3133, 4.7242, 4.3965, 4.5489, 4.3112, 4.3668, 4.7623, 4.6839], + device='cuda:1'), covar=tensor([0.1082, 0.0673, 0.1693, 0.0633, 0.1370, 0.0688, 0.0629, 0.0680], + device='cuda:1'), in_proj_covar=tensor([0.0456, 0.0368, 0.0282, 0.0407, 0.0549, 0.0409, 0.0491, 0.0383], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-08 21:19:25,462 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 3.819e+02 4.598e+02 5.727e+02 1.079e+03, threshold=9.196e+02, percent-clipped=2.0 +2023-03-08 21:19:36,223 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-08 21:19:47,155 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23450.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:19:49,900 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-08 21:19:52,541 INFO [train.py:898] (1/4) Epoch 7, batch 1650, loss[loss=0.203, simple_loss=0.2848, pruned_loss=0.06061, over 18504.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2938, pruned_loss=0.06719, over 3601089.78 frames. 
], batch size: 51, lr: 1.62e-02, grad_scale: 8.0 +2023-03-08 21:20:04,803 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23464.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:20:24,338 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23481.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:20:32,461 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23488.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:20:43,626 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23498.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:20:51,424 INFO [train.py:898] (1/4) Epoch 7, batch 1700, loss[loss=0.179, simple_loss=0.2533, pruned_loss=0.05233, over 18422.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2935, pruned_loss=0.06712, over 3604316.80 frames. ], batch size: 43, lr: 1.62e-02, grad_scale: 8.0 +2023-03-08 21:21:21,209 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23529.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:21:23,333 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.409e+02 3.809e+02 4.583e+02 5.657e+02 1.396e+03, threshold=9.165e+02, percent-clipped=6.0 +2023-03-08 21:21:44,349 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23549.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 21:21:50,035 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1377, 4.2011, 2.3957, 4.3590, 5.1449, 2.3043, 3.9506, 4.2866], + device='cuda:1'), covar=tensor([0.0059, 0.0748, 0.1311, 0.0404, 0.0041, 0.1165, 0.0526, 0.0450], + device='cuda:1'), in_proj_covar=tensor([0.0091, 0.0194, 0.0180, 0.0180, 0.0078, 0.0166, 0.0192, 0.0187], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 21:21:50,752 INFO [train.py:898] (1/4) Epoch 7, batch 1750, loss[loss=0.217, simple_loss=0.2981, pruned_loss=0.06792, over 18504.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2932, pruned_loss=0.06702, over 3596385.52 frames. ], batch size: 59, lr: 1.62e-02, grad_scale: 16.0 +2023-03-08 21:22:13,856 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23573.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:22:27,629 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23585.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:22:50,502 INFO [train.py:898] (1/4) Epoch 7, batch 1800, loss[loss=0.2198, simple_loss=0.2978, pruned_loss=0.07088, over 18277.00 frames. ], tot_loss[loss=0.2138, simple_loss=0.2935, pruned_loss=0.06702, over 3601728.36 frames. ], batch size: 49, lr: 1.62e-02, grad_scale: 16.0 +2023-03-08 21:22:56,522 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.28 vs. limit=2.0 +2023-03-08 21:22:57,571 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-08 21:23:21,005 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.500e+02 3.624e+02 4.625e+02 5.651e+02 1.017e+03, threshold=9.251e+02, percent-clipped=3.0 +2023-03-08 21:23:47,613 INFO [train.py:898] (1/4) Epoch 7, batch 1850, loss[loss=0.2179, simple_loss=0.2978, pruned_loss=0.06902, over 16317.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2924, pruned_loss=0.06734, over 3587141.90 frames. 
], batch size: 94, lr: 1.61e-02, grad_scale: 16.0 +2023-03-08 21:24:06,195 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.57 vs. limit=2.0 +2023-03-08 21:24:45,520 INFO [train.py:898] (1/4) Epoch 7, batch 1900, loss[loss=0.2151, simple_loss=0.3008, pruned_loss=0.0647, over 18409.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2922, pruned_loss=0.0671, over 3599594.68 frames. ], batch size: 48, lr: 1.61e-02, grad_scale: 16.0 +2023-03-08 21:25:00,514 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6757, 4.5585, 4.6342, 3.4619, 3.8117, 3.7329, 2.5484, 1.9820], + device='cuda:1'), covar=tensor([0.0245, 0.0124, 0.0079, 0.0255, 0.0308, 0.0183, 0.0791, 0.1118], + device='cuda:1'), in_proj_covar=tensor([0.0048, 0.0043, 0.0041, 0.0051, 0.0073, 0.0049, 0.0069, 0.0074], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-08 21:25:17,162 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.556e+02 4.080e+02 4.948e+02 6.191e+02 1.850e+03, threshold=9.895e+02, percent-clipped=8.0 +2023-03-08 21:25:43,817 INFO [train.py:898] (1/4) Epoch 7, batch 1950, loss[loss=0.1861, simple_loss=0.2592, pruned_loss=0.05652, over 18488.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2926, pruned_loss=0.0674, over 3600622.68 frames. ], batch size: 44, lr: 1.61e-02, grad_scale: 16.0 +2023-03-08 21:25:54,218 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23764.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:25:58,554 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3946, 5.9892, 5.3560, 5.6646, 5.4813, 5.4482, 6.0238, 6.0020], + device='cuda:1'), covar=tensor([0.1125, 0.0603, 0.0385, 0.0662, 0.1311, 0.0696, 0.0424, 0.0499], + device='cuda:1'), in_proj_covar=tensor([0.0454, 0.0367, 0.0285, 0.0409, 0.0552, 0.0405, 0.0487, 0.0385], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-08 21:26:42,015 INFO [train.py:898] (1/4) Epoch 7, batch 2000, loss[loss=0.242, simple_loss=0.3292, pruned_loss=0.07734, over 18579.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2931, pruned_loss=0.0674, over 3598356.52 frames. ], batch size: 54, lr: 1.61e-02, grad_scale: 8.0 +2023-03-08 21:26:50,350 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23812.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:27:13,551 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.566e+02 3.909e+02 4.831e+02 5.895e+02 1.179e+03, threshold=9.662e+02, percent-clipped=2.0 +2023-03-08 21:27:28,339 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.42 vs. 
limit=5.0 +2023-03-08 21:27:29,159 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23844.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 21:27:31,381 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5933, 5.5402, 5.0839, 5.5347, 5.5129, 4.7990, 5.4630, 5.1679], + device='cuda:1'), covar=tensor([0.0337, 0.0332, 0.1454, 0.0640, 0.0474, 0.0432, 0.0346, 0.0753], + device='cuda:1'), in_proj_covar=tensor([0.0345, 0.0384, 0.0548, 0.0310, 0.0289, 0.0364, 0.0387, 0.0490], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-08 21:27:40,897 INFO [train.py:898] (1/4) Epoch 7, batch 2050, loss[loss=0.2129, simple_loss=0.2848, pruned_loss=0.07049, over 18383.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2924, pruned_loss=0.06721, over 3593763.83 frames. ], batch size: 50, lr: 1.61e-02, grad_scale: 8.0 +2023-03-08 21:27:55,981 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2317, 4.3725, 2.4767, 4.2094, 5.3060, 2.6423, 3.3726, 3.4705], + device='cuda:1'), covar=tensor([0.0076, 0.0868, 0.1510, 0.0567, 0.0042, 0.1264, 0.0870, 0.1117], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0191, 0.0177, 0.0176, 0.0077, 0.0164, 0.0190, 0.0184], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-08 21:28:01,757 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23873.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:28:16,315 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23885.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:28:38,829 INFO [train.py:898] (1/4) Epoch 7, batch 2100, loss[loss=0.2223, simple_loss=0.3015, pruned_loss=0.07159, over 17155.00 frames. ], tot_loss[loss=0.2139, simple_loss=0.2927, pruned_loss=0.06749, over 3572054.65 frames. ], batch size: 78, lr: 1.61e-02, grad_scale: 8.0 +2023-03-08 21:28:57,400 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23921.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:28:59,761 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9466, 4.9101, 4.5149, 4.8498, 4.9222, 4.2926, 4.8279, 4.6162], + device='cuda:1'), covar=tensor([0.0331, 0.0382, 0.1241, 0.0645, 0.0417, 0.0399, 0.0338, 0.0705], + device='cuda:1'), in_proj_covar=tensor([0.0344, 0.0383, 0.0545, 0.0310, 0.0285, 0.0362, 0.0385, 0.0485], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-08 21:29:09,534 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.380e+02 3.638e+02 4.339e+02 5.858e+02 1.130e+03, threshold=8.677e+02, percent-clipped=1.0 +2023-03-08 21:29:10,878 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23933.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:29:37,937 INFO [train.py:898] (1/4) Epoch 7, batch 2150, loss[loss=0.2293, simple_loss=0.3106, pruned_loss=0.07405, over 18371.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2928, pruned_loss=0.06692, over 3582815.89 frames. ], batch size: 56, lr: 1.60e-02, grad_scale: 8.0 +2023-03-08 21:30:41,411 INFO [train.py:898] (1/4) Epoch 7, batch 2200, loss[loss=0.184, simple_loss=0.26, pruned_loss=0.05393, over 17682.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2921, pruned_loss=0.06682, over 3580655.18 frames. 
], batch size: 39, lr: 1.60e-02, grad_scale: 8.0 +2023-03-08 21:31:11,878 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.180e+02 3.947e+02 4.705e+02 5.545e+02 1.194e+03, threshold=9.409e+02, percent-clipped=3.0 +2023-03-08 21:31:17,872 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24037.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:31:38,426 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2738, 4.7318, 4.3350, 4.5153, 4.3086, 4.4132, 4.7681, 4.7122], + device='cuda:1'), covar=tensor([0.0949, 0.0662, 0.1771, 0.0689, 0.1351, 0.0622, 0.0590, 0.0625], + device='cuda:1'), in_proj_covar=tensor([0.0452, 0.0369, 0.0285, 0.0404, 0.0549, 0.0406, 0.0489, 0.0381], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-08 21:31:40,402 INFO [train.py:898] (1/4) Epoch 7, batch 2250, loss[loss=0.2113, simple_loss=0.2991, pruned_loss=0.06172, over 17836.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2935, pruned_loss=0.06759, over 3567108.56 frames. ], batch size: 70, lr: 1.60e-02, grad_scale: 8.0 +2023-03-08 21:31:57,713 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4449, 6.0907, 5.3539, 5.7695, 5.6583, 5.4846, 6.1527, 6.0835], + device='cuda:1'), covar=tensor([0.1064, 0.0685, 0.0474, 0.0697, 0.1288, 0.0688, 0.0479, 0.0590], + device='cuda:1'), in_proj_covar=tensor([0.0457, 0.0373, 0.0287, 0.0408, 0.0556, 0.0410, 0.0495, 0.0386], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-08 21:32:15,762 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.59 vs. limit=5.0 +2023-03-08 21:32:19,334 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1614, 4.9703, 5.3181, 5.1401, 5.2268, 5.9010, 5.5778, 5.3850], + device='cuda:1'), covar=tensor([0.0823, 0.0638, 0.0655, 0.0555, 0.1312, 0.0610, 0.0538, 0.1347], + device='cuda:1'), in_proj_covar=tensor([0.0272, 0.0204, 0.0214, 0.0211, 0.0251, 0.0304, 0.0201, 0.0293], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-08 21:32:30,481 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24098.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:32:38,526 INFO [train.py:898] (1/4) Epoch 7, batch 2300, loss[loss=0.2361, simple_loss=0.3149, pruned_loss=0.07868, over 18496.00 frames. ], tot_loss[loss=0.2144, simple_loss=0.2936, pruned_loss=0.06763, over 3572642.64 frames. 
], batch size: 53, lr: 1.60e-02, grad_scale: 8.0 +2023-03-08 21:32:52,271 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3356, 5.9381, 5.3356, 5.6936, 5.4402, 5.4697, 6.0299, 5.9403], + device='cuda:1'), covar=tensor([0.1365, 0.0691, 0.0469, 0.0711, 0.1391, 0.0723, 0.0465, 0.0636], + device='cuda:1'), in_proj_covar=tensor([0.0449, 0.0373, 0.0286, 0.0406, 0.0549, 0.0406, 0.0493, 0.0381], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-08 21:33:08,947 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.576e+02 4.201e+02 5.158e+02 6.206e+02 1.861e+03, threshold=1.032e+03, percent-clipped=10.0 +2023-03-08 21:33:23,085 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=24144.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 21:33:30,927 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-03-08 21:33:36,537 INFO [train.py:898] (1/4) Epoch 7, batch 2350, loss[loss=0.2276, simple_loss=0.3124, pruned_loss=0.07138, over 18294.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2928, pruned_loss=0.06701, over 3584198.16 frames. ], batch size: 57, lr: 1.60e-02, grad_scale: 8.0 +2023-03-08 21:33:56,926 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6649, 4.5653, 4.7256, 3.4974, 3.7742, 3.6665, 2.3625, 2.0655], + device='cuda:1'), covar=tensor([0.0225, 0.0142, 0.0049, 0.0268, 0.0329, 0.0180, 0.0861, 0.0975], + device='cuda:1'), in_proj_covar=tensor([0.0048, 0.0043, 0.0039, 0.0050, 0.0072, 0.0048, 0.0068, 0.0073], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-08 21:34:15,368 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-08 21:34:19,526 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24192.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:34:24,408 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5946, 4.3913, 4.5685, 3.3000, 3.6385, 3.4952, 2.4312, 1.8902], + device='cuda:1'), covar=tensor([0.0235, 0.0176, 0.0068, 0.0278, 0.0355, 0.0198, 0.0753, 0.1004], + device='cuda:1'), in_proj_covar=tensor([0.0049, 0.0043, 0.0039, 0.0051, 0.0073, 0.0048, 0.0068, 0.0074], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-08 21:34:36,094 INFO [train.py:898] (1/4) Epoch 7, batch 2400, loss[loss=0.1932, simple_loss=0.2712, pruned_loss=0.05761, over 18427.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2926, pruned_loss=0.06649, over 3591874.64 frames. 
], batch size: 43, lr: 1.60e-02, grad_scale: 8.0 +2023-03-08 21:35:00,211 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0445, 5.0681, 2.6486, 4.9206, 4.7805, 5.0951, 4.9232, 2.3868], + device='cuda:1'), covar=tensor([0.0147, 0.0092, 0.0802, 0.0074, 0.0090, 0.0087, 0.0106, 0.1040], + device='cuda:1'), in_proj_covar=tensor([0.0068, 0.0056, 0.0084, 0.0071, 0.0067, 0.0056, 0.0071, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-08 21:35:09,124 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.669e+02 3.723e+02 4.227e+02 5.514e+02 9.877e+02, threshold=8.454e+02, percent-clipped=0.0 +2023-03-08 21:35:35,292 INFO [train.py:898] (1/4) Epoch 7, batch 2450, loss[loss=0.1951, simple_loss=0.2677, pruned_loss=0.06127, over 18167.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2921, pruned_loss=0.06624, over 3593594.59 frames. ], batch size: 44, lr: 1.59e-02, grad_scale: 8.0 +2023-03-08 21:36:29,362 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.08 vs. limit=5.0 +2023-03-08 21:36:33,638 INFO [train.py:898] (1/4) Epoch 7, batch 2500, loss[loss=0.1949, simple_loss=0.267, pruned_loss=0.06138, over 18432.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2916, pruned_loss=0.06584, over 3604624.38 frames. ], batch size: 43, lr: 1.59e-02, grad_scale: 8.0 +2023-03-08 21:37:05,864 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7378, 3.5493, 3.4045, 3.0305, 3.3108, 3.0111, 2.9467, 3.6240], + device='cuda:1'), covar=tensor([0.0038, 0.0072, 0.0065, 0.0107, 0.0079, 0.0120, 0.0146, 0.0056], + device='cuda:1'), in_proj_covar=tensor([0.0066, 0.0087, 0.0076, 0.0121, 0.0078, 0.0120, 0.0128, 0.0069], + device='cuda:1'), out_proj_covar=tensor([9.1671e-05, 1.3191e-04, 1.1322e-04, 1.8995e-04, 1.1630e-04, 1.8560e-04, + 1.9696e-04, 1.0100e-04], device='cuda:1') +2023-03-08 21:37:06,513 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.422e+02 3.679e+02 4.544e+02 5.517e+02 9.659e+02, threshold=9.088e+02, percent-clipped=5.0 +2023-03-08 21:37:24,892 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3275, 5.2852, 4.7523, 5.2723, 5.2047, 4.6294, 5.1811, 4.7881], + device='cuda:1'), covar=tensor([0.0374, 0.0333, 0.1478, 0.0634, 0.0471, 0.0445, 0.0353, 0.1014], + device='cuda:1'), in_proj_covar=tensor([0.0350, 0.0385, 0.0550, 0.0310, 0.0288, 0.0368, 0.0384, 0.0497], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-08 21:37:31,457 INFO [train.py:898] (1/4) Epoch 7, batch 2550, loss[loss=0.2194, simple_loss=0.3059, pruned_loss=0.06644, over 17059.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2911, pruned_loss=0.06566, over 3603867.88 frames. 
], batch size: 78, lr: 1.59e-02, grad_scale: 8.0 +2023-03-08 21:37:40,055 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6668, 3.2690, 1.7718, 4.3649, 3.0513, 4.4007, 1.6273, 3.5010], + device='cuda:1'), covar=tensor([0.0376, 0.0745, 0.1375, 0.0344, 0.0784, 0.0239, 0.1435, 0.0439], + device='cuda:1'), in_proj_covar=tensor([0.0165, 0.0194, 0.0166, 0.0195, 0.0166, 0.0178, 0.0176, 0.0164], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 21:38:16,261 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24393.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:38:29,283 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7692, 2.2635, 4.3016, 4.1556, 2.1150, 4.2930, 3.6868, 2.5651], + device='cuda:1'), covar=tensor([0.0313, 0.2021, 0.0156, 0.0176, 0.2083, 0.0186, 0.0418, 0.1429], + device='cuda:1'), in_proj_covar=tensor([0.0165, 0.0206, 0.0121, 0.0124, 0.0201, 0.0163, 0.0176, 0.0185], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 21:38:29,984 INFO [train.py:898] (1/4) Epoch 7, batch 2600, loss[loss=0.222, simple_loss=0.2997, pruned_loss=0.07211, over 18573.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2921, pruned_loss=0.06615, over 3593022.52 frames. ], batch size: 54, lr: 1.59e-02, grad_scale: 4.0 +2023-03-08 21:39:05,100 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.252e+02 3.780e+02 4.661e+02 5.457e+02 1.160e+03, threshold=9.322e+02, percent-clipped=5.0 +2023-03-08 21:39:29,140 INFO [train.py:898] (1/4) Epoch 7, batch 2650, loss[loss=0.1999, simple_loss=0.2686, pruned_loss=0.0656, over 17776.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2926, pruned_loss=0.06641, over 3587605.37 frames. ], batch size: 39, lr: 1.59e-02, grad_scale: 4.0 +2023-03-08 21:40:27,787 INFO [train.py:898] (1/4) Epoch 7, batch 2700, loss[loss=0.1754, simple_loss=0.2532, pruned_loss=0.04885, over 18413.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.293, pruned_loss=0.06657, over 3590924.53 frames. ], batch size: 42, lr: 1.59e-02, grad_scale: 4.0 +2023-03-08 21:40:50,096 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.95 vs. limit=2.0 +2023-03-08 21:41:02,507 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.355e+02 3.494e+02 4.469e+02 5.662e+02 1.849e+03, threshold=8.938e+02, percent-clipped=8.0 +2023-03-08 21:41:17,731 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24547.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:41:26,599 INFO [train.py:898] (1/4) Epoch 7, batch 2750, loss[loss=0.2138, simple_loss=0.2943, pruned_loss=0.06668, over 18629.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2929, pruned_loss=0.06653, over 3573128.74 frames. ], batch size: 52, lr: 1.59e-02, grad_scale: 4.0 +2023-03-08 21:41:43,506 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24569.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:42:25,731 INFO [train.py:898] (1/4) Epoch 7, batch 2800, loss[loss=0.2146, simple_loss=0.2972, pruned_loss=0.06603, over 18606.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2921, pruned_loss=0.06573, over 3590392.58 frames. 
], batch size: 52, lr: 1.58e-02, grad_scale: 8.0 +2023-03-08 21:42:29,471 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24608.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:42:55,533 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24630.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:43:01,134 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.569e+02 3.885e+02 4.580e+02 5.331e+02 1.147e+03, threshold=9.161e+02, percent-clipped=3.0 +2023-03-08 21:43:19,681 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0227, 4.1330, 2.3076, 4.1599, 5.0724, 2.5476, 3.4210, 3.5927], + device='cuda:1'), covar=tensor([0.0082, 0.1129, 0.1805, 0.0580, 0.0056, 0.1391, 0.0894, 0.0822], + device='cuda:1'), in_proj_covar=tensor([0.0091, 0.0196, 0.0181, 0.0178, 0.0078, 0.0165, 0.0191, 0.0188], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 21:43:23,573 INFO [train.py:898] (1/4) Epoch 7, batch 2850, loss[loss=0.2116, simple_loss=0.2931, pruned_loss=0.06508, over 18482.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2937, pruned_loss=0.06686, over 3589581.24 frames. ], batch size: 51, lr: 1.58e-02, grad_scale: 4.0 +2023-03-08 21:43:52,635 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6169, 2.8918, 2.6867, 2.7304, 3.4974, 3.4024, 3.0677, 2.7780], + device='cuda:1'), covar=tensor([0.0135, 0.0257, 0.0535, 0.0311, 0.0186, 0.0190, 0.0285, 0.0337], + device='cuda:1'), in_proj_covar=tensor([0.0109, 0.0089, 0.0142, 0.0120, 0.0087, 0.0071, 0.0112, 0.0113], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 21:44:08,937 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=24693.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:44:22,196 INFO [train.py:898] (1/4) Epoch 7, batch 2900, loss[loss=0.1984, simple_loss=0.2801, pruned_loss=0.05833, over 18423.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2928, pruned_loss=0.06663, over 3585014.39 frames. 
], batch size: 48, lr: 1.58e-02, grad_scale: 4.0 +2023-03-08 21:44:30,522 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0754, 3.3145, 4.3603, 4.2540, 2.8014, 4.7811, 3.9572, 3.2278], + device='cuda:1'), covar=tensor([0.0323, 0.0929, 0.0151, 0.0174, 0.1268, 0.0137, 0.0377, 0.0763], + device='cuda:1'), in_proj_covar=tensor([0.0163, 0.0199, 0.0121, 0.0118, 0.0194, 0.0160, 0.0175, 0.0176], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 21:44:57,698 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.094e+02 3.800e+02 4.687e+02 5.853e+02 1.844e+03, threshold=9.374e+02, percent-clipped=5.0 +2023-03-08 21:45:05,166 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24741.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:45:10,019 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8011, 1.9568, 2.7194, 2.9829, 3.6832, 5.2910, 4.7974, 4.3953], + device='cuda:1'), covar=tensor([0.0715, 0.1457, 0.1897, 0.0852, 0.1180, 0.0047, 0.0243, 0.0246], + device='cuda:1'), in_proj_covar=tensor([0.0195, 0.0249, 0.0247, 0.0222, 0.0329, 0.0146, 0.0220, 0.0169], + device='cuda:1'), out_proj_covar=tensor([1.3024e-04, 1.6752e-04, 1.7270e-04, 1.3815e-04, 2.2188e-04, 9.4659e-05, + 1.4147e-04, 1.1375e-04], device='cuda:1') +2023-03-08 21:45:20,844 INFO [train.py:898] (1/4) Epoch 7, batch 2950, loss[loss=0.2315, simple_loss=0.3108, pruned_loss=0.07609, over 18297.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2932, pruned_loss=0.06694, over 3584489.48 frames. ], batch size: 57, lr: 1.58e-02, grad_scale: 4.0 +2023-03-08 21:45:32,678 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24765.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:46:17,415 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8638, 4.0746, 2.4503, 4.1817, 4.9436, 2.1981, 3.5365, 3.6993], + device='cuda:1'), covar=tensor([0.0115, 0.1019, 0.1637, 0.0568, 0.0066, 0.1636, 0.0777, 0.0795], + device='cuda:1'), in_proj_covar=tensor([0.0091, 0.0198, 0.0184, 0.0179, 0.0079, 0.0167, 0.0193, 0.0190], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 21:46:20,380 INFO [train.py:898] (1/4) Epoch 7, batch 3000, loss[loss=0.1894, simple_loss=0.2683, pruned_loss=0.05522, over 18514.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2931, pruned_loss=0.0664, over 3592909.89 frames. ], batch size: 47, lr: 1.58e-02, grad_scale: 4.0 +2023-03-08 21:46:20,380 INFO [train.py:923] (1/4) Computing validation loss +2023-03-08 21:46:32,364 INFO [train.py:932] (1/4) Epoch 7, validation: loss=0.1689, simple_loss=0.2715, pruned_loss=0.03314, over 944034.00 frames. 
+2023-03-08 21:46:32,365 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-08 21:46:51,985 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4016, 5.3161, 4.7967, 5.3099, 5.3239, 4.7592, 5.2602, 4.9885], + device='cuda:1'), covar=tensor([0.0340, 0.0377, 0.1477, 0.0653, 0.0382, 0.0383, 0.0324, 0.0786], + device='cuda:1'), in_proj_covar=tensor([0.0349, 0.0391, 0.0552, 0.0317, 0.0289, 0.0371, 0.0390, 0.0499], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-08 21:46:57,805 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24826.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:47:02,920 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2056, 5.2336, 2.7588, 5.0654, 4.9084, 5.3579, 5.0914, 2.6652], + device='cuda:1'), covar=tensor([0.0135, 0.0083, 0.0725, 0.0071, 0.0068, 0.0055, 0.0097, 0.0985], + device='cuda:1'), in_proj_covar=tensor([0.0069, 0.0059, 0.0085, 0.0072, 0.0068, 0.0057, 0.0072, 0.0089], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-08 21:47:08,248 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.797e+02 4.000e+02 4.650e+02 5.894e+02 1.091e+03, threshold=9.301e+02, percent-clipped=1.0 +2023-03-08 21:47:23,258 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24848.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:47:30,758 INFO [train.py:898] (1/4) Epoch 7, batch 3050, loss[loss=0.2096, simple_loss=0.2929, pruned_loss=0.06321, over 18408.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2929, pruned_loss=0.06629, over 3583105.80 frames. ], batch size: 48, lr: 1.58e-02, grad_scale: 4.0 +2023-03-08 21:48:10,124 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4818, 2.7373, 2.4881, 2.8919, 3.4972, 3.5371, 2.9781, 2.9353], + device='cuda:1'), covar=tensor([0.0186, 0.0312, 0.0676, 0.0394, 0.0197, 0.0156, 0.0432, 0.0327], + device='cuda:1'), in_proj_covar=tensor([0.0108, 0.0086, 0.0142, 0.0121, 0.0084, 0.0070, 0.0115, 0.0114], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 21:48:26,760 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24903.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:48:28,716 INFO [train.py:898] (1/4) Epoch 7, batch 3100, loss[loss=0.194, simple_loss=0.2784, pruned_loss=0.05481, over 18372.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2917, pruned_loss=0.0659, over 3591840.93 frames. 
], batch size: 46, lr: 1.57e-02, grad_scale: 2.0 +2023-03-08 21:48:33,513 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24909.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:48:42,803 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1147, 3.9212, 5.1530, 3.1142, 4.2935, 2.6779, 3.0549, 2.2090], + device='cuda:1'), covar=tensor([0.0690, 0.0595, 0.0046, 0.0527, 0.0501, 0.1944, 0.2128, 0.1415], + device='cuda:1'), in_proj_covar=tensor([0.0180, 0.0191, 0.0094, 0.0149, 0.0205, 0.0233, 0.0242, 0.0192], + device='cuda:1'), out_proj_covar=tensor([1.6369e-04, 1.7941e-04, 9.0350e-05, 1.3904e-04, 1.9325e-04, 2.1904e-04, + 2.2915e-04, 1.8321e-04], device='cuda:1') +2023-03-08 21:48:52,701 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24925.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:49:05,437 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.662e+02 4.170e+02 4.880e+02 6.294e+02 1.409e+03, threshold=9.761e+02, percent-clipped=6.0 +2023-03-08 21:49:27,491 INFO [train.py:898] (1/4) Epoch 7, batch 3150, loss[loss=0.2274, simple_loss=0.3114, pruned_loss=0.07164, over 18125.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2916, pruned_loss=0.06588, over 3591849.15 frames. ], batch size: 62, lr: 1.57e-02, grad_scale: 2.0 +2023-03-08 21:49:53,049 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.3425, 3.5246, 5.2142, 4.3414, 3.1323, 2.9401, 4.2871, 5.2326], + device='cuda:1'), covar=tensor([0.0936, 0.1590, 0.0047, 0.0267, 0.0836, 0.1029, 0.0338, 0.0136], + device='cuda:1'), in_proj_covar=tensor([0.0132, 0.0212, 0.0079, 0.0148, 0.0164, 0.0165, 0.0156, 0.0106], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-08 21:49:55,474 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. limit=2.0 +2023-03-08 21:50:15,267 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24995.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:50:26,665 INFO [train.py:898] (1/4) Epoch 7, batch 3200, loss[loss=0.2305, simple_loss=0.3165, pruned_loss=0.0722, over 18357.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.291, pruned_loss=0.0654, over 3580880.39 frames. ], batch size: 55, lr: 1.57e-02, grad_scale: 4.0 +2023-03-08 21:51:03,253 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.251e+02 3.758e+02 4.426e+02 5.545e+02 1.381e+03, threshold=8.852e+02, percent-clipped=2.0 +2023-03-08 21:51:25,659 INFO [train.py:898] (1/4) Epoch 7, batch 3250, loss[loss=0.2494, simple_loss=0.3185, pruned_loss=0.0902, over 12397.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2905, pruned_loss=0.06532, over 3567401.55 frames. ], batch size: 129, lr: 1.57e-02, grad_scale: 4.0 +2023-03-08 21:51:27,260 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25056.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 21:52:24,651 INFO [train.py:898] (1/4) Epoch 7, batch 3300, loss[loss=0.2122, simple_loss=0.2832, pruned_loss=0.07062, over 18251.00 frames. ], tot_loss[loss=0.2105, simple_loss=0.2901, pruned_loss=0.0654, over 3581298.32 frames. 
], batch size: 45, lr: 1.57e-02, grad_scale: 4.0 +2023-03-08 21:52:42,794 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25121.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:53:01,192 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.469e+02 3.921e+02 4.598e+02 5.927e+02 2.644e+03, threshold=9.195e+02, percent-clipped=9.0 +2023-03-08 21:53:23,223 INFO [train.py:898] (1/4) Epoch 7, batch 3350, loss[loss=0.2015, simple_loss=0.2813, pruned_loss=0.06089, over 18390.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2914, pruned_loss=0.06627, over 3567431.61 frames. ], batch size: 48, lr: 1.57e-02, grad_scale: 4.0 +2023-03-08 21:53:27,167 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.96 vs. limit=5.0 +2023-03-08 21:54:19,410 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25203.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:54:20,415 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25204.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:54:21,396 INFO [train.py:898] (1/4) Epoch 7, batch 3400, loss[loss=0.229, simple_loss=0.3107, pruned_loss=0.07366, over 18561.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2914, pruned_loss=0.06604, over 3584231.00 frames. ], batch size: 54, lr: 1.57e-02, grad_scale: 4.0 +2023-03-08 21:54:39,240 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.92 vs. limit=5.0 +2023-03-08 21:54:44,340 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25225.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:54:56,993 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.655e+02 3.734e+02 4.394e+02 5.547e+02 1.008e+03, threshold=8.789e+02, percent-clipped=3.0 +2023-03-08 21:55:14,350 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25251.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:55:17,472 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6332, 2.4798, 2.4982, 2.3107, 2.4802, 2.1333, 2.1684, 2.5346], + device='cuda:1'), covar=tensor([0.0037, 0.0058, 0.0058, 0.0078, 0.0063, 0.0124, 0.0127, 0.0062], + device='cuda:1'), in_proj_covar=tensor([0.0068, 0.0091, 0.0081, 0.0126, 0.0082, 0.0128, 0.0134, 0.0074], + device='cuda:1'), out_proj_covar=tensor([9.3409e-05, 1.3712e-04, 1.1990e-04, 1.9756e-04, 1.2165e-04, 1.9710e-04, + 2.0443e-04, 1.0779e-04], device='cuda:1') +2023-03-08 21:55:17,633 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. limit=2.0 +2023-03-08 21:55:19,392 INFO [train.py:898] (1/4) Epoch 7, batch 3450, loss[loss=0.2078, simple_loss=0.2834, pruned_loss=0.0661, over 18488.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2915, pruned_loss=0.06618, over 3582791.83 frames. ], batch size: 47, lr: 1.56e-02, grad_scale: 4.0 +2023-03-08 21:55:39,657 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25273.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:56:17,154 INFO [train.py:898] (1/4) Epoch 7, batch 3500, loss[loss=0.2082, simple_loss=0.2921, pruned_loss=0.06216, over 18500.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2908, pruned_loss=0.06598, over 3595383.54 frames. 
], batch size: 53, lr: 1.56e-02, grad_scale: 2.0 +2023-03-08 21:56:17,629 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4195, 4.4097, 2.4677, 4.5866, 5.4823, 2.6017, 4.0734, 3.8989], + device='cuda:1'), covar=tensor([0.0039, 0.0890, 0.1381, 0.0409, 0.0026, 0.1155, 0.0460, 0.0646], + device='cuda:1'), in_proj_covar=tensor([0.0089, 0.0191, 0.0178, 0.0176, 0.0075, 0.0165, 0.0187, 0.0185], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0003], + device='cuda:1') +2023-03-08 21:56:53,683 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.862e+02 4.141e+02 4.748e+02 6.314e+02 1.477e+03, threshold=9.496e+02, percent-clipped=11.0 +2023-03-08 21:56:58,298 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6153, 1.9990, 2.6566, 2.5877, 3.3428, 4.7195, 4.1596, 3.9718], + device='cuda:1'), covar=tensor([0.0761, 0.1462, 0.1586, 0.1060, 0.1305, 0.0068, 0.0329, 0.0238], + device='cuda:1'), in_proj_covar=tensor([0.0197, 0.0250, 0.0250, 0.0223, 0.0330, 0.0145, 0.0223, 0.0169], + device='cuda:1'), out_proj_covar=tensor([1.3131e-04, 1.6757e-04, 1.7316e-04, 1.3738e-04, 2.2077e-04, 9.4372e-05, + 1.4291e-04, 1.1258e-04], device='cuda:1') +2023-03-08 21:57:08,495 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25351.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 21:57:12,757 INFO [train.py:898] (1/4) Epoch 7, batch 3550, loss[loss=0.1793, simple_loss=0.2638, pruned_loss=0.04743, over 18553.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.291, pruned_loss=0.06598, over 3596158.16 frames. ], batch size: 49, lr: 1.56e-02, grad_scale: 2.0 +2023-03-08 21:58:00,966 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-03-08 21:58:07,644 INFO [train.py:898] (1/4) Epoch 7, batch 3600, loss[loss=0.1791, simple_loss=0.2598, pruned_loss=0.04923, over 18490.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2918, pruned_loss=0.06621, over 3589217.32 frames. ], batch size: 47, lr: 1.56e-02, grad_scale: 4.0 +2023-03-08 21:58:09,642 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.90 vs. limit=5.0 +2023-03-08 21:58:24,817 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25421.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:58:31,849 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25428.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 21:58:40,128 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.475e+02 3.916e+02 4.844e+02 6.068e+02 1.506e+03, threshold=9.689e+02, percent-clipped=7.0 +2023-03-08 21:59:12,596 INFO [train.py:898] (1/4) Epoch 8, batch 0, loss[loss=0.2228, simple_loss=0.3055, pruned_loss=0.07002, over 17986.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.3055, pruned_loss=0.07002, over 17986.00 frames. ], batch size: 65, lr: 1.47e-02, grad_scale: 8.0 +2023-03-08 21:59:12,596 INFO [train.py:923] (1/4) Computing validation loss +2023-03-08 21:59:24,298 INFO [train.py:932] (1/4) Epoch 8, validation: loss=0.17, simple_loss=0.2728, pruned_loss=0.03358, over 944034.00 frames. 
+2023-03-08 21:59:24,299 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-08 21:59:59,983 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25469.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:00:22,359 INFO [train.py:898] (1/4) Epoch 8, batch 50, loss[loss=0.197, simple_loss=0.2819, pruned_loss=0.056, over 18499.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2907, pruned_loss=0.06447, over 818629.77 frames. ], batch size: 53, lr: 1.47e-02, grad_scale: 8.0 +2023-03-08 22:00:22,737 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25489.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 22:00:39,147 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25504.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:01:13,084 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3798, 3.1282, 1.6212, 4.0555, 2.7700, 4.2493, 1.9802, 3.7274], + device='cuda:1'), covar=tensor([0.0507, 0.0837, 0.1454, 0.0433, 0.0830, 0.0217, 0.1217, 0.0313], + device='cuda:1'), in_proj_covar=tensor([0.0169, 0.0195, 0.0171, 0.0196, 0.0169, 0.0186, 0.0178, 0.0169], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 22:01:18,381 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.473e+02 3.465e+02 4.274e+02 5.083e+02 8.127e+02, threshold=8.548e+02, percent-clipped=0.0 +2023-03-08 22:01:20,756 INFO [train.py:898] (1/4) Epoch 8, batch 100, loss[loss=0.2437, simple_loss=0.3196, pruned_loss=0.08391, over 18059.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2898, pruned_loss=0.0647, over 1435067.79 frames. ], batch size: 62, lr: 1.47e-02, grad_scale: 8.0 +2023-03-08 22:01:35,575 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25552.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:02:19,610 INFO [train.py:898] (1/4) Epoch 8, batch 150, loss[loss=0.1806, simple_loss=0.2642, pruned_loss=0.04853, over 18394.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2899, pruned_loss=0.06414, over 1917728.10 frames. ], batch size: 48, lr: 1.46e-02, grad_scale: 8.0 +2023-03-08 22:03:12,245 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6672, 4.7464, 4.7191, 4.7574, 4.6042, 5.3258, 5.0040, 4.8960], + device='cuda:1'), covar=tensor([0.0855, 0.0775, 0.0679, 0.0620, 0.1257, 0.0705, 0.0622, 0.1266], + device='cuda:1'), in_proj_covar=tensor([0.0271, 0.0207, 0.0213, 0.0215, 0.0254, 0.0304, 0.0199, 0.0298], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-08 22:03:16,569 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.739e+02 3.654e+02 4.521e+02 5.323e+02 1.367e+03, threshold=9.043e+02, percent-clipped=1.0 +2023-03-08 22:03:18,873 INFO [train.py:898] (1/4) Epoch 8, batch 200, loss[loss=0.1936, simple_loss=0.2691, pruned_loss=0.05906, over 18563.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2899, pruned_loss=0.0646, over 2272126.62 frames. 
], batch size: 45, lr: 1.46e-02, grad_scale: 8.0 +2023-03-08 22:03:21,422 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7022, 4.1572, 4.3189, 3.4801, 3.6126, 3.3743, 2.2508, 1.7237], + device='cuda:1'), covar=tensor([0.0175, 0.0170, 0.0061, 0.0205, 0.0309, 0.0186, 0.0825, 0.1121], + device='cuda:1'), in_proj_covar=tensor([0.0048, 0.0042, 0.0039, 0.0052, 0.0072, 0.0048, 0.0068, 0.0073], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0005, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-08 22:03:32,334 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25651.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 22:03:33,659 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9397, 4.0495, 2.3836, 4.0993, 4.9725, 2.4261, 3.5380, 3.7096], + device='cuda:1'), covar=tensor([0.0076, 0.0909, 0.1495, 0.0523, 0.0047, 0.1334, 0.0716, 0.0729], + device='cuda:1'), in_proj_covar=tensor([0.0091, 0.0194, 0.0180, 0.0179, 0.0077, 0.0168, 0.0190, 0.0190], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 22:03:39,896 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=5.01 vs. limit=5.0 +2023-03-08 22:04:17,976 INFO [train.py:898] (1/4) Epoch 8, batch 250, loss[loss=0.1886, simple_loss=0.2675, pruned_loss=0.05484, over 18483.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2899, pruned_loss=0.06453, over 2565978.55 frames. ], batch size: 47, lr: 1.46e-02, grad_scale: 8.0 +2023-03-08 22:04:21,818 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6205, 3.2292, 1.8647, 4.2555, 2.8580, 4.4808, 2.1920, 3.9337], + device='cuda:1'), covar=tensor([0.0489, 0.0856, 0.1548, 0.0367, 0.0870, 0.0197, 0.1139, 0.0334], + device='cuda:1'), in_proj_covar=tensor([0.0172, 0.0197, 0.0172, 0.0199, 0.0172, 0.0188, 0.0179, 0.0171], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 22:04:29,459 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25699.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:05:02,854 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4849, 2.5756, 4.0764, 3.8579, 2.3474, 4.4941, 3.8795, 2.7994], + device='cuda:1'), covar=tensor([0.0449, 0.1454, 0.0247, 0.0263, 0.1633, 0.0142, 0.0439, 0.1035], + device='cuda:1'), in_proj_covar=tensor([0.0174, 0.0208, 0.0126, 0.0126, 0.0203, 0.0168, 0.0187, 0.0190], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 22:05:14,434 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.583e+02 3.732e+02 4.597e+02 5.455e+02 9.874e+02, threshold=9.193e+02, percent-clipped=1.0 +2023-03-08 22:05:17,281 INFO [train.py:898] (1/4) Epoch 8, batch 300, loss[loss=0.1554, simple_loss=0.2366, pruned_loss=0.03709, over 18443.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.288, pruned_loss=0.06359, over 2792002.70 frames. ], batch size: 43, lr: 1.46e-02, grad_scale: 8.0 +2023-03-08 22:06:10,053 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25784.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 22:06:15,821 INFO [train.py:898] (1/4) Epoch 8, batch 350, loss[loss=0.1974, simple_loss=0.2728, pruned_loss=0.06101, over 18358.00 frames. 
], tot_loss[loss=0.2077, simple_loss=0.288, pruned_loss=0.06371, over 2963389.90 frames. ], batch size: 46, lr: 1.46e-02, grad_scale: 8.0 +2023-03-08 22:07:11,597 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.442e+02 3.462e+02 4.130e+02 5.163e+02 1.142e+03, threshold=8.260e+02, percent-clipped=1.0 +2023-03-08 22:07:14,543 INFO [train.py:898] (1/4) Epoch 8, batch 400, loss[loss=0.2302, simple_loss=0.3134, pruned_loss=0.07356, over 18003.00 frames. ], tot_loss[loss=0.2085, simple_loss=0.2887, pruned_loss=0.06408, over 3104665.91 frames. ], batch size: 65, lr: 1.46e-02, grad_scale: 8.0 +2023-03-08 22:07:37,319 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25858.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 22:08:13,028 INFO [train.py:898] (1/4) Epoch 8, batch 450, loss[loss=0.1715, simple_loss=0.2472, pruned_loss=0.04787, over 18169.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2877, pruned_loss=0.0635, over 3218209.47 frames. ], batch size: 44, lr: 1.46e-02, grad_scale: 8.0 +2023-03-08 22:08:25,138 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5154, 5.4284, 4.9690, 5.4658, 5.4781, 4.8954, 5.3262, 5.0376], + device='cuda:1'), covar=tensor([0.0329, 0.0357, 0.1397, 0.0552, 0.0428, 0.0388, 0.0330, 0.0793], + device='cuda:1'), in_proj_covar=tensor([0.0345, 0.0404, 0.0544, 0.0321, 0.0292, 0.0374, 0.0390, 0.0505], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-08 22:08:48,665 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25919.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:08:49,758 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6641, 3.3720, 1.9660, 4.3555, 2.9890, 4.5552, 2.0734, 4.0587], + device='cuda:1'), covar=tensor([0.0533, 0.0872, 0.1577, 0.0491, 0.0913, 0.0260, 0.1309, 0.0357], + device='cuda:1'), in_proj_covar=tensor([0.0172, 0.0198, 0.0172, 0.0201, 0.0171, 0.0193, 0.0177, 0.0171], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 22:09:09,612 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.689e+02 3.778e+02 4.755e+02 5.799e+02 1.474e+03, threshold=9.510e+02, percent-clipped=4.0 +2023-03-08 22:09:11,982 INFO [train.py:898] (1/4) Epoch 8, batch 500, loss[loss=0.1826, simple_loss=0.2574, pruned_loss=0.05389, over 18244.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2876, pruned_loss=0.06341, over 3307069.98 frames. ], batch size: 45, lr: 1.45e-02, grad_scale: 8.0 +2023-03-08 22:09:20,804 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2857, 5.1110, 5.3785, 5.2927, 5.2263, 5.9560, 5.6027, 5.3243], + device='cuda:1'), covar=tensor([0.0814, 0.0693, 0.0694, 0.0621, 0.1446, 0.0787, 0.0614, 0.1710], + device='cuda:1'), in_proj_covar=tensor([0.0272, 0.0212, 0.0212, 0.0214, 0.0254, 0.0308, 0.0203, 0.0304], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-08 22:10:10,519 INFO [train.py:898] (1/4) Epoch 8, batch 550, loss[loss=0.2293, simple_loss=0.3125, pruned_loss=0.07309, over 18282.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2871, pruned_loss=0.06267, over 3380745.47 frames. ], batch size: 57, lr: 1.45e-02, grad_scale: 8.0 +2023-03-08 22:11:00,435 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. 
limit=2.0 +2023-03-08 22:11:10,806 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.250e+02 3.451e+02 4.216e+02 4.967e+02 1.068e+03, threshold=8.432e+02, percent-clipped=2.0 +2023-03-08 22:11:13,149 INFO [train.py:898] (1/4) Epoch 8, batch 600, loss[loss=0.2013, simple_loss=0.2818, pruned_loss=0.06036, over 18391.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2863, pruned_loss=0.06248, over 3426504.08 frames. ], batch size: 52, lr: 1.45e-02, grad_scale: 8.0 +2023-03-08 22:12:06,766 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26084.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 22:12:12,076 INFO [train.py:898] (1/4) Epoch 8, batch 650, loss[loss=0.2275, simple_loss=0.3052, pruned_loss=0.07489, over 18384.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2857, pruned_loss=0.06187, over 3467517.50 frames. ], batch size: 50, lr: 1.45e-02, grad_scale: 8.0 +2023-03-08 22:12:28,561 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26102.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:12:49,420 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-08 22:13:03,424 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26132.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:13:08,682 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.333e+02 3.685e+02 4.568e+02 5.569e+02 1.081e+03, threshold=9.136e+02, percent-clipped=5.0 +2023-03-08 22:13:11,030 INFO [train.py:898] (1/4) Epoch 8, batch 700, loss[loss=0.1839, simple_loss=0.2644, pruned_loss=0.05167, over 18555.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2859, pruned_loss=0.06243, over 3481015.29 frames. ], batch size: 45, lr: 1.45e-02, grad_scale: 8.0 +2023-03-08 22:13:33,947 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6226, 1.9531, 2.6627, 2.6368, 3.2496, 4.5218, 4.0678, 3.8312], + device='cuda:1'), covar=tensor([0.0792, 0.1459, 0.1589, 0.1003, 0.1159, 0.0096, 0.0322, 0.0266], + device='cuda:1'), in_proj_covar=tensor([0.0201, 0.0255, 0.0259, 0.0224, 0.0333, 0.0151, 0.0223, 0.0174], + device='cuda:1'), out_proj_covar=tensor([1.3324e-04, 1.6959e-04, 1.7705e-04, 1.3752e-04, 2.2060e-04, 9.8333e-05, + 1.4228e-04, 1.1578e-04], device='cuda:1') +2023-03-08 22:13:39,866 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26163.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:13:42,495 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2580, 5.9170, 5.2397, 5.5997, 5.3965, 5.3286, 5.9127, 5.8918], + device='cuda:1'), covar=tensor([0.1169, 0.0616, 0.0481, 0.0671, 0.1246, 0.0710, 0.0514, 0.0590], + device='cuda:1'), in_proj_covar=tensor([0.0469, 0.0383, 0.0294, 0.0412, 0.0562, 0.0415, 0.0512, 0.0394], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-08 22:13:45,948 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26168.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:14:09,258 INFO [train.py:898] (1/4) Epoch 8, batch 750, loss[loss=0.2218, simple_loss=0.3059, pruned_loss=0.06883, over 17988.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2864, pruned_loss=0.06279, over 3504005.19 frames. 
], batch size: 65, lr: 1.45e-02, grad_scale: 8.0 +2023-03-08 22:14:39,858 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26214.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 22:14:57,294 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26229.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:14:58,715 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.48 vs. limit=5.0 +2023-03-08 22:15:05,905 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.506e+02 3.613e+02 4.168e+02 4.953e+02 1.109e+03, threshold=8.337e+02, percent-clipped=3.0 +2023-03-08 22:15:08,151 INFO [train.py:898] (1/4) Epoch 8, batch 800, loss[loss=0.2211, simple_loss=0.302, pruned_loss=0.07009, over 18386.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2871, pruned_loss=0.06333, over 3521241.89 frames. ], batch size: 52, lr: 1.45e-02, grad_scale: 8.0 +2023-03-08 22:15:10,286 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.83 vs. limit=5.0 +2023-03-08 22:15:17,596 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4766, 2.7303, 2.3779, 2.6632, 3.5478, 3.4711, 2.8624, 2.8166], + device='cuda:1'), covar=tensor([0.0208, 0.0277, 0.0601, 0.0359, 0.0134, 0.0094, 0.0329, 0.0313], + device='cuda:1'), in_proj_covar=tensor([0.0112, 0.0091, 0.0143, 0.0119, 0.0089, 0.0072, 0.0117, 0.0114], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 22:15:21,561 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26250.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:16:04,255 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7570, 3.6399, 5.0480, 3.0480, 4.2541, 2.6374, 2.9634, 1.8891], + device='cuda:1'), covar=tensor([0.0863, 0.0723, 0.0060, 0.0527, 0.0525, 0.1951, 0.2069, 0.1632], + device='cuda:1'), in_proj_covar=tensor([0.0179, 0.0193, 0.0095, 0.0150, 0.0203, 0.0232, 0.0246, 0.0192], + device='cuda:1'), out_proj_covar=tensor([1.6114e-04, 1.8048e-04, 9.0442e-05, 1.3939e-04, 1.8983e-04, 2.1739e-04, + 2.2971e-04, 1.8074e-04], device='cuda:1') +2023-03-08 22:16:07,214 INFO [train.py:898] (1/4) Epoch 8, batch 850, loss[loss=0.2068, simple_loss=0.294, pruned_loss=0.05983, over 18484.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2866, pruned_loss=0.06259, over 3544086.10 frames. ], batch size: 51, lr: 1.45e-02, grad_scale: 8.0 +2023-03-08 22:16:33,999 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26311.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:16:52,394 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-03-08 22:17:04,267 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.475e+02 3.702e+02 4.449e+02 5.668e+02 1.488e+03, threshold=8.898e+02, percent-clipped=3.0 +2023-03-08 22:17:06,543 INFO [train.py:898] (1/4) Epoch 8, batch 900, loss[loss=0.2068, simple_loss=0.2871, pruned_loss=0.06321, over 18258.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2874, pruned_loss=0.06294, over 3548642.60 frames. ], batch size: 49, lr: 1.44e-02, grad_scale: 8.0 +2023-03-08 22:18:06,780 INFO [train.py:898] (1/4) Epoch 8, batch 950, loss[loss=0.2253, simple_loss=0.3047, pruned_loss=0.07297, over 18483.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2865, pruned_loss=0.0625, over 3566295.64 frames. 
], batch size: 51, lr: 1.44e-02, grad_scale: 8.0 +2023-03-08 22:18:30,599 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7054, 4.7429, 4.7719, 4.6394, 4.6140, 4.5549, 5.0122, 5.0191], + device='cuda:1'), covar=tensor([0.0059, 0.0067, 0.0078, 0.0091, 0.0069, 0.0114, 0.0076, 0.0099], + device='cuda:1'), in_proj_covar=tensor([0.0069, 0.0051, 0.0051, 0.0065, 0.0054, 0.0076, 0.0063, 0.0062], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 22:18:37,049 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26414.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:18:58,435 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-03-08 22:19:04,445 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.644e+02 3.699e+02 4.361e+02 5.008e+02 1.213e+03, threshold=8.721e+02, percent-clipped=3.0 +2023-03-08 22:19:06,722 INFO [train.py:898] (1/4) Epoch 8, batch 1000, loss[loss=0.2757, simple_loss=0.3363, pruned_loss=0.1076, over 12281.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2874, pruned_loss=0.06264, over 3562758.61 frames. ], batch size: 131, lr: 1.44e-02, grad_scale: 8.0 +2023-03-08 22:19:29,039 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26458.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:19:37,951 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9083, 4.9446, 5.0190, 4.8084, 4.7594, 4.7698, 5.2146, 5.1364], + device='cuda:1'), covar=tensor([0.0057, 0.0061, 0.0055, 0.0084, 0.0071, 0.0105, 0.0058, 0.0103], + device='cuda:1'), in_proj_covar=tensor([0.0071, 0.0051, 0.0052, 0.0066, 0.0055, 0.0077, 0.0063, 0.0063], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0002, 0.0002, 0.0003, 0.0002, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 22:19:50,147 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26475.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:20:06,545 INFO [train.py:898] (1/4) Epoch 8, batch 1050, loss[loss=0.1902, simple_loss=0.2682, pruned_loss=0.05608, over 18254.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2874, pruned_loss=0.06271, over 3569235.80 frames. ], batch size: 45, lr: 1.44e-02, grad_scale: 8.0 +2023-03-08 22:20:35,320 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26514.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 22:20:47,865 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26524.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:21:03,507 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.558e+02 3.535e+02 4.269e+02 5.489e+02 1.390e+03, threshold=8.539e+02, percent-clipped=8.0 +2023-03-08 22:21:05,723 INFO [train.py:898] (1/4) Epoch 8, batch 1100, loss[loss=0.2249, simple_loss=0.3058, pruned_loss=0.07198, over 17709.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2874, pruned_loss=0.06265, over 3574182.48 frames. 
], batch size: 70, lr: 1.44e-02, grad_scale: 8.0 +2023-03-08 22:21:22,563 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0928, 3.3939, 2.5693, 3.4135, 4.1315, 2.4076, 3.3100, 3.3440], + device='cuda:1'), covar=tensor([0.0099, 0.0936, 0.1224, 0.0539, 0.0074, 0.1180, 0.0610, 0.0674], + device='cuda:1'), in_proj_covar=tensor([0.0095, 0.0201, 0.0182, 0.0181, 0.0080, 0.0170, 0.0194, 0.0191], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 22:21:26,049 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6219, 2.4661, 2.5325, 2.2959, 2.5419, 2.0894, 2.2304, 2.6680], + device='cuda:1'), covar=tensor([0.0027, 0.0062, 0.0054, 0.0079, 0.0067, 0.0121, 0.0116, 0.0047], + device='cuda:1'), in_proj_covar=tensor([0.0068, 0.0096, 0.0086, 0.0131, 0.0085, 0.0129, 0.0136, 0.0073], + device='cuda:1'), out_proj_covar=tensor([9.3308e-05, 1.4355e-04, 1.2619e-04, 2.0323e-04, 1.2532e-04, 1.9720e-04, + 2.0759e-04, 1.0591e-04], device='cuda:1') +2023-03-08 22:21:32,867 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26562.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 22:21:47,626 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3554, 2.7831, 2.3180, 2.6750, 3.4039, 3.1569, 2.8731, 2.8750], + device='cuda:1'), covar=tensor([0.0175, 0.0243, 0.0682, 0.0375, 0.0182, 0.0149, 0.0334, 0.0279], + device='cuda:1'), in_proj_covar=tensor([0.0110, 0.0091, 0.0143, 0.0120, 0.0090, 0.0072, 0.0117, 0.0113], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 22:21:51,181 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4001, 4.9354, 5.5786, 5.4965, 5.1977, 6.0814, 5.7263, 5.3481], + device='cuda:1'), covar=tensor([0.1006, 0.0810, 0.0702, 0.0623, 0.1585, 0.0905, 0.0692, 0.1999], + device='cuda:1'), in_proj_covar=tensor([0.0277, 0.0212, 0.0216, 0.0218, 0.0259, 0.0314, 0.0206, 0.0299], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-08 22:22:05,740 INFO [train.py:898] (1/4) Epoch 8, batch 1150, loss[loss=0.1999, simple_loss=0.2799, pruned_loss=0.05994, over 18299.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2883, pruned_loss=0.06331, over 3581764.66 frames. ], batch size: 49, lr: 1.44e-02, grad_scale: 8.0 +2023-03-08 22:22:24,914 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26606.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:23:02,045 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.150e+02 3.587e+02 4.399e+02 5.427e+02 1.423e+03, threshold=8.799e+02, percent-clipped=5.0 +2023-03-08 22:23:04,977 INFO [train.py:898] (1/4) Epoch 8, batch 1200, loss[loss=0.1999, simple_loss=0.2752, pruned_loss=0.06235, over 18418.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2868, pruned_loss=0.06273, over 3590152.71 frames. ], batch size: 48, lr: 1.44e-02, grad_scale: 8.0 +2023-03-08 22:23:17,085 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-03-08 22:24:03,502 INFO [train.py:898] (1/4) Epoch 8, batch 1250, loss[loss=0.2105, simple_loss=0.2944, pruned_loss=0.0633, over 18489.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2862, pruned_loss=0.06247, over 3600805.60 frames. 
], batch size: 53, lr: 1.43e-02, grad_scale: 8.0 +2023-03-08 22:24:59,402 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.195e+02 3.433e+02 4.104e+02 5.033e+02 1.173e+03, threshold=8.208e+02, percent-clipped=2.0 +2023-03-08 22:24:59,957 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5644, 2.0299, 2.7351, 2.7656, 3.6401, 5.3287, 4.4864, 4.2719], + device='cuda:1'), covar=tensor([0.0893, 0.1544, 0.1856, 0.1052, 0.1269, 0.0049, 0.0346, 0.0274], + device='cuda:1'), in_proj_covar=tensor([0.0205, 0.0259, 0.0263, 0.0229, 0.0336, 0.0151, 0.0231, 0.0179], + device='cuda:1'), out_proj_covar=tensor([1.3488e-04, 1.7187e-04, 1.7851e-04, 1.4003e-04, 2.2188e-04, 9.7693e-05, + 1.4486e-04, 1.1765e-04], device='cuda:1') +2023-03-08 22:25:02,167 INFO [train.py:898] (1/4) Epoch 8, batch 1300, loss[loss=0.2406, simple_loss=0.33, pruned_loss=0.07556, over 18029.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2872, pruned_loss=0.06287, over 3595768.86 frames. ], batch size: 62, lr: 1.43e-02, grad_scale: 8.0 +2023-03-08 22:25:24,927 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26758.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:25:38,740 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26770.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:26:00,834 INFO [train.py:898] (1/4) Epoch 8, batch 1350, loss[loss=0.2034, simple_loss=0.2812, pruned_loss=0.06279, over 18130.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2867, pruned_loss=0.06257, over 3588034.93 frames. ], batch size: 44, lr: 1.43e-02, grad_scale: 8.0 +2023-03-08 22:26:21,902 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26806.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:26:31,139 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4704, 3.3658, 1.8731, 4.2995, 2.9353, 4.5454, 2.2786, 3.9120], + device='cuda:1'), covar=tensor([0.0602, 0.0748, 0.1587, 0.0475, 0.0906, 0.0230, 0.1165, 0.0398], + device='cuda:1'), in_proj_covar=tensor([0.0175, 0.0201, 0.0173, 0.0210, 0.0173, 0.0198, 0.0180, 0.0173], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-08 22:26:42,386 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26824.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:26:57,542 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.883e+02 3.529e+02 4.433e+02 5.428e+02 1.307e+03, threshold=8.866e+02, percent-clipped=5.0 +2023-03-08 22:27:00,027 INFO [train.py:898] (1/4) Epoch 8, batch 1400, loss[loss=0.1932, simple_loss=0.2647, pruned_loss=0.06081, over 18509.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2863, pruned_loss=0.06196, over 3598137.46 frames. 
], batch size: 47, lr: 1.43e-02, grad_scale: 8.0 +2023-03-08 22:27:39,756 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26872.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:27:45,593 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9820, 3.3207, 3.4036, 2.8106, 2.9260, 2.9401, 2.3985, 2.1413], + device='cuda:1'), covar=tensor([0.0211, 0.0188, 0.0084, 0.0237, 0.0315, 0.0204, 0.0587, 0.0696], + device='cuda:1'), in_proj_covar=tensor([0.0050, 0.0044, 0.0041, 0.0053, 0.0073, 0.0051, 0.0069, 0.0073], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-08 22:27:59,674 INFO [train.py:898] (1/4) Epoch 8, batch 1450, loss[loss=0.2335, simple_loss=0.3122, pruned_loss=0.07737, over 18284.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2862, pruned_loss=0.06186, over 3594922.26 frames. ], batch size: 57, lr: 1.43e-02, grad_scale: 8.0 +2023-03-08 22:28:01,339 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6798, 1.8630, 2.9628, 2.7295, 3.4574, 5.2421, 4.5783, 4.2990], + device='cuda:1'), covar=tensor([0.0813, 0.1620, 0.1563, 0.1028, 0.1355, 0.0051, 0.0316, 0.0268], + device='cuda:1'), in_proj_covar=tensor([0.0205, 0.0259, 0.0262, 0.0226, 0.0335, 0.0151, 0.0230, 0.0178], + device='cuda:1'), out_proj_covar=tensor([1.3469e-04, 1.7091e-04, 1.7793e-04, 1.3816e-04, 2.2046e-04, 9.7841e-05, + 1.4379e-04, 1.1717e-04], device='cuda:1') +2023-03-08 22:28:18,415 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26904.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 22:28:21,051 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26906.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:28:39,434 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7851, 3.9422, 5.3266, 4.7796, 3.7225, 3.0929, 4.6840, 5.5429], + device='cuda:1'), covar=tensor([0.0734, 0.1402, 0.0130, 0.0217, 0.0727, 0.0985, 0.0263, 0.0142], + device='cuda:1'), in_proj_covar=tensor([0.0133, 0.0220, 0.0085, 0.0150, 0.0168, 0.0171, 0.0160, 0.0116], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-08 22:28:56,609 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.356e+02 3.688e+02 4.427e+02 5.301e+02 1.390e+03, threshold=8.853e+02, percent-clipped=1.0 +2023-03-08 22:28:58,820 INFO [train.py:898] (1/4) Epoch 8, batch 1500, loss[loss=0.2086, simple_loss=0.2924, pruned_loss=0.06239, over 18396.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.286, pruned_loss=0.06188, over 3591198.06 frames. ], batch size: 52, lr: 1.43e-02, grad_scale: 8.0 +2023-03-08 22:29:12,578 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26950.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:29:17,099 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26954.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:29:30,514 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26965.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:29:58,014 INFO [train.py:898] (1/4) Epoch 8, batch 1550, loss[loss=0.2002, simple_loss=0.2792, pruned_loss=0.06054, over 18408.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2872, pruned_loss=0.06246, over 3588303.35 frames. 
], batch size: 48, lr: 1.43e-02, grad_scale: 8.0 +2023-03-08 22:30:24,864 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27011.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:30:54,387 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.371e+02 3.938e+02 4.762e+02 5.471e+02 1.136e+03, threshold=9.525e+02, percent-clipped=4.0 +2023-03-08 22:30:56,659 INFO [train.py:898] (1/4) Epoch 8, batch 1600, loss[loss=0.2024, simple_loss=0.2869, pruned_loss=0.05895, over 18553.00 frames. ], tot_loss[loss=0.206, simple_loss=0.287, pruned_loss=0.06249, over 3594415.09 frames. ], batch size: 49, lr: 1.43e-02, grad_scale: 8.0 +2023-03-08 22:31:34,804 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27070.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:31:35,054 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5749, 1.8898, 2.4958, 2.5358, 3.2353, 4.7874, 4.1771, 3.8355], + device='cuda:1'), covar=tensor([0.0939, 0.1766, 0.1832, 0.1169, 0.1543, 0.0074, 0.0351, 0.0337], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0259, 0.0264, 0.0227, 0.0336, 0.0152, 0.0229, 0.0177], + device='cuda:1'), out_proj_covar=tensor([1.3545e-04, 1.7065e-04, 1.7820e-04, 1.3801e-04, 2.2042e-04, 9.8485e-05, + 1.4265e-04, 1.1632e-04], device='cuda:1') +2023-03-08 22:31:56,212 INFO [train.py:898] (1/4) Epoch 8, batch 1650, loss[loss=0.203, simple_loss=0.2904, pruned_loss=0.05779, over 18377.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2863, pruned_loss=0.06157, over 3600974.61 frames. ], batch size: 55, lr: 1.42e-02, grad_scale: 8.0 +2023-03-08 22:32:31,984 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27118.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:32:53,448 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.332e+02 3.429e+02 4.052e+02 5.174e+02 1.566e+03, threshold=8.105e+02, percent-clipped=2.0 +2023-03-08 22:32:55,968 INFO [train.py:898] (1/4) Epoch 8, batch 1700, loss[loss=0.1884, simple_loss=0.2669, pruned_loss=0.05498, over 18349.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2861, pruned_loss=0.06161, over 3603743.77 frames. ], batch size: 46, lr: 1.42e-02, grad_scale: 8.0 +2023-03-08 22:33:07,196 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27148.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:33:55,120 INFO [train.py:898] (1/4) Epoch 8, batch 1750, loss[loss=0.225, simple_loss=0.3074, pruned_loss=0.07127, over 18003.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2862, pruned_loss=0.06175, over 3594762.81 frames. ], batch size: 65, lr: 1.42e-02, grad_scale: 8.0 +2023-03-08 22:33:56,661 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9475, 4.5834, 4.6804, 3.4063, 3.6982, 3.6563, 2.3042, 2.2290], + device='cuda:1'), covar=tensor([0.0185, 0.0107, 0.0067, 0.0233, 0.0335, 0.0188, 0.0809, 0.0899], + device='cuda:1'), in_proj_covar=tensor([0.0049, 0.0043, 0.0041, 0.0053, 0.0073, 0.0049, 0.0069, 0.0073], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0005, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-08 22:34:02,629 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.69 vs. 
limit=5.0 +2023-03-08 22:34:19,889 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27209.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:34:52,835 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.362e+02 3.433e+02 4.337e+02 5.594e+02 1.098e+03, threshold=8.674e+02, percent-clipped=7.0 +2023-03-08 22:34:55,194 INFO [train.py:898] (1/4) Epoch 8, batch 1800, loss[loss=0.2133, simple_loss=0.3064, pruned_loss=0.06007, over 17064.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2865, pruned_loss=0.06183, over 3586654.89 frames. ], batch size: 78, lr: 1.42e-02, grad_scale: 8.0 +2023-03-08 22:35:19,962 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27260.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 22:35:54,564 INFO [train.py:898] (1/4) Epoch 8, batch 1850, loss[loss=0.1859, simple_loss=0.2611, pruned_loss=0.05533, over 17678.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2863, pruned_loss=0.06182, over 3582969.78 frames. ], batch size: 39, lr: 1.42e-02, grad_scale: 8.0 +2023-03-08 22:36:13,777 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27306.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:36:50,629 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.586e+02 3.923e+02 4.847e+02 5.995e+02 1.610e+03, threshold=9.695e+02, percent-clipped=7.0 +2023-03-08 22:36:53,083 INFO [train.py:898] (1/4) Epoch 8, batch 1900, loss[loss=0.197, simple_loss=0.2866, pruned_loss=0.05374, over 18587.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2867, pruned_loss=0.06245, over 3586215.81 frames. ], batch size: 54, lr: 1.42e-02, grad_scale: 16.0 +2023-03-08 22:37:35,085 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-08 22:37:51,478 INFO [train.py:898] (1/4) Epoch 8, batch 1950, loss[loss=0.1891, simple_loss=0.2813, pruned_loss=0.04846, over 18405.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2871, pruned_loss=0.06269, over 3575870.81 frames. ], batch size: 52, lr: 1.42e-02, grad_scale: 16.0 +2023-03-08 22:38:47,714 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.257e+02 3.355e+02 4.079e+02 5.085e+02 1.650e+03, threshold=8.157e+02, percent-clipped=2.0 +2023-03-08 22:38:49,969 INFO [train.py:898] (1/4) Epoch 8, batch 2000, loss[loss=0.2123, simple_loss=0.2925, pruned_loss=0.0661, over 18387.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2867, pruned_loss=0.06219, over 3589158.49 frames. ], batch size: 56, lr: 1.42e-02, grad_scale: 16.0 +2023-03-08 22:38:56,716 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7104, 3.7237, 3.5386, 3.1197, 3.5661, 2.7205, 2.9763, 3.8354], + device='cuda:1'), covar=tensor([0.0037, 0.0060, 0.0063, 0.0105, 0.0060, 0.0151, 0.0136, 0.0041], + device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0097, 0.0088, 0.0132, 0.0086, 0.0132, 0.0139, 0.0074], + device='cuda:1'), out_proj_covar=tensor([9.9010e-05, 1.4318e-04, 1.2931e-04, 2.0303e-04, 1.2541e-04, 2.0192e-04, + 2.1060e-04, 1.0694e-04], device='cuda:1') +2023-03-08 22:39:48,978 INFO [train.py:898] (1/4) Epoch 8, batch 2050, loss[loss=0.2117, simple_loss=0.3064, pruned_loss=0.05856, over 18477.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.286, pruned_loss=0.06182, over 3600340.99 frames. ], batch size: 51, lr: 1.41e-02, grad_scale: 8.0 +2023-03-08 22:40:04,295 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. 
limit=2.0 +2023-03-08 22:40:06,972 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27504.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:40:46,830 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.547e+02 3.620e+02 4.320e+02 6.156e+02 2.106e+03, threshold=8.640e+02, percent-clipped=12.0 +2023-03-08 22:40:48,011 INFO [train.py:898] (1/4) Epoch 8, batch 2100, loss[loss=0.1932, simple_loss=0.2739, pruned_loss=0.05629, over 18257.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2863, pruned_loss=0.06205, over 3603111.69 frames. ], batch size: 47, lr: 1.41e-02, grad_scale: 8.0 +2023-03-08 22:41:12,680 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27560.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 22:41:47,089 INFO [train.py:898] (1/4) Epoch 8, batch 2150, loss[loss=0.1783, simple_loss=0.257, pruned_loss=0.04979, over 18173.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2857, pruned_loss=0.06183, over 3593820.54 frames. ], batch size: 44, lr: 1.41e-02, grad_scale: 8.0 +2023-03-08 22:41:55,543 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7392, 3.4615, 3.4415, 2.9409, 3.4370, 2.5205, 2.2804, 3.7832], + device='cuda:1'), covar=tensor([0.0029, 0.0072, 0.0072, 0.0119, 0.0059, 0.0202, 0.0263, 0.0037], + device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0098, 0.0088, 0.0134, 0.0086, 0.0133, 0.0139, 0.0074], + device='cuda:1'), out_proj_covar=tensor([9.9179e-05, 1.4433e-04, 1.2890e-04, 2.0649e-04, 1.2538e-04, 2.0358e-04, + 2.1143e-04, 1.0597e-04], device='cuda:1') +2023-03-08 22:42:07,679 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27606.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:42:09,889 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27608.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 22:42:45,318 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.528e+02 3.451e+02 4.193e+02 5.044e+02 8.620e+02, threshold=8.386e+02, percent-clipped=0.0 +2023-03-08 22:42:46,499 INFO [train.py:898] (1/4) Epoch 8, batch 2200, loss[loss=0.2559, simple_loss=0.3236, pruned_loss=0.09407, over 12593.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2866, pruned_loss=0.06214, over 3594109.97 frames. ], batch size: 130, lr: 1.41e-02, grad_scale: 8.0 +2023-03-08 22:43:01,780 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7812, 3.6147, 3.5678, 2.9436, 3.3514, 2.6280, 2.6399, 3.7000], + device='cuda:1'), covar=tensor([0.0031, 0.0054, 0.0060, 0.0121, 0.0070, 0.0175, 0.0178, 0.0043], + device='cuda:1'), in_proj_covar=tensor([0.0073, 0.0099, 0.0089, 0.0137, 0.0087, 0.0135, 0.0142, 0.0075], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-08 22:43:03,785 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27654.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:43:46,132 INFO [train.py:898] (1/4) Epoch 8, batch 2250, loss[loss=0.1955, simple_loss=0.2704, pruned_loss=0.0603, over 18265.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2857, pruned_loss=0.06193, over 3574837.70 frames. 
], batch size: 47, lr: 1.41e-02, grad_scale: 8.0 +2023-03-08 22:43:52,216 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0898, 2.5349, 2.1332, 2.3049, 3.0955, 3.0548, 2.7638, 2.6079], + device='cuda:1'), covar=tensor([0.0198, 0.0286, 0.0717, 0.0413, 0.0243, 0.0169, 0.0386, 0.0329], + device='cuda:1'), in_proj_covar=tensor([0.0109, 0.0093, 0.0143, 0.0123, 0.0092, 0.0073, 0.0121, 0.0118], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-08 22:44:15,656 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27714.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:44:22,846 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-08 22:44:28,937 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.4583, 2.0677, 2.8449, 2.7838, 3.6054, 5.3054, 4.5523, 4.1807], + device='cuda:1'), covar=tensor([0.0999, 0.1663, 0.1891, 0.1122, 0.1382, 0.0057, 0.0347, 0.0321], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0262, 0.0266, 0.0229, 0.0337, 0.0155, 0.0231, 0.0177], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-08 22:44:44,343 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.230e+02 4.253e+02 4.744e+02 6.262e+02 1.251e+03, threshold=9.489e+02, percent-clipped=5.0 +2023-03-08 22:44:45,485 INFO [train.py:898] (1/4) Epoch 8, batch 2300, loss[loss=0.2289, simple_loss=0.3127, pruned_loss=0.07256, over 18303.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2863, pruned_loss=0.06218, over 3579548.64 frames. ], batch size: 57, lr: 1.41e-02, grad_scale: 8.0 +2023-03-08 22:45:09,847 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7823, 4.4438, 4.5264, 3.5297, 3.6371, 3.5430, 2.4850, 2.1363], + device='cuda:1'), covar=tensor([0.0248, 0.0192, 0.0106, 0.0252, 0.0375, 0.0286, 0.0832, 0.1011], + device='cuda:1'), in_proj_covar=tensor([0.0051, 0.0043, 0.0043, 0.0054, 0.0074, 0.0051, 0.0069, 0.0074], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-08 22:45:28,245 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27775.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:45:44,259 INFO [train.py:898] (1/4) Epoch 8, batch 2350, loss[loss=0.1985, simple_loss=0.2802, pruned_loss=0.05838, over 18396.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2855, pruned_loss=0.06149, over 3589203.62 frames. ], batch size: 48, lr: 1.41e-02, grad_scale: 8.0 +2023-03-08 22:46:02,952 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27804.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:46:42,182 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.997e+02 3.557e+02 4.056e+02 4.769e+02 1.044e+03, threshold=8.112e+02, percent-clipped=2.0 +2023-03-08 22:46:43,706 INFO [train.py:898] (1/4) Epoch 8, batch 2400, loss[loss=0.2165, simple_loss=0.296, pruned_loss=0.06843, over 18356.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2863, pruned_loss=0.06176, over 3588240.57 frames. 
], batch size: 46, lr: 1.41e-02, grad_scale: 8.0 +2023-03-08 22:46:52,738 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8051, 3.6024, 3.5694, 3.0192, 3.4521, 3.0296, 2.6616, 3.8059], + device='cuda:1'), covar=tensor([0.0030, 0.0068, 0.0053, 0.0105, 0.0064, 0.0119, 0.0167, 0.0042], + device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0097, 0.0088, 0.0135, 0.0087, 0.0133, 0.0140, 0.0074], + device='cuda:1'), out_proj_covar=tensor([9.9618e-05, 1.4325e-04, 1.2807e-04, 2.0880e-04, 1.2579e-04, 2.0209e-04, + 2.1238e-04, 1.0612e-04], device='cuda:1') +2023-03-08 22:46:56,543 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-08 22:46:59,370 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27852.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:47:42,924 INFO [train.py:898] (1/4) Epoch 8, batch 2450, loss[loss=0.1933, simple_loss=0.2677, pruned_loss=0.05947, over 18256.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2862, pruned_loss=0.06148, over 3596320.17 frames. ], batch size: 45, lr: 1.40e-02, grad_scale: 8.0 +2023-03-08 22:48:26,149 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3061, 5.2054, 5.5302, 5.3439, 5.1530, 6.0925, 5.6468, 5.2506], + device='cuda:1'), covar=tensor([0.1036, 0.0624, 0.0694, 0.0653, 0.1557, 0.0794, 0.0780, 0.1641], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0214, 0.0222, 0.0222, 0.0263, 0.0323, 0.0210, 0.0311], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-08 22:48:41,269 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.646e+02 3.668e+02 4.454e+02 5.440e+02 2.886e+03, threshold=8.907e+02, percent-clipped=9.0 +2023-03-08 22:48:42,417 INFO [train.py:898] (1/4) Epoch 8, batch 2500, loss[loss=0.1854, simple_loss=0.2658, pruned_loss=0.05256, over 17174.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2856, pruned_loss=0.06125, over 3598167.20 frames. ], batch size: 38, lr: 1.40e-02, grad_scale: 8.0 +2023-03-08 22:49:41,161 INFO [train.py:898] (1/4) Epoch 8, batch 2550, loss[loss=0.2235, simple_loss=0.3075, pruned_loss=0.06971, over 18316.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2866, pruned_loss=0.06177, over 3584559.11 frames. ], batch size: 57, lr: 1.40e-02, grad_scale: 8.0 +2023-03-08 22:50:45,008 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.293e+02 3.705e+02 4.339e+02 5.125e+02 8.562e+02, threshold=8.678e+02, percent-clipped=0.0 +2023-03-08 22:50:45,033 INFO [train.py:898] (1/4) Epoch 8, batch 2600, loss[loss=0.2273, simple_loss=0.3073, pruned_loss=0.0736, over 18400.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2867, pruned_loss=0.06154, over 3596118.05 frames. ], batch size: 52, lr: 1.40e-02, grad_scale: 4.0 +2023-03-08 22:51:22,039 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28070.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 22:51:25,907 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-03-08 22:51:43,992 INFO [train.py:898] (1/4) Epoch 8, batch 2650, loss[loss=0.2368, simple_loss=0.313, pruned_loss=0.08027, over 16538.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.286, pruned_loss=0.06125, over 3597932.56 frames. 
], batch size: 94, lr: 1.40e-02, grad_scale: 4.0 +2023-03-08 22:52:11,783 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6487, 3.9227, 5.4217, 4.5969, 3.3213, 3.3007, 4.5886, 5.5304], + device='cuda:1'), covar=tensor([0.0849, 0.1209, 0.0045, 0.0264, 0.0776, 0.0883, 0.0288, 0.0099], + device='cuda:1'), in_proj_covar=tensor([0.0133, 0.0220, 0.0086, 0.0149, 0.0169, 0.0170, 0.0159, 0.0118], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-08 22:52:42,846 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.340e+02 3.798e+02 4.426e+02 5.240e+02 9.211e+02, threshold=8.852e+02, percent-clipped=2.0 +2023-03-08 22:52:42,883 INFO [train.py:898] (1/4) Epoch 8, batch 2700, loss[loss=0.2184, simple_loss=0.3018, pruned_loss=0.06751, over 17122.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.2864, pruned_loss=0.06138, over 3593011.04 frames. ], batch size: 78, lr: 1.40e-02, grad_scale: 4.0 +2023-03-08 22:52:44,518 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. limit=2.0 +2023-03-08 22:53:02,980 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5886, 3.6655, 2.3197, 3.8691, 4.6144, 2.4584, 3.3710, 3.5126], + device='cuda:1'), covar=tensor([0.0094, 0.1109, 0.1589, 0.0488, 0.0057, 0.1267, 0.0732, 0.0765], + device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0208, 0.0183, 0.0183, 0.0080, 0.0170, 0.0193, 0.0195], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 22:53:41,233 INFO [train.py:898] (1/4) Epoch 8, batch 2750, loss[loss=0.2067, simple_loss=0.2869, pruned_loss=0.06325, over 18488.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2869, pruned_loss=0.06199, over 3586627.95 frames. ], batch size: 51, lr: 1.40e-02, grad_scale: 4.0 +2023-03-08 22:53:45,511 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-08 22:54:29,063 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28229.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:54:40,878 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.331e+02 3.538e+02 4.348e+02 5.069e+02 1.374e+03, threshold=8.696e+02, percent-clipped=5.0 +2023-03-08 22:54:40,914 INFO [train.py:898] (1/4) Epoch 8, batch 2800, loss[loss=0.2163, simple_loss=0.3043, pruned_loss=0.06411, over 17760.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2872, pruned_loss=0.0622, over 3577184.18 frames. ], batch size: 70, lr: 1.40e-02, grad_scale: 8.0 +2023-03-08 22:54:59,982 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28255.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 22:55:37,537 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28287.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:55:39,230 INFO [train.py:898] (1/4) Epoch 8, batch 2850, loss[loss=0.1977, simple_loss=0.2664, pruned_loss=0.06452, over 18467.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2863, pruned_loss=0.06168, over 3591769.45 frames. 
], batch size: 43, lr: 1.39e-02, grad_scale: 8.0 +2023-03-08 22:55:40,863 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28290.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:55:55,498 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.4935, 3.7437, 2.5688, 3.7428, 4.5495, 2.4774, 3.3977, 3.4655], + device='cuda:1'), covar=tensor([0.0107, 0.1004, 0.1395, 0.0533, 0.0055, 0.1262, 0.0686, 0.0742], + device='cuda:1'), in_proj_covar=tensor([0.0099, 0.0203, 0.0180, 0.0179, 0.0079, 0.0169, 0.0191, 0.0191], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-08 22:56:11,259 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28316.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 22:56:38,157 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.182e+02 3.670e+02 4.425e+02 5.374e+02 1.143e+03, threshold=8.851e+02, percent-clipped=3.0 +2023-03-08 22:56:38,183 INFO [train.py:898] (1/4) Epoch 8, batch 2900, loss[loss=0.241, simple_loss=0.316, pruned_loss=0.08302, over 13089.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2864, pruned_loss=0.06162, over 3587011.04 frames. ], batch size: 130, lr: 1.39e-02, grad_scale: 8.0 +2023-03-08 22:56:49,649 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28348.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:57:14,649 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28370.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:57:22,820 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-03-08 22:57:36,801 INFO [train.py:898] (1/4) Epoch 8, batch 2950, loss[loss=0.2651, simple_loss=0.3349, pruned_loss=0.09765, over 12537.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2863, pruned_loss=0.06177, over 3584861.33 frames. ], batch size: 129, lr: 1.39e-02, grad_scale: 8.0 +2023-03-08 22:57:56,164 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2103, 5.8019, 5.2481, 5.5319, 5.2942, 5.1926, 5.8073, 5.7912], + device='cuda:1'), covar=tensor([0.1125, 0.0547, 0.0524, 0.0666, 0.1417, 0.0731, 0.0567, 0.0590], + device='cuda:1'), in_proj_covar=tensor([0.0480, 0.0383, 0.0305, 0.0431, 0.0585, 0.0432, 0.0529, 0.0416], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-08 22:58:07,822 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2829, 5.5428, 3.0446, 5.3480, 5.2832, 5.5885, 5.3597, 2.9852], + device='cuda:1'), covar=tensor([0.0139, 0.0057, 0.0596, 0.0051, 0.0053, 0.0055, 0.0077, 0.0771], + device='cuda:1'), in_proj_covar=tensor([0.0070, 0.0060, 0.0085, 0.0074, 0.0069, 0.0057, 0.0071, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-08 22:58:08,188 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. 
limit=2.0 +2023-03-08 22:58:11,172 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28418.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:58:36,005 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.441e+02 3.351e+02 4.042e+02 5.395e+02 3.528e+03, threshold=8.084e+02, percent-clipped=8.0 +2023-03-08 22:58:36,041 INFO [train.py:898] (1/4) Epoch 8, batch 3000, loss[loss=0.1953, simple_loss=0.2829, pruned_loss=0.05382, over 18388.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2857, pruned_loss=0.06136, over 3586612.48 frames. ], batch size: 48, lr: 1.39e-02, grad_scale: 8.0 +2023-03-08 22:58:36,041 INFO [train.py:923] (1/4) Computing validation loss +2023-03-08 22:58:43,017 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5088, 4.9988, 4.8785, 4.7838, 4.5237, 4.4388, 5.0287, 4.9903], + device='cuda:1'), covar=tensor([0.1206, 0.0668, 0.0361, 0.0751, 0.1740, 0.0877, 0.0648, 0.0751], + device='cuda:1'), in_proj_covar=tensor([0.0477, 0.0377, 0.0302, 0.0425, 0.0582, 0.0427, 0.0524, 0.0412], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-08 22:58:47,831 INFO [train.py:932] (1/4) Epoch 8, validation: loss=0.165, simple_loss=0.2676, pruned_loss=0.03118, over 944034.00 frames. +2023-03-08 22:58:47,832 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-08 22:58:54,254 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-08 22:59:42,575 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28486.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:59:46,115 INFO [train.py:898] (1/4) Epoch 8, batch 3050, loss[loss=0.2196, simple_loss=0.2987, pruned_loss=0.07024, over 16097.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2862, pruned_loss=0.06195, over 3583774.17 frames. ], batch size: 94, lr: 1.39e-02, grad_scale: 8.0 +2023-03-08 22:59:49,928 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28492.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:59:54,456 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6755, 4.1410, 2.7477, 3.9328, 3.9510, 4.1670, 3.9734, 2.6832], + device='cuda:1'), covar=tensor([0.0145, 0.0063, 0.0554, 0.0157, 0.0073, 0.0060, 0.0110, 0.0815], + device='cuda:1'), in_proj_covar=tensor([0.0070, 0.0060, 0.0085, 0.0073, 0.0069, 0.0057, 0.0071, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-08 23:00:44,057 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.579e+02 3.667e+02 4.403e+02 5.909e+02 1.221e+03, threshold=8.806e+02, percent-clipped=6.0 +2023-03-08 23:00:44,083 INFO [train.py:898] (1/4) Epoch 8, batch 3100, loss[loss=0.2606, simple_loss=0.3199, pruned_loss=0.1006, over 12520.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2867, pruned_loss=0.06224, over 3567758.36 frames. ], batch size: 129, lr: 1.39e-02, grad_scale: 8.0 +2023-03-08 23:00:50,107 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.25 vs. 
limit=5.0
+2023-03-08 23:00:54,220 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28547.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:01:01,071 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28553.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:01:39,116 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28585.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:01:43,487 INFO [train.py:898] (1/4) Epoch 8, batch 3150, loss[loss=0.2031, simple_loss=0.2941, pruned_loss=0.05603, over 18616.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2866, pruned_loss=0.06202, over 3575910.63 frames. ], batch size: 52, lr: 1.39e-02, grad_scale: 8.0
+2023-03-08 23:01:59,178 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0530, 5.0894, 2.6697, 4.9268, 4.8074, 5.1657, 4.8233, 2.6509],
+ device='cuda:1'), covar=tensor([0.0145, 0.0065, 0.0697, 0.0079, 0.0076, 0.0065, 0.0110, 0.0940],
+ device='cuda:1'), in_proj_covar=tensor([0.0071, 0.0061, 0.0086, 0.0075, 0.0071, 0.0058, 0.0072, 0.0090],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005],
+ device='cuda:1')
+2023-03-08 23:02:10,537 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28611.0, num_to_drop=1, layers_to_drop={0}
+2023-03-08 23:02:26,378 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0800, 5.0349, 4.3603, 4.9915, 5.0150, 4.5820, 4.9647, 4.6107],
+ device='cuda:1'), covar=tensor([0.0596, 0.0752, 0.2012, 0.0983, 0.0744, 0.0549, 0.0568, 0.1162],
+ device='cuda:1'), in_proj_covar=tensor([0.0370, 0.0428, 0.0563, 0.0337, 0.0306, 0.0389, 0.0414, 0.0523],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006],
+ device='cuda:1')
+2023-03-08 23:02:43,325 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.977e+02 3.448e+02 4.056e+02 5.168e+02 1.166e+03, threshold=8.112e+02, percent-clipped=4.0
+2023-03-08 23:02:43,352 INFO [train.py:898] (1/4) Epoch 8, batch 3200, loss[loss=0.188, simple_loss=0.2806, pruned_loss=0.04771, over 18353.00 frames. ], tot_loss[loss=0.2056, simple_loss=0.2869, pruned_loss=0.06211, over 3569496.46 frames. ], batch size: 55, lr: 1.39e-02, grad_scale: 8.0
+2023-03-08 23:02:48,217 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28643.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:03:23,632 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9936, 2.8885, 2.0116, 3.3049, 2.4677, 3.2205, 2.0640, 2.8126],
+ device='cuda:1'), covar=tensor([0.0449, 0.0654, 0.1117, 0.0433, 0.0739, 0.0282, 0.1002, 0.0394],
+ device='cuda:1'), in_proj_covar=tensor([0.0173, 0.0201, 0.0173, 0.0209, 0.0172, 0.0208, 0.0184, 0.0174],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-08 23:03:27,492 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0
+2023-03-08 23:03:42,537 INFO [train.py:898] (1/4) Epoch 8, batch 3250, loss[loss=0.2142, simple_loss=0.2942, pruned_loss=0.06715, over 18319.00 frames. ], tot_loss[loss=0.2055, simple_loss=0.2865, pruned_loss=0.06231, over 3576724.77 frames. ], batch size: 54, lr: 1.39e-02, grad_scale: 4.0
+2023-03-08 23:03:48,120 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. limit=2.0
+2023-03-08 23:04:14,286 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.61 vs. limit=5.0
+2023-03-08 23:04:42,244 INFO [train.py:898] (1/4) Epoch 8, batch 3300, loss[loss=0.2132, simple_loss=0.3104, pruned_loss=0.05802, over 18497.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2856, pruned_loss=0.06162, over 3584965.26 frames. ], batch size: 53, lr: 1.38e-02, grad_scale: 4.0
+2023-03-08 23:04:43,379 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.421e+02 3.488e+02 4.190e+02 5.293e+02 2.938e+03, threshold=8.380e+02, percent-clipped=5.0
+2023-03-08 23:05:41,144 INFO [train.py:898] (1/4) Epoch 8, batch 3350, loss[loss=0.2111, simple_loss=0.287, pruned_loss=0.06754, over 18551.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2841, pruned_loss=0.06069, over 3594695.50 frames. ], batch size: 49, lr: 1.38e-02, grad_scale: 4.0
+2023-03-08 23:06:40,371 INFO [train.py:898] (1/4) Epoch 8, batch 3400, loss[loss=0.2287, simple_loss=0.3185, pruned_loss=0.06945, over 18509.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2834, pruned_loss=0.06035, over 3606105.45 frames. ], batch size: 53, lr: 1.38e-02, grad_scale: 4.0
+2023-03-08 23:06:40,697 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1797, 4.1046, 3.8662, 4.1320, 4.1426, 3.7182, 4.0971, 3.9590],
+ device='cuda:1'), covar=tensor([0.0484, 0.0754, 0.1398, 0.0678, 0.0535, 0.0469, 0.0472, 0.0966],
+ device='cuda:1'), in_proj_covar=tensor([0.0361, 0.0422, 0.0555, 0.0331, 0.0300, 0.0382, 0.0406, 0.0510],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0005],
+ device='cuda:1')
+2023-03-08 23:06:41,526 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.584e+02 3.993e+02 4.817e+02 5.813e+02 1.322e+03, threshold=9.634e+02, percent-clipped=7.0
+2023-03-08 23:06:41,959 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7260, 4.4611, 4.6960, 3.2934, 3.6757, 3.7055, 2.6187, 2.3071],
+ device='cuda:1'), covar=tensor([0.0228, 0.0188, 0.0051, 0.0257, 0.0317, 0.0184, 0.0680, 0.0825],
+ device='cuda:1'), in_proj_covar=tensor([0.0052, 0.0044, 0.0042, 0.0056, 0.0075, 0.0052, 0.0069, 0.0075],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0005],
+ device='cuda:1')
+2023-03-08 23:06:44,149 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28842.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:06:50,912 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28848.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:07:07,374 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28861.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:07:35,095 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28885.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:07:39,414 INFO [train.py:898] (1/4) Epoch 8, batch 3450, loss[loss=0.225, simple_loss=0.3091, pruned_loss=0.07049, over 17270.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2841, pruned_loss=0.06065, over 3585250.24 frames. ], batch size: 78, lr: 1.38e-02, grad_scale: 4.0
+2023-03-08 23:07:44,838 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.98 vs. limit=2.0
+2023-03-08 23:08:06,073 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28911.0, num_to_drop=1, layers_to_drop={1}
+2023-03-08 23:08:18,931 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28922.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:08:31,081 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28933.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:08:38,989 INFO [train.py:898] (1/4) Epoch 8, batch 3500, loss[loss=0.2439, simple_loss=0.3223, pruned_loss=0.08279, over 18480.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2837, pruned_loss=0.06066, over 3587586.70 frames. ], batch size: 59, lr: 1.38e-02, grad_scale: 4.0
+2023-03-08 23:08:40,151 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.088e+02 3.743e+02 4.474e+02 5.691e+02 1.966e+03, threshold=8.949e+02, percent-clipped=4.0
+2023-03-08 23:08:43,931 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28943.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:09:02,281 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28959.0, num_to_drop=1, layers_to_drop={0}
+2023-03-08 23:09:35,649 INFO [train.py:898] (1/4) Epoch 8, batch 3550, loss[loss=0.2013, simple_loss=0.2925, pruned_loss=0.05502, over 18583.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2836, pruned_loss=0.0603, over 3589798.38 frames. ], batch size: 54, lr: 1.38e-02, grad_scale: 4.0
+2023-03-08 23:09:37,855 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28991.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:09:50,555 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29002.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:10:30,399 INFO [train.py:898] (1/4) Epoch 8, batch 3600, loss[loss=0.2477, simple_loss=0.3086, pruned_loss=0.09343, over 12940.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.284, pruned_loss=0.06033, over 3586139.19 frames. ], batch size: 130, lr: 1.38e-02, grad_scale: 8.0
+2023-03-08 23:10:31,431 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.295e+02 3.435e+02 4.194e+02 5.068e+02 1.055e+03, threshold=8.389e+02, percent-clipped=1.0
+2023-03-08 23:10:56,746 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29063.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:11:37,020 INFO [train.py:898] (1/4) Epoch 9, batch 0, loss[loss=0.2092, simple_loss=0.2955, pruned_loss=0.06149, over 18295.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2955, pruned_loss=0.06149, over 18295.00 frames. ], batch size: 54, lr: 1.30e-02, grad_scale: 8.0
+2023-03-08 23:11:37,021 INFO [train.py:923] (1/4) Computing validation loss
+2023-03-08 23:11:48,956 INFO [train.py:932] (1/4) Epoch 9, validation: loss=0.1674, simple_loss=0.2698, pruned_loss=0.03254, over 944034.00 frames. 
+2023-03-08 23:11:48,956 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB
+2023-03-08 23:12:38,170 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6647, 5.2425, 5.2321, 5.1848, 4.8494, 5.1026, 4.5394, 5.0746],
+ device='cuda:1'), covar=tensor([0.0187, 0.0260, 0.0174, 0.0252, 0.0268, 0.0215, 0.0991, 0.0242],
+ device='cuda:1'), in_proj_covar=tensor([0.0156, 0.0197, 0.0187, 0.0213, 0.0197, 0.0207, 0.0264, 0.0190],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005],
+ device='cuda:1')
+2023-03-08 23:12:44,138 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3961, 2.7247, 2.4166, 2.5926, 3.4247, 3.3008, 2.8077, 2.7427],
+ device='cuda:1'), covar=tensor([0.0167, 0.0290, 0.0705, 0.0441, 0.0171, 0.0153, 0.0419, 0.0406],
+ device='cuda:1'), in_proj_covar=tensor([0.0106, 0.0093, 0.0146, 0.0125, 0.0091, 0.0075, 0.0120, 0.0115],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-08 23:12:48,241 INFO [train.py:898] (1/4) Epoch 9, batch 50, loss[loss=0.2119, simple_loss=0.2981, pruned_loss=0.06283, over 18310.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2826, pruned_loss=0.05809, over 817649.91 frames. ], batch size: 54, lr: 1.30e-02, grad_scale: 8.0
+2023-03-08 23:13:08,330 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.388e+02 3.677e+02 4.281e+02 4.949e+02 1.360e+03, threshold=8.563e+02, percent-clipped=6.0
+2023-03-08 23:13:10,910 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29142.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:13:18,819 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29148.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:13:25,809 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5196, 3.3734, 2.0757, 4.2242, 2.8253, 4.4012, 2.2935, 3.8975],
+ device='cuda:1'), covar=tensor([0.0496, 0.0792, 0.1310, 0.0463, 0.0882, 0.0198, 0.1180, 0.0340],
+ device='cuda:1'), in_proj_covar=tensor([0.0174, 0.0202, 0.0172, 0.0210, 0.0170, 0.0207, 0.0185, 0.0174],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-08 23:13:47,195 INFO [train.py:898] (1/4) Epoch 9, batch 100, loss[loss=0.2057, simple_loss=0.287, pruned_loss=0.06217, over 18544.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2859, pruned_loss=0.06108, over 1429468.63 frames. ], batch size: 49, lr: 1.30e-02, grad_scale: 8.0
+2023-03-08 23:14:07,639 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29190.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:14:14,314 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29196.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:14:36,155 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4926, 4.5397, 2.5618, 4.5698, 5.5613, 2.6432, 4.1919, 4.0469],
+ device='cuda:1'), covar=tensor([0.0061, 0.0922, 0.1514, 0.0474, 0.0038, 0.1203, 0.0535, 0.0682],
+ device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0209, 0.0184, 0.0184, 0.0081, 0.0168, 0.0198, 0.0194],
+ device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-08 23:14:39,427 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29217.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:14:46,033 INFO [train.py:898] (1/4) Epoch 9, batch 150, loss[loss=0.2006, simple_loss=0.285, pruned_loss=0.05815, over 18378.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2826, pruned_loss=0.05943, over 1922382.36 frames. ], batch size: 50, lr: 1.30e-02, grad_scale: 8.0
+2023-03-08 23:14:56,813 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0
+2023-03-08 23:15:00,930 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29236.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:15:05,098 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.617e+02 3.595e+02 4.206e+02 4.990e+02 1.116e+03, threshold=8.412e+02, percent-clipped=1.0
+2023-03-08 23:15:44,239 INFO [train.py:898] (1/4) Epoch 9, batch 200, loss[loss=0.1907, simple_loss=0.2782, pruned_loss=0.05162, over 17761.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2833, pruned_loss=0.05996, over 2288295.66 frames. ], batch size: 70, lr: 1.30e-02, grad_scale: 8.0
+2023-03-08 23:15:53,310 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0
+2023-03-08 23:16:04,315 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2694, 5.1434, 5.3937, 5.3661, 5.0818, 5.9518, 5.6300, 5.2525],
+ device='cuda:1'), covar=tensor([0.0972, 0.0714, 0.0700, 0.0711, 0.1422, 0.0799, 0.0705, 0.1776],
+ device='cuda:1'), in_proj_covar=tensor([0.0285, 0.0218, 0.0226, 0.0224, 0.0269, 0.0328, 0.0212, 0.0312],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-08 23:16:13,828 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29297.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:16:27,228 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5739, 2.7575, 2.3855, 2.8016, 3.5944, 3.5002, 2.8801, 2.9531],
+ device='cuda:1'), covar=tensor([0.0155, 0.0264, 0.0574, 0.0291, 0.0136, 0.0126, 0.0335, 0.0298],
+ device='cuda:1'), in_proj_covar=tensor([0.0107, 0.0092, 0.0142, 0.0125, 0.0091, 0.0075, 0.0120, 0.0115],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-08 23:16:44,290 INFO [train.py:898] (1/4) Epoch 9, batch 250, loss[loss=0.2007, simple_loss=0.2865, pruned_loss=0.05745, over 18493.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2827, pruned_loss=0.05901, over 2581859.02 frames. ], batch size: 53, lr: 1.30e-02, grad_scale: 8.0
+2023-03-08 23:17:03,793 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.373e+02 3.539e+02 4.403e+02 5.304e+02 1.212e+03, threshold=8.805e+02, percent-clipped=3.0
+2023-03-08 23:17:25,607 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29358.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:17:36,891 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.3751, 3.8535, 5.4164, 4.4357, 2.9925, 2.5977, 4.6096, 5.4841],
+ device='cuda:1'), covar=tensor([0.0948, 0.1576, 0.0051, 0.0320, 0.0939, 0.1233, 0.0309, 0.0089],
+ device='cuda:1'), in_proj_covar=tensor([0.0132, 0.0223, 0.0085, 0.0149, 0.0167, 0.0169, 0.0158, 0.0121],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-08 23:17:43,327 INFO [train.py:898] (1/4) Epoch 9, batch 300, loss[loss=0.2273, simple_loss=0.3145, pruned_loss=0.07001, over 17149.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2831, pruned_loss=0.05907, over 2800617.35 frames. ], batch size: 78, lr: 1.30e-02, grad_scale: 8.0
+2023-03-08 23:17:55,311 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3788, 5.4952, 2.7420, 5.2538, 5.1079, 5.5460, 5.3583, 2.7489],
+ device='cuda:1'), covar=tensor([0.0130, 0.0046, 0.0701, 0.0054, 0.0061, 0.0041, 0.0067, 0.0859],
+ device='cuda:1'), in_proj_covar=tensor([0.0070, 0.0060, 0.0086, 0.0074, 0.0071, 0.0058, 0.0070, 0.0089],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005],
+ device='cuda:1')
+2023-03-08 23:18:42,987 INFO [train.py:898] (1/4) Epoch 9, batch 350, loss[loss=0.1848, simple_loss=0.2613, pruned_loss=0.05413, over 18506.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2834, pruned_loss=0.05938, over 2971804.24 frames. ], batch size: 44, lr: 1.30e-02, grad_scale: 8.0
+2023-03-08 23:19:02,291 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.167e+02 3.494e+02 4.009e+02 5.079e+02 1.236e+03, threshold=8.018e+02, percent-clipped=2.0
+2023-03-08 23:19:41,978 INFO [train.py:898] (1/4) Epoch 9, batch 400, loss[loss=0.1813, simple_loss=0.2669, pruned_loss=0.04785, over 18286.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2849, pruned_loss=0.05995, over 3102139.18 frames. ], batch size: 49, lr: 1.29e-02, grad_scale: 8.0
+2023-03-08 23:19:59,314 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3332, 3.0966, 1.5546, 3.7175, 2.5520, 3.5754, 1.9107, 2.9791],
+ device='cuda:1'), covar=tensor([0.0409, 0.0687, 0.1442, 0.0389, 0.0794, 0.0367, 0.1257, 0.0507],
+ device='cuda:1'), in_proj_covar=tensor([0.0177, 0.0204, 0.0176, 0.0216, 0.0174, 0.0215, 0.0185, 0.0176],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-08 23:20:31,264 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4272, 5.3527, 4.9598, 5.3187, 5.3195, 4.6520, 5.2120, 4.9653],
+ device='cuda:1'), covar=tensor([0.0365, 0.0438, 0.1359, 0.0786, 0.0482, 0.0408, 0.0369, 0.0917],
+ device='cuda:1'), in_proj_covar=tensor([0.0364, 0.0426, 0.0573, 0.0342, 0.0309, 0.0385, 0.0410, 0.0520],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006],
+ device='cuda:1')
+2023-03-08 23:20:33,994 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29517.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:20:40,638 INFO [train.py:898] (1/4) Epoch 9, batch 450, loss[loss=0.2401, simple_loss=0.3119, pruned_loss=0.08415, over 18090.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2845, pruned_loss=0.0598, over 3214184.57 frames. ], batch size: 62, lr: 1.29e-02, grad_scale: 8.0
+2023-03-08 23:20:59,762 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.081e+02 3.569e+02 4.141e+02 5.253e+02 9.990e+02, threshold=8.283e+02, percent-clipped=5.0
+2023-03-08 23:21:12,889 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29551.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:21:29,964 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29565.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:21:40,047 INFO [train.py:898] (1/4) Epoch 9, batch 500, loss[loss=0.203, simple_loss=0.2897, pruned_loss=0.05815, over 16035.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2839, pruned_loss=0.05973, over 3304365.57 frames. ], batch size: 94, lr: 1.29e-02, grad_scale: 8.0
+2023-03-08 23:21:45,948 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3961, 6.0756, 5.5336, 5.7790, 5.5710, 5.5137, 6.1109, 6.0163],
+ device='cuda:1'), covar=tensor([0.1145, 0.0501, 0.0410, 0.0656, 0.1213, 0.0619, 0.0429, 0.0517],
+ device='cuda:1'), in_proj_covar=tensor([0.0477, 0.0383, 0.0299, 0.0426, 0.0583, 0.0423, 0.0537, 0.0415],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004],
+ device='cuda:1')
+2023-03-08 23:22:01,737 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29592.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:22:25,388 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29612.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:22:38,317 INFO [train.py:898] (1/4) Epoch 9, batch 550, loss[loss=0.2152, simple_loss=0.3044, pruned_loss=0.06301, over 17335.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.282, pruned_loss=0.05911, over 3375260.48 frames. ], batch size: 79, lr: 1.29e-02, grad_scale: 8.0
+2023-03-08 23:22:58,660 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.384e+02 3.638e+02 4.636e+02 5.710e+02 1.392e+03, threshold=9.272e+02, percent-clipped=6.0
+2023-03-08 23:23:01,332 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6121, 5.2963, 5.2126, 5.2213, 4.8033, 5.1323, 4.4578, 5.0278],
+ device='cuda:1'), covar=tensor([0.0263, 0.0242, 0.0188, 0.0310, 0.0353, 0.0207, 0.1115, 0.0348],
+ device='cuda:1'), in_proj_covar=tensor([0.0159, 0.0202, 0.0189, 0.0218, 0.0202, 0.0207, 0.0273, 0.0197],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005],
+ device='cuda:1')
+2023-03-08 23:23:19,539 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29658.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:23:24,147 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29662.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:23:37,406 INFO [train.py:898] (1/4) Epoch 9, batch 600, loss[loss=0.1977, simple_loss=0.2802, pruned_loss=0.05756, over 18422.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2831, pruned_loss=0.05946, over 3428181.90 frames. ], batch size: 48, lr: 1.29e-02, grad_scale: 8.0
+2023-03-08 23:24:16,695 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29706.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:24:36,232 INFO [train.py:898] (1/4) Epoch 9, batch 650, loss[loss=0.2112, simple_loss=0.2978, pruned_loss=0.0623, over 18619.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2817, pruned_loss=0.05888, over 3461546.76 frames. ], batch size: 52, lr: 1.29e-02, grad_scale: 8.0
+2023-03-08 23:24:36,675 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29723.0, num_to_drop=1, layers_to_drop={3}
+2023-03-08 23:24:57,205 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.211e+02 3.283e+02 4.124e+02 5.101e+02 2.453e+03, threshold=8.247e+02, percent-clipped=7.0
+2023-03-08 23:25:35,094 INFO [train.py:898] (1/4) Epoch 9, batch 700, loss[loss=0.2203, simple_loss=0.3075, pruned_loss=0.06653, over 17019.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2823, pruned_loss=0.059, over 3481329.01 frames. ], batch size: 78, lr: 1.29e-02, grad_scale: 8.0
+2023-03-08 23:25:41,013 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.70 vs. limit=2.0
+2023-03-08 23:26:03,250 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.76 vs. limit=5.0
+2023-03-08 23:26:10,217 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29802.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:26:34,255 INFO [train.py:898] (1/4) Epoch 9, batch 750, loss[loss=0.2165, simple_loss=0.31, pruned_loss=0.06144, over 18494.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.282, pruned_loss=0.05857, over 3504287.74 frames. ], batch size: 53, lr: 1.29e-02, grad_scale: 8.0
+2023-03-08 23:26:55,142 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.192e+02 3.216e+02 4.021e+02 4.703e+02 9.794e+02, threshold=8.041e+02, percent-clipped=2.0
+2023-03-08 23:27:21,802 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29863.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:27:26,706 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0
+2023-03-08 23:27:31,457 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0
+2023-03-08 23:27:32,992 INFO [train.py:898] (1/4) Epoch 9, batch 800, loss[loss=0.1647, simple_loss=0.246, pruned_loss=0.04172, over 18507.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2818, pruned_loss=0.05855, over 3524740.10 frames. ], batch size: 44, lr: 1.29e-02, grad_scale: 8.0
+2023-03-08 23:27:56,519 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29892.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:28:14,027 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29907.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:28:32,034 INFO [train.py:898] (1/4) Epoch 9, batch 850, loss[loss=0.1641, simple_loss=0.25, pruned_loss=0.03911, over 18246.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.282, pruned_loss=0.05839, over 3544106.89 frames. ], batch size: 45, lr: 1.29e-02, grad_scale: 8.0
+2023-03-08 23:28:41,044 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.51 vs. limit=5.0
+2023-03-08 23:28:52,841 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.076e+02 3.769e+02 4.369e+02 5.199e+02 8.545e+02, threshold=8.737e+02, percent-clipped=2.0
+2023-03-08 23:28:53,055 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29940.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:29:13,494 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0669, 5.6518, 5.2926, 5.4008, 5.0824, 5.0786, 5.6886, 5.6186],
+ device='cuda:1'), covar=tensor([0.1184, 0.0633, 0.0499, 0.0701, 0.1645, 0.0753, 0.0588, 0.0633],
+ device='cuda:1'), in_proj_covar=tensor([0.0487, 0.0391, 0.0303, 0.0431, 0.0603, 0.0431, 0.0554, 0.0424],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004],
+ device='cuda:1')
+2023-03-08 23:29:26,228 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3075, 5.0793, 5.5210, 5.4796, 5.1506, 6.0292, 5.7013, 5.2980],
+ device='cuda:1'), covar=tensor([0.0939, 0.0666, 0.0566, 0.0559, 0.1358, 0.0661, 0.0573, 0.1436],
+ device='cuda:1'), in_proj_covar=tensor([0.0286, 0.0219, 0.0226, 0.0224, 0.0268, 0.0324, 0.0213, 0.0312],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-08 23:29:31,701 INFO [train.py:898] (1/4) Epoch 9, batch 900, loss[loss=0.2364, simple_loss=0.3115, pruned_loss=0.08064, over 18490.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.282, pruned_loss=0.05819, over 3563792.40 frames. ], batch size: 59, lr: 1.28e-02, grad_scale: 8.0
+2023-03-08 23:30:30,149 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30018.0, num_to_drop=1, layers_to_drop={3}
+2023-03-08 23:30:31,574 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7361, 3.6858, 5.3140, 4.4439, 3.2109, 2.9849, 4.4073, 5.3746],
+ device='cuda:1'), covar=tensor([0.0799, 0.1491, 0.0061, 0.0284, 0.0892, 0.1100, 0.0361, 0.0112],
+ device='cuda:1'), in_proj_covar=tensor([0.0132, 0.0224, 0.0087, 0.0149, 0.0168, 0.0170, 0.0157, 0.0122],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-08 23:30:35,483 INFO [train.py:898] (1/4) Epoch 9, batch 950, loss[loss=0.1963, simple_loss=0.2733, pruned_loss=0.05961, over 18509.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.283, pruned_loss=0.05883, over 3569580.09 frames. ], batch size: 47, lr: 1.28e-02, grad_scale: 8.0
+2023-03-08 23:30:56,116 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.525e+02 3.421e+02 3.887e+02 4.758e+02 7.533e+02, threshold=7.773e+02, percent-clipped=0.0
+2023-03-08 23:31:08,744 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5395, 5.2477, 5.2274, 5.2000, 4.7578, 5.0779, 4.4385, 5.0083],
+ device='cuda:1'), covar=tensor([0.0279, 0.0292, 0.0192, 0.0335, 0.0392, 0.0248, 0.1305, 0.0323],
+ device='cuda:1'), in_proj_covar=tensor([0.0158, 0.0201, 0.0189, 0.0215, 0.0200, 0.0209, 0.0271, 0.0195],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005],
+ device='cuda:1')
+2023-03-08 23:31:34,989 INFO [train.py:898] (1/4) Epoch 9, batch 1000, loss[loss=0.1971, simple_loss=0.2792, pruned_loss=0.05748, over 18307.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2827, pruned_loss=0.05893, over 3578152.61 frames. ], batch size: 54, lr: 1.28e-02, grad_scale: 8.0
+2023-03-08 23:32:19,869 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0
+2023-03-08 23:32:34,158 INFO [train.py:898] (1/4) Epoch 9, batch 1050, loss[loss=0.2061, simple_loss=0.2904, pruned_loss=0.06092, over 18487.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2825, pruned_loss=0.05885, over 3582776.58 frames. ], batch size: 53, lr: 1.28e-02, grad_scale: 8.0
+2023-03-08 23:32:36,869 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2794, 5.4465, 2.6900, 5.2212, 5.2002, 5.5201, 5.3152, 2.8117],
+ device='cuda:1'), covar=tensor([0.0140, 0.0052, 0.0731, 0.0061, 0.0053, 0.0052, 0.0094, 0.0907],
+ device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0061, 0.0086, 0.0076, 0.0071, 0.0058, 0.0073, 0.0090],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005],
+ device='cuda:1')
+2023-03-08 23:32:36,914 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30125.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:32:53,968 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.406e+02 3.588e+02 4.236e+02 5.310e+02 1.075e+03, threshold=8.473e+02, percent-clipped=3.0
+2023-03-08 23:33:15,621 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30158.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:33:32,455 INFO [train.py:898] (1/4) Epoch 9, batch 1100, loss[loss=0.2158, simple_loss=0.2983, pruned_loss=0.06666, over 17702.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2829, pruned_loss=0.05895, over 3599061.46 frames. ], batch size: 70, lr: 1.28e-02, grad_scale: 8.0
+2023-03-08 23:33:47,807 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30186.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:34:12,838 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30207.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:34:15,693 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30209.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:34:31,345 INFO [train.py:898] (1/4) Epoch 9, batch 1150, loss[loss=0.1804, simple_loss=0.2531, pruned_loss=0.05391, over 18126.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2822, pruned_loss=0.05915, over 3607067.80 frames. ], batch size: 40, lr: 1.28e-02, grad_scale: 8.0
+2023-03-08 23:34:50,754 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.527e+02 3.691e+02 4.407e+02 5.401e+02 1.436e+03, threshold=8.814e+02, percent-clipped=4.0
+2023-03-08 23:35:09,079 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30255.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:35:27,513 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30270.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:35:30,584 INFO [train.py:898] (1/4) Epoch 9, batch 1200, loss[loss=0.1885, simple_loss=0.2726, pruned_loss=0.05221, over 18381.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2823, pruned_loss=0.05897, over 3599928.17 frames. ], batch size: 50, lr: 1.28e-02, grad_scale: 8.0
+2023-03-08 23:36:19,371 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0
+2023-03-08 23:36:24,741 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30318.0, num_to_drop=1, layers_to_drop={2}
+2023-03-08 23:36:30,220 INFO [train.py:898] (1/4) Epoch 9, batch 1250, loss[loss=0.1852, simple_loss=0.2651, pruned_loss=0.05261, over 18412.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2817, pruned_loss=0.05866, over 3607198.75 frames. ], batch size: 48, lr: 1.28e-02, grad_scale: 8.0
+2023-03-08 23:36:49,693 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.180e+02 3.477e+02 4.151e+02 5.169e+02 1.110e+03, threshold=8.302e+02, percent-clipped=2.0
+2023-03-08 23:36:59,482 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.09 vs. limit=5.0
+2023-03-08 23:37:14,265 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30360.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:37:21,633 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30366.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:37:29,913 INFO [train.py:898] (1/4) Epoch 9, batch 1300, loss[loss=0.1791, simple_loss=0.2511, pruned_loss=0.0535, over 17634.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2825, pruned_loss=0.05917, over 3597489.86 frames. ], batch size: 39, lr: 1.28e-02, grad_scale: 8.0
+2023-03-08 23:37:41,627 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0
+2023-03-08 23:38:02,561 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3276, 5.2998, 4.8106, 5.2963, 5.2205, 4.6832, 5.1769, 4.9115],
+ device='cuda:1'), covar=tensor([0.0367, 0.0369, 0.1316, 0.0574, 0.0514, 0.0343, 0.0332, 0.0865],
+ device='cuda:1'), in_proj_covar=tensor([0.0369, 0.0418, 0.0572, 0.0339, 0.0319, 0.0388, 0.0411, 0.0524],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006],
+ device='cuda:1')
+2023-03-08 23:38:26,523 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30421.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:38:28,948 INFO [train.py:898] (1/4) Epoch 9, batch 1350, loss[loss=0.1827, simple_loss=0.2666, pruned_loss=0.04943, over 18540.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2808, pruned_loss=0.05838, over 3607739.61 frames. ], batch size: 49, lr: 1.27e-02, grad_scale: 8.0
+2023-03-08 23:38:48,277 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.096e+02 3.366e+02 4.141e+02 5.097e+02 1.343e+03, threshold=8.282e+02, percent-clipped=5.0
+2023-03-08 23:39:08,575 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0
+2023-03-08 23:39:09,542 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30458.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:39:27,634 INFO [train.py:898] (1/4) Epoch 9, batch 1400, loss[loss=0.1955, simple_loss=0.2845, pruned_loss=0.05322, over 18481.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2809, pruned_loss=0.05803, over 3610056.92 frames. ], batch size: 51, lr: 1.27e-02, grad_scale: 8.0
+2023-03-08 23:39:37,253 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30481.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:39:41,106 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.32 vs. limit=5.0
+2023-03-08 23:39:47,785 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3668, 2.6281, 4.0713, 3.6855, 2.6551, 4.2924, 3.9514, 2.4921],
+ device='cuda:1'), covar=tensor([0.0495, 0.1208, 0.0177, 0.0272, 0.1294, 0.0173, 0.0335, 0.1125],
+ device='cuda:1'), in_proj_covar=tensor([0.0177, 0.0209, 0.0137, 0.0133, 0.0204, 0.0173, 0.0194, 0.0189],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-08 23:40:06,231 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30506.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:40:10,789 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0
+2023-03-08 23:40:26,574 INFO [train.py:898] (1/4) Epoch 9, batch 1450, loss[loss=0.1769, simple_loss=0.2611, pruned_loss=0.0463, over 18269.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2802, pruned_loss=0.05769, over 3613904.01 frames. ], batch size: 49, lr: 1.27e-02, grad_scale: 8.0
+2023-03-08 23:40:46,480 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.569e+02 3.548e+02 4.179e+02 5.117e+02 1.123e+03, threshold=8.357e+02, percent-clipped=2.0
+2023-03-08 23:40:59,050 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2316, 5.0140, 5.3736, 5.3590, 5.1356, 5.9996, 5.6589, 5.3213],
+ device='cuda:1'), covar=tensor([0.0992, 0.0669, 0.0625, 0.0566, 0.1283, 0.0690, 0.0590, 0.1712],
+ device='cuda:1'), in_proj_covar=tensor([0.0290, 0.0217, 0.0230, 0.0228, 0.0272, 0.0326, 0.0212, 0.0323],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-08 23:41:16,137 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30565.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:41:24,909 INFO [train.py:898] (1/4) Epoch 9, batch 1500, loss[loss=0.1863, simple_loss=0.2773, pruned_loss=0.04765, over 18365.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2795, pruned_loss=0.05732, over 3615236.06 frames. ], batch size: 55, lr: 1.27e-02, grad_scale: 8.0
+2023-03-08 23:41:48,203 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0
+2023-03-08 23:42:24,048 INFO [train.py:898] (1/4) Epoch 9, batch 1550, loss[loss=0.1897, simple_loss=0.2631, pruned_loss=0.05813, over 17746.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2793, pruned_loss=0.05742, over 3611897.77 frames. ], batch size: 39, lr: 1.27e-02, grad_scale: 8.0
+2023-03-08 23:42:44,353 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.188e+02 3.439e+02 3.957e+02 5.356e+02 1.144e+03, threshold=7.914e+02, percent-clipped=4.0
+2023-03-08 23:43:01,943 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30655.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:43:23,644 INFO [train.py:898] (1/4) Epoch 9, batch 1600, loss[loss=0.2041, simple_loss=0.2703, pruned_loss=0.06895, over 18422.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2795, pruned_loss=0.05739, over 3611445.88 frames. ], batch size: 43, lr: 1.27e-02, grad_scale: 8.0
+2023-03-08 23:43:56,291 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8881, 3.2990, 4.3222, 4.0421, 2.7790, 4.7334, 4.1456, 2.7067],
+ device='cuda:1'), covar=tensor([0.0328, 0.1046, 0.0213, 0.0246, 0.1447, 0.0128, 0.0302, 0.1144],
+ device='cuda:1'), in_proj_covar=tensor([0.0179, 0.0210, 0.0140, 0.0134, 0.0206, 0.0175, 0.0196, 0.0190],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-08 23:44:14,657 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30716.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:44:14,757 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30716.0, num_to_drop=1, layers_to_drop={2}
+2023-03-08 23:44:22,846 INFO [train.py:898] (1/4) Epoch 9, batch 1650, loss[loss=0.2168, simple_loss=0.2965, pruned_loss=0.06851, over 16984.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2802, pruned_loss=0.0577, over 3605549.08 frames. ], batch size: 78, lr: 1.27e-02, grad_scale: 16.0
+2023-03-08 23:44:43,336 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.525e+02 3.575e+02 4.515e+02 5.590e+02 1.202e+03, threshold=9.030e+02, percent-clipped=6.0
+2023-03-08 23:45:16,592 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0
+2023-03-08 23:45:22,488 INFO [train.py:898] (1/4) Epoch 9, batch 1700, loss[loss=0.1774, simple_loss=0.2523, pruned_loss=0.05125, over 18426.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2815, pruned_loss=0.05826, over 3606608.59 frames. ], batch size: 43, lr: 1.27e-02, grad_scale: 16.0
+2023-03-08 23:45:26,447 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6027, 3.6623, 3.4650, 2.9556, 3.4480, 2.6133, 2.5714, 3.6569],
+ device='cuda:1'), covar=tensor([0.0036, 0.0056, 0.0068, 0.0112, 0.0070, 0.0146, 0.0160, 0.0042],
+ device='cuda:1'), in_proj_covar=tensor([0.0079, 0.0105, 0.0094, 0.0141, 0.0095, 0.0139, 0.0147, 0.0079],
+ device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-08 23:45:32,841 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30781.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:46:22,138 INFO [train.py:898] (1/4) Epoch 9, batch 1750, loss[loss=0.2238, simple_loss=0.3091, pruned_loss=0.06923, over 18097.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.281, pruned_loss=0.0579, over 3607310.45 frames. ], batch size: 62, lr: 1.27e-02, grad_scale: 16.0
+2023-03-08 23:46:29,097 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30829.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:46:43,007 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.320e+02 3.498e+02 4.092e+02 4.863e+02 1.038e+03, threshold=8.183e+02, percent-clipped=2.0
+2023-03-08 23:47:11,820 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30865.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:47:20,588 INFO [train.py:898] (1/4) Epoch 9, batch 1800, loss[loss=0.1692, simple_loss=0.2472, pruned_loss=0.04558, over 17621.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2807, pruned_loss=0.05777, over 3593928.67 frames. ], batch size: 39, lr: 1.27e-02, grad_scale: 8.0
+2023-03-08 23:47:57,656 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30903.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:48:09,040 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30913.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:48:20,087 INFO [train.py:898] (1/4) Epoch 9, batch 1850, loss[loss=0.2082, simple_loss=0.2981, pruned_loss=0.05916, over 18633.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2807, pruned_loss=0.05763, over 3601185.63 frames. ], batch size: 52, lr: 1.26e-02, grad_scale: 8.0
+2023-03-08 23:48:22,723 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30925.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:48:33,966 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30934.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:48:42,058 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.240e+02 3.365e+02 3.795e+02 4.610e+02 7.960e+02, threshold=7.590e+02, percent-clipped=0.0
+2023-03-08 23:49:09,547 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30964.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:49:19,316 INFO [train.py:898] (1/4) Epoch 9, batch 1900, loss[loss=0.1833, simple_loss=0.2643, pruned_loss=0.05112, over 18507.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2803, pruned_loss=0.05787, over 3589541.73 frames. ], batch size: 47, lr: 1.26e-02, grad_scale: 8.0
+2023-03-08 23:49:34,677 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30986.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:49:41,115 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.3807, 3.5961, 5.1586, 4.1622, 3.0546, 2.8339, 4.2186, 5.2197],
+ device='cuda:1'), covar=tensor([0.0886, 0.1501, 0.0071, 0.0352, 0.0916, 0.1134, 0.0368, 0.0114],
+ device='cuda:1'), in_proj_covar=tensor([0.0133, 0.0225, 0.0089, 0.0151, 0.0169, 0.0170, 0.0160, 0.0125],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-08 23:49:45,314 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30995.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:50:04,633 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31011.0, num_to_drop=1, layers_to_drop={2}
+2023-03-08 23:50:04,829 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31011.0, num_to_drop=1, layers_to_drop={0}
+2023-03-08 23:50:10,191 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31016.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:50:17,961 INFO [train.py:898] (1/4) Epoch 9, batch 1950, loss[loss=0.1864, simple_loss=0.2552, pruned_loss=0.05884, over 17220.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2811, pruned_loss=0.05817, over 3586624.42 frames. ], batch size: 38, lr: 1.26e-02, grad_scale: 8.0
+2023-03-08 23:50:39,081 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.073e+02 3.300e+02 4.115e+02 5.228e+02 1.013e+03, threshold=8.231e+02, percent-clipped=3.0
+2023-03-08 23:51:06,736 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31064.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:51:16,284 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31072.0, num_to_drop=1, layers_to_drop={0}
+2023-03-08 23:51:16,962 INFO [train.py:898] (1/4) Epoch 9, batch 2000, loss[loss=0.1869, simple_loss=0.2592, pruned_loss=0.05726, over 18434.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2799, pruned_loss=0.05748, over 3592029.51 frames. ], batch size: 42, lr: 1.26e-02, grad_scale: 8.0
+2023-03-08 23:52:15,796 INFO [train.py:898] (1/4) Epoch 9, batch 2050, loss[loss=0.1791, simple_loss=0.2596, pruned_loss=0.04924, over 18418.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2792, pruned_loss=0.05723, over 3593104.55 frames. ], batch size: 48, lr: 1.26e-02, grad_scale: 8.0
+2023-03-08 23:52:37,041 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.601e+02 3.647e+02 4.491e+02 5.379e+02 9.813e+02, threshold=8.982e+02, percent-clipped=3.0
+2023-03-08 23:53:07,946 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.90 vs. limit=5.0
+2023-03-08 23:53:15,222 INFO [train.py:898] (1/4) Epoch 9, batch 2100, loss[loss=0.2025, simple_loss=0.2869, pruned_loss=0.05908, over 18232.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2793, pruned_loss=0.05736, over 3585449.22 frames. ], batch size: 60, lr: 1.26e-02, grad_scale: 8.0
+2023-03-08 23:53:57,140 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0
+2023-03-08 23:54:14,827 INFO [train.py:898] (1/4) Epoch 9, batch 2150, loss[loss=0.1682, simple_loss=0.2527, pruned_loss=0.0418, over 18416.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2798, pruned_loss=0.05725, over 3583368.90 frames. ], batch size: 48, lr: 1.26e-02, grad_scale: 8.0
+2023-03-08 23:54:35,296 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.335e+02 3.491e+02 4.023e+02 4.961e+02 1.002e+03, threshold=8.046e+02, percent-clipped=2.0
+2023-03-08 23:54:56,919 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31259.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:55:11,901 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.52 vs. limit=5.0
+2023-03-08 23:55:13,367 INFO [train.py:898] (1/4) Epoch 9, batch 2200, loss[loss=0.1998, simple_loss=0.2806, pruned_loss=0.05947, over 18099.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2802, pruned_loss=0.05731, over 3583909.05 frames. ], batch size: 62, lr: 1.26e-02, grad_scale: 4.0
+2023-03-08 23:55:22,621 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31281.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:55:32,535 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31290.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:55:53,490 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.60 vs. limit=5.0
+2023-03-08 23:55:57,286 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31311.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:56:03,071 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4051, 5.9689, 5.4289, 5.6800, 5.4091, 5.4557, 5.9746, 5.9650],
+ device='cuda:1'), covar=tensor([0.1089, 0.0685, 0.0433, 0.0662, 0.1598, 0.0671, 0.0564, 0.0645],
+ device='cuda:1'), in_proj_covar=tensor([0.0497, 0.0407, 0.0307, 0.0446, 0.0609, 0.0445, 0.0564, 0.0435],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004],
+ device='cuda:1')
+2023-03-08 23:56:11,975 INFO [train.py:898] (1/4) Epoch 9, batch 2250, loss[loss=0.1646, simple_loss=0.2445, pruned_loss=0.04234, over 18420.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2807, pruned_loss=0.05786, over 3591613.12 frames. ], batch size: 42, lr: 1.26e-02, grad_scale: 4.0
+2023-03-08 23:56:33,579 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.219e+02 3.737e+02 4.347e+02 5.503e+02 2.827e+03, threshold=8.695e+02, percent-clipped=9.0
+2023-03-08 23:56:53,436 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31359.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:57:02,913 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31367.0, num_to_drop=1, layers_to_drop={1}
+2023-03-08 23:57:09,986 INFO [train.py:898] (1/4) Epoch 9, batch 2300, loss[loss=0.208, simple_loss=0.2915, pruned_loss=0.06221, over 17078.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2813, pruned_loss=0.05807, over 3597821.11 frames. ], batch size: 78, lr: 1.26e-02, grad_scale: 4.0
+2023-03-08 23:57:24,580 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1033, 3.4993, 3.5493, 2.9314, 3.0241, 2.9913, 2.4286, 2.0416],
+ device='cuda:1'), covar=tensor([0.0208, 0.0147, 0.0106, 0.0217, 0.0314, 0.0190, 0.0560, 0.0709],
+ device='cuda:1'), in_proj_covar=tensor([0.0054, 0.0044, 0.0043, 0.0056, 0.0076, 0.0052, 0.0070, 0.0076],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005],
+ device='cuda:1')
+2023-03-08 23:57:52,673 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31409.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:58:09,031 INFO [train.py:898] (1/4) Epoch 9, batch 2350, loss[loss=0.2077, simple_loss=0.2922, pruned_loss=0.06162, over 18339.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2812, pruned_loss=0.05803, over 3596075.77 frames. ], batch size: 55, lr: 1.25e-02, grad_scale: 4.0
+2023-03-08 23:58:09,979 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3313, 5.5348, 3.1711, 5.2896, 5.1821, 5.5638, 5.4368, 2.9048],
+ device='cuda:1'), covar=tensor([0.0138, 0.0045, 0.0633, 0.0064, 0.0067, 0.0052, 0.0067, 0.0911],
+ device='cuda:1'), in_proj_covar=tensor([0.0075, 0.0062, 0.0087, 0.0077, 0.0072, 0.0061, 0.0075, 0.0092],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005],
+ device='cuda:1')
+2023-03-08 23:58:31,515 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.260e+02 3.212e+02 3.852e+02 4.857e+02 1.133e+03, threshold=7.704e+02, percent-clipped=2.0
+2023-03-08 23:59:04,424 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31470.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 23:59:07,378 INFO [train.py:898] (1/4) Epoch 9, batch 2400, loss[loss=0.1527, simple_loss=0.2352, pruned_loss=0.03508, over 18232.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2813, pruned_loss=0.05771, over 3600318.65 frames. ], batch size: 45, lr: 1.25e-02, grad_scale: 8.0
+2023-03-08 23:59:29,337 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31491.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 00:00:06,631 INFO [train.py:898] (1/4) Epoch 9, batch 2450, loss[loss=0.1573, simple_loss=0.2341, pruned_loss=0.04026, over 18479.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2807, pruned_loss=0.05743, over 3597159.85 frames. ], batch size: 44, lr: 1.25e-02, grad_scale: 8.0
+2023-03-09 00:00:29,270 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.799e+02 3.437e+02 4.058e+02 5.079e+02 1.109e+03, threshold=8.115e+02, percent-clipped=5.0
+2023-03-09 00:00:40,990 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31552.0, num_to_drop=1, layers_to_drop={2}
+2023-03-09 00:00:49,321 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31559.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:01:05,460 INFO [train.py:898] (1/4) Epoch 9, batch 2500, loss[loss=0.216, simple_loss=0.2975, pruned_loss=0.0673, over 18263.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.281, pruned_loss=0.05757, over 3598135.49 frames. ], batch size: 57, lr: 1.25e-02, grad_scale: 8.0
+2023-03-09 00:01:15,344 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31581.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:01:25,995 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31590.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:01:45,092 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31607.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:02:03,050 INFO [train.py:898] (1/4) Epoch 9, batch 2550, loss[loss=0.2079, simple_loss=0.2971, pruned_loss=0.05939, over 18513.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2813, pruned_loss=0.05769, over 3592379.69 frames. ], batch size: 59, lr: 1.25e-02, grad_scale: 8.0
+2023-03-09 00:02:10,590 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31629.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:02:21,837 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31638.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:02:26,082 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.346e+02 3.602e+02 4.315e+02 5.683e+02 1.136e+03, threshold=8.630e+02, percent-clipped=7.0
+2023-03-09 00:02:54,692 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31667.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 00:03:01,172 INFO [train.py:898] (1/4) Epoch 9, batch 2600, loss[loss=0.1912, simple_loss=0.2814, pruned_loss=0.05053, over 18631.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2807, pruned_loss=0.05754, over 3597686.85 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 8.0
+2023-03-09 00:03:37,758 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31704.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:03:41,274 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4119, 4.3706, 2.6447, 4.3554, 5.3768, 2.7837, 3.9756, 4.1426],
+ device='cuda:1'), covar=tensor([0.0065, 0.1131, 0.1422, 0.0528, 0.0044, 0.1037, 0.0566, 0.0632],
+ device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0214, 0.0184, 0.0184, 0.0083, 0.0168, 0.0196, 0.0198],
+ device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 00:03:50,209 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31715.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 00:03:51,457 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.2575, 2.5896, 2.3553, 2.6314, 3.2856, 3.1587, 2.7833, 2.5887],
+ device='cuda:1'), covar=tensor([0.0240, 0.0270, 0.0636, 0.0361, 0.0159, 0.0159, 0.0359, 0.0315],
+ device='cuda:1'), in_proj_covar=tensor([0.0110, 0.0097, 0.0146, 0.0127, 0.0092, 0.0079, 0.0123, 0.0117],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-09 00:03:54,883 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2219, 4.2911, 2.3276, 4.3994, 5.2410, 2.5447, 3.6858, 3.7520],
+ device='cuda:1'), covar=tensor([0.0062, 0.1066, 0.1652, 0.0471, 0.0045, 0.1209, 0.0699, 0.0799],
+ device='cuda:1'), in_proj_covar=tensor([0.0102, 0.0214, 0.0184, 0.0185, 0.0083, 0.0169, 0.0197, 0.0199],
+ device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 00:03:58,992 INFO [train.py:898] (1/4) Epoch 9, batch 2650, loss[loss=0.1814, simple_loss=0.2706, pruned_loss=0.04612, over 18503.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2805, pruned_loss=0.05742, over 3594750.66 frames. ], batch size: 47, lr: 1.25e-02, grad_scale: 8.0
+2023-03-09 00:04:01,532 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2617, 4.2952, 4.4125, 4.0753, 4.1661, 4.1765, 4.4998, 4.4125],
+ device='cuda:1'), covar=tensor([0.0074, 0.0095, 0.0066, 0.0096, 0.0082, 0.0116, 0.0080, 0.0113],
+ device='cuda:1'), in_proj_covar=tensor([0.0074, 0.0054, 0.0055, 0.0068, 0.0059, 0.0079, 0.0067, 0.0067],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0002, 0.0002, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 00:04:21,586 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.212e+02 3.574e+02 4.199e+02 5.233e+02 1.424e+03, threshold=8.398e+02, percent-clipped=3.0
+2023-03-09 00:04:48,294 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31765.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:04:48,491 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31765.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:04:57,870 INFO [train.py:898] (1/4) Epoch 9, batch 2700, loss[loss=0.2187, simple_loss=0.302, pruned_loss=0.06772, over 18123.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2805, pruned_loss=0.05744, over 3587739.53 frames. ], batch size: 62, lr: 1.25e-02, grad_scale: 8.0
+2023-03-09 00:05:15,584 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31788.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:05:25,806 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.96 vs. limit=2.0
+2023-03-09 00:05:56,602 INFO [train.py:898] (1/4) Epoch 9, batch 2750, loss[loss=0.1815, simple_loss=0.2643, pruned_loss=0.04931, over 18380.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2814, pruned_loss=0.05755, over 3589166.90 frames. ], batch size: 50, lr: 1.25e-02, grad_scale: 8.0
+2023-03-09 00:05:56,853 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31823.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:06:19,346 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.068e+02 3.248e+02 4.036e+02 4.734e+02 1.785e+03, threshold=8.071e+02, percent-clipped=3.0
+2023-03-09 00:06:25,199 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31847.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 00:06:27,656 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31849.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:06:55,869 INFO [train.py:898] (1/4) Epoch 9, batch 2800, loss[loss=0.28, simple_loss=0.3388, pruned_loss=0.1107, over 12955.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2809, pruned_loss=0.05755, over 3591906.70 frames. ], batch size: 129, lr: 1.25e-02, grad_scale: 8.0
+2023-03-09 00:07:02,114 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31878.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:07:09,083 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31884.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:07:16,383 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6417, 2.1348, 2.6981, 2.8903, 3.6166, 5.3786, 4.6478, 4.1221],
+ device='cuda:1'), covar=tensor([0.1008, 0.1649, 0.2126, 0.1102, 0.1402, 0.0100, 0.0345, 0.0402],
+ device='cuda:1'), in_proj_covar=tensor([0.0221, 0.0277, 0.0286, 0.0238, 0.0346, 0.0167, 0.0242, 0.0187],
+ device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001],
+ device='cuda:1')
+2023-03-09 00:07:42,387 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8252, 3.9089, 5.1037, 2.9430, 4.3491, 2.5691, 2.9525, 1.9699],
+ device='cuda:1'), covar=tensor([0.0859, 0.0689, 0.0076, 0.0610, 0.0524, 0.2058, 0.2268, 0.1679],
+ device='cuda:1'), in_proj_covar=tensor([0.0186, 0.0205, 0.0104, 0.0157, 0.0217, 0.0235, 0.0264, 0.0200],
+ device='cuda:1'), out_proj_covar=tensor([1.6474e-04, 1.8743e-04, 9.6610e-05, 1.4228e-04, 1.9755e-04, 2.1693e-04,
+ 2.4042e-04, 1.8518e-04], device='cuda:1')
+2023-03-09 00:07:45,663 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31915.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:07:54,193 INFO [train.py:898] (1/4) Epoch 9, batch 2850, loss[loss=0.1981, simple_loss=0.283, pruned_loss=0.0566, over 18053.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2811, pruned_loss=0.05778, over 3582018.39 frames. ], batch size: 62, lr: 1.25e-02, grad_scale: 8.0
+2023-03-09 00:08:13,133 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31939.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:08:16,250 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.274e+02 3.441e+02 4.233e+02 5.343e+02 2.679e+03, threshold=8.467e+02, percent-clipped=8.0
+2023-03-09 00:08:26,675 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31950.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:08:30,564 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0
+2023-03-09 00:08:52,762 INFO [train.py:898] (1/4) Epoch 9, batch 2900, loss[loss=0.2152, simple_loss=0.3062, pruned_loss=0.06212, over 18136.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2812, pruned_loss=0.05777, over 3582387.51 frames. ], batch size: 62, lr: 1.24e-02, grad_scale: 8.0
+2023-03-09 00:08:56,451 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31976.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:09:17,828 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0
+2023-03-09 00:09:43,636 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32011.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:09:56,717 INFO [train.py:898] (1/4) Epoch 9, batch 2950, loss[loss=0.2157, simple_loss=0.2968, pruned_loss=0.06727, over 18375.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2818, pruned_loss=0.05798, over 3580016.49 frames. ], batch size: 50, lr: 1.24e-02, grad_scale: 8.0
+2023-03-09 00:10:19,142 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.223e+02 3.183e+02 3.919e+02 4.606e+02 8.522e+02, threshold=7.838e+02, percent-clipped=1.0
+2023-03-09 00:10:41,316 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32060.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:10:47,599 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32065.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:10:56,316 INFO [train.py:898] (1/4) Epoch 9, batch 3000, loss[loss=0.2227, simple_loss=0.3066, pruned_loss=0.06943, over 16135.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2806, pruned_loss=0.05753, over 3576677.86 frames. ], batch size: 94, lr: 1.24e-02, grad_scale: 8.0
+2023-03-09 00:10:56,317 INFO [train.py:923] (1/4) Computing validation loss
+2023-03-09 00:11:08,348 INFO [train.py:932] (1/4) Epoch 9, validation: loss=0.1618, simple_loss=0.2644, pruned_loss=0.02958, over 944034.00 frames. 
+2023-03-09 00:11:08,349 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB
+2023-03-09 00:11:42,775 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.39 vs. limit=5.0
+2023-03-09 00:11:55,414 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32113.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:12:07,319 INFO [train.py:898] (1/4) Epoch 9, batch 3050, loss[loss=0.2256, simple_loss=0.3003, pruned_loss=0.07546, over 12550.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2808, pruned_loss=0.05766, over 3580963.22 frames. ], batch size: 129, lr: 1.24e-02, grad_scale: 8.0
+2023-03-09 00:12:15,726 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8060, 3.8391, 3.5678, 3.1051, 3.5662, 2.9497, 2.8606, 3.7608],
+ device='cuda:1'), covar=tensor([0.0030, 0.0048, 0.0056, 0.0107, 0.0060, 0.0123, 0.0151, 0.0052],
+ device='cuda:1'), in_proj_covar=tensor([0.0081, 0.0104, 0.0096, 0.0143, 0.0094, 0.0139, 0.0148, 0.0077],
+ device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-09 00:12:29,405 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.233e+02 3.379e+02 3.882e+02 4.685e+02 8.666e+02, threshold=7.765e+02, percent-clipped=1.0
+2023-03-09 00:12:32,456 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32144.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:12:35,896 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32147.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 00:12:50,386 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9882, 3.6987, 5.1383, 3.1861, 4.2713, 2.7196, 3.0988, 1.9864],
+ device='cuda:1'), covar=tensor([0.0816, 0.0779, 0.0065, 0.0544, 0.0455, 0.1921, 0.2279, 0.1639],
+ device='cuda:1'), in_proj_covar=tensor([0.0188, 0.0209, 0.0107, 0.0159, 0.0220, 0.0238, 0.0266, 0.0203],
+ device='cuda:1'), out_proj_covar=tensor([1.6657e-04, 1.9022e-04, 9.8870e-05, 1.4360e-04, 1.9950e-04, 2.1950e-04,
+ 2.4200e-04, 1.8790e-04], device='cuda:1')
+2023-03-09 00:13:05,881 INFO [train.py:898] (1/4) Epoch 9, batch 3100, loss[loss=0.1934, simple_loss=0.2774, pruned_loss=0.05467, over 18630.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2799, pruned_loss=0.05702, over 3593822.03 frames. ], batch size: 52, lr: 1.24e-02, grad_scale: 8.0
+2023-03-09 00:13:07,841 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.90 vs. limit=5.0
+2023-03-09 00:13:13,302 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32179.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:13:32,007 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32195.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 00:14:05,290 INFO [train.py:898] (1/4) Epoch 9, batch 3150, loss[loss=0.2162, simple_loss=0.2994, pruned_loss=0.06656, over 18314.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2797, pruned_loss=0.0568, over 3605265.77 frames. ], batch size: 54, lr: 1.24e-02, grad_scale: 8.0
+2023-03-09 00:14:18,462 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32234.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:14:28,072 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.206e+02 3.166e+02 3.799e+02 4.714e+02 1.226e+03, threshold=7.598e+02, percent-clipped=2.0
+2023-03-09 00:15:02,115 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32271.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:15:04,046 INFO [train.py:898] (1/4) Epoch 9, batch 3200, loss[loss=0.1887, simple_loss=0.2617, pruned_loss=0.05784, over 18242.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2799, pruned_loss=0.05693, over 3593148.38 frames. ], batch size: 45, lr: 1.24e-02, grad_scale: 8.0
+2023-03-09 00:15:43,099 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32306.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:15:48,096 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4337, 5.1740, 5.5731, 5.3426, 5.3455, 6.1262, 5.7555, 5.4847],
+ device='cuda:1'), covar=tensor([0.0890, 0.0630, 0.0619, 0.0660, 0.1397, 0.0677, 0.0580, 0.1703],
+ device='cuda:1'), in_proj_covar=tensor([0.0297, 0.0226, 0.0234, 0.0237, 0.0279, 0.0333, 0.0219, 0.0329],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:1')
+2023-03-09 00:16:02,574 INFO [train.py:898] (1/4) Epoch 9, batch 3250, loss[loss=0.1749, simple_loss=0.2585, pruned_loss=0.04564, over 18492.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2796, pruned_loss=0.05701, over 3597100.51 frames. ], batch size: 47, lr: 1.24e-02, grad_scale: 8.0
+2023-03-09 00:16:24,632 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.580e+02 3.470e+02 4.080e+02 5.186e+02 8.555e+02, threshold=8.161e+02, percent-clipped=3.0
+2023-03-09 00:16:46,488 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32360.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 00:17:01,477 INFO [train.py:898] (1/4) Epoch 9, batch 3300, loss[loss=0.2231, simple_loss=0.3069, pruned_loss=0.06963, over 18181.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2801, pruned_loss=0.05716, over 3596251.71 frames. 
], batch size: 60, lr: 1.24e-02, grad_scale: 8.0 +2023-03-09 00:17:31,027 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3279, 3.3351, 1.6133, 4.2939, 2.6818, 4.2897, 1.9188, 3.6458], + device='cuda:1'), covar=tensor([0.0591, 0.0833, 0.1677, 0.0386, 0.0899, 0.0244, 0.1393, 0.0438], + device='cuda:1'), in_proj_covar=tensor([0.0180, 0.0204, 0.0175, 0.0219, 0.0174, 0.0222, 0.0189, 0.0179], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 00:17:43,085 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32408.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:18:00,711 INFO [train.py:898] (1/4) Epoch 9, batch 3350, loss[loss=0.2018, simple_loss=0.2882, pruned_loss=0.05772, over 18510.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2796, pruned_loss=0.05679, over 3597589.24 frames. ], batch size: 53, lr: 1.24e-02, grad_scale: 8.0 +2023-03-09 00:18:22,502 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.160e+02 3.179e+02 3.976e+02 5.107e+02 1.332e+03, threshold=7.951e+02, percent-clipped=3.0 +2023-03-09 00:18:25,074 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32444.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:18:32,193 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7343, 2.1208, 2.6895, 2.9562, 3.4185, 5.3459, 4.7591, 4.0382], + device='cuda:1'), covar=tensor([0.1023, 0.1839, 0.2050, 0.1085, 0.1518, 0.0072, 0.0323, 0.0414], + device='cuda:1'), in_proj_covar=tensor([0.0224, 0.0281, 0.0290, 0.0239, 0.0350, 0.0171, 0.0245, 0.0192], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 00:18:38,179 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.64 vs. limit=5.0 +2023-03-09 00:18:59,668 INFO [train.py:898] (1/4) Epoch 9, batch 3400, loss[loss=0.1612, simple_loss=0.2406, pruned_loss=0.04086, over 17627.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2792, pruned_loss=0.05675, over 3599747.91 frames. ], batch size: 39, lr: 1.23e-02, grad_scale: 8.0 +2023-03-09 00:19:06,882 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32479.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:19:21,716 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32492.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:19:57,943 INFO [train.py:898] (1/4) Epoch 9, batch 3450, loss[loss=0.2152, simple_loss=0.2849, pruned_loss=0.07277, over 18489.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2793, pruned_loss=0.05687, over 3597871.43 frames. 
], batch size: 47, lr: 1.23e-02, grad_scale: 8.0 +2023-03-09 00:20:02,595 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32527.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:20:10,643 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32534.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:20:19,239 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.059e+02 3.448e+02 3.896e+02 4.764e+02 9.293e+02, threshold=7.793e+02, percent-clipped=1.0 +2023-03-09 00:20:54,024 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32571.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:20:56,533 INFO [train.py:898] (1/4) Epoch 9, batch 3500, loss[loss=0.1692, simple_loss=0.2478, pruned_loss=0.04525, over 18161.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2794, pruned_loss=0.05645, over 3610789.77 frames. ], batch size: 44, lr: 1.23e-02, grad_scale: 8.0 +2023-03-09 00:21:06,902 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32582.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:21:07,387 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-03-09 00:21:34,346 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32606.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:21:47,771 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32619.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:21:52,019 INFO [train.py:898] (1/4) Epoch 9, batch 3550, loss[loss=0.1756, simple_loss=0.266, pruned_loss=0.04264, over 18394.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2794, pruned_loss=0.05637, over 3604909.91 frames. ], batch size: 50, lr: 1.23e-02, grad_scale: 8.0 +2023-03-09 00:21:56,649 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7168, 4.4018, 4.6180, 3.4668, 3.6042, 3.5645, 2.3535, 2.2169], + device='cuda:1'), covar=tensor([0.0218, 0.0132, 0.0060, 0.0237, 0.0272, 0.0187, 0.0770, 0.0915], + device='cuda:1'), in_proj_covar=tensor([0.0054, 0.0045, 0.0044, 0.0055, 0.0074, 0.0052, 0.0069, 0.0074], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-09 00:22:12,489 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.120e+02 3.503e+02 4.111e+02 5.019e+02 1.415e+03, threshold=8.222e+02, percent-clipped=4.0 +2023-03-09 00:22:25,463 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32654.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:22:45,819 INFO [train.py:898] (1/4) Epoch 9, batch 3600, loss[loss=0.1924, simple_loss=0.2749, pruned_loss=0.05489, over 18415.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2794, pruned_loss=0.05674, over 3590775.20 frames. ], batch size: 48, lr: 1.23e-02, grad_scale: 8.0 +2023-03-09 00:23:53,332 INFO [train.py:898] (1/4) Epoch 10, batch 0, loss[loss=0.1852, simple_loss=0.2729, pruned_loss=0.04879, over 18539.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2729, pruned_loss=0.04879, over 18539.00 frames. ], batch size: 49, lr: 1.17e-02, grad_scale: 8.0 +2023-03-09 00:23:53,333 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 00:24:05,218 INFO [train.py:932] (1/4) Epoch 10, validation: loss=0.1621, simple_loss=0.2651, pruned_loss=0.02958, over 944034.00 frames. 
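Every loss record in this log reports the triple (loss, simple_loss, pruned_loss), and throughout the section the three satisfy loss = 0.5 * simple_loss + pruned_loss; the validation entry directly above checks out as 0.5 * 0.2651 + 0.02958 = 0.1621. A minimal sketch of that bookkeeping, assuming the usual pruned-transducer convention of a scaled simple (trivial-joiner) term plus the pruned term; combine_losses is an illustrative name, not a function from train.py:

    # Illustrative helper (not from train.py): how the logged loss triples
    # combine, assuming loss = simple_scale * simple_loss + pruned_loss.
    def combine_losses(simple_loss: float, pruned_loss: float,
                       simple_scale: float = 0.5) -> float:
        return simple_scale * simple_loss + pruned_loss

    # Spot-check against the Epoch 10 validation entry logged above.
    assert abs(combine_losses(0.2651, 0.02958) - 0.1621) < 5e-4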
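The [zipformer.py:625] records track, per encoder stack, a staggered warmup window (warmup_begin/warmup_end, long since passed at these batch counts) and which layers were stochastically bypassed on that batch; late in training num_to_drop is almost always 0 with an occasional 1. A sketch of that draw, under the assumption that the drop probability is high inside a stack's warmup window and small afterwards; both probabilities below are illustrative guesses, not the constants in zipformer.py:

    import random

    # Illustrative layer-drop draw (not the zipformer.py code): aggressive
    # dropping inside this stack's warmup window, rare afterwards.
    def pick_layers_to_drop(batch_count: float, num_layers: int,
                            warmup_begin: float, warmup_end: float,
                            warmup_p: float = 0.5, final_p: float = 0.05):
        p = warmup_p if warmup_begin <= batch_count < warmup_end else final_p
        return {i for i in range(num_layers) if random.random() < p}

    layers = pick_layers_to_drop(34000.0, num_layers=4,
                                 warmup_begin=1333.3, warmup_end=2000.0)
    print(f"num_to_drop={len(layers)}, layers_to_drop={layers or set()}")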
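In each [optim.py:369] record the five grad-norm numbers are the min, 25th, 50th and 75th percentiles, and max over recent batches, and the logged threshold equals Clipping_scale times the median (for example 2.0 * 3.919e+02 = 7.838e+02 in the 00:10:19 entry above), with percent-clipped the share of batches whose norm exceeded it. A sketch of that bookkeeping over a rolling window; GradNormClipper and its window size are illustrative assumptions, not the optimizer code in optim.py:

    from collections import deque
    import statistics

    class GradNormClipper:
        # Illustrative bookkeeping (not optim.py): keep a rolling window of
        # gradient norms, clip at clipping_scale * median, count clip events.
        def __init__(self, clipping_scale: float = 2.0, window: int = 200):
            self.clipping_scale = clipping_scale
            self.norms = deque(maxlen=window)
            self.num_clipped = 0
            self.num_updates = 0

        def update(self, grad_norm: float) -> float:
            """Record one batch's gradient norm; return the scale to apply."""
            self.norms.append(grad_norm)
            self.num_updates += 1
            threshold = self.clipping_scale * statistics.median(self.norms)
            if grad_norm > threshold:
                self.num_clipped += 1
                return threshold / grad_norm  # shrink this batch's gradient
            return 1.0

        def summary(self) -> str:
            q = statistics.quantiles(self.norms, n=4)  # 25th/50th/75th pcts
            pct = 100.0 * self.num_clipped / self.num_updates
            return (f"grad-norm quartiles {min(self.norms):.3e} {q[0]:.3e} "
                    f"{q[1]:.3e} {q[2]:.3e} {max(self.norms):.3e}, "
                    f"threshold={self.clipping_scale * q[1]:.3e}, "
                    f"percent-clipped={pct:.1f}")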
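Finally, the [scaling.py:679] records compare a whitening metric against a per-module limit (limit=2.0 where num_groups=8, limit=5.0 for the single-group 384-channel case). A plausible reading, assumed here rather than taken from scaling.py, is the eigenvalue-dispersion ratio E[lambda^2] / E[lambda]^2 of each channel group's feature covariance, which is exactly 1.0 for a perfectly white signal and grows as the covariance becomes anisotropic:

    import torch

    # Illustrative reimplementation (not scaling.py): metric is
    # group_size * trace(C^2) / trace(C)^2 per group, i.e. the eigenvalue
    # dispersion of the covariance C, averaged over groups.
    def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
        """x: (num_frames, num_channels); channels split into num_groups."""
        num_frames, num_channels = x.shape
        group = num_channels // num_groups
        x = x.reshape(num_frames, num_groups, group).transpose(0, 1)
        covar = torch.matmul(x.transpose(1, 2), x) / num_frames
        trace = covar.diagonal(dim1=1, dim2=2).sum(-1)
        trace_of_sq = (covar ** 2).sum(dim=(1, 2))  # trace(C^2), C symmetric
        return (group * trace_of_sq / trace ** 2).mean().item()

    x = torch.randn(1000, 96)  # nearly white input stays under the limit
    print(f"metric={whitening_metric(x, num_groups=8):.2f} vs. limit=2.0")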
+2023-03-09 00:24:05,219 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 00:24:32,970 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4164, 3.1347, 1.8251, 4.0985, 2.7234, 4.1247, 2.0907, 3.5483], + device='cuda:1'), covar=tensor([0.0519, 0.0795, 0.1384, 0.0458, 0.0808, 0.0243, 0.1176, 0.0409], + device='cuda:1'), in_proj_covar=tensor([0.0182, 0.0207, 0.0175, 0.0221, 0.0175, 0.0221, 0.0187, 0.0180], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 00:24:46,645 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.116e+02 3.572e+02 4.322e+02 5.704e+02 1.376e+03, threshold=8.645e+02, percent-clipped=6.0 +2023-03-09 00:25:02,103 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7261, 2.2037, 4.2067, 4.2293, 2.2901, 4.5596, 3.9485, 2.6139], + device='cuda:1'), covar=tensor([0.0383, 0.2112, 0.0214, 0.0203, 0.1931, 0.0219, 0.0399, 0.1353], + device='cuda:1'), in_proj_covar=tensor([0.0182, 0.0220, 0.0142, 0.0138, 0.0210, 0.0179, 0.0198, 0.0194], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 00:25:03,986 INFO [train.py:898] (1/4) Epoch 10, batch 50, loss[loss=0.175, simple_loss=0.2643, pruned_loss=0.04282, over 18346.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2807, pruned_loss=0.05621, over 802350.95 frames. ], batch size: 46, lr: 1.17e-02, grad_scale: 8.0 +2023-03-09 00:25:16,613 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-03-09 00:25:34,776 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6576, 3.4618, 4.7749, 3.0322, 4.0747, 2.5476, 2.8608, 1.9876], + device='cuda:1'), covar=tensor([0.0943, 0.0848, 0.0073, 0.0588, 0.0571, 0.2142, 0.2240, 0.1721], + device='cuda:1'), in_proj_covar=tensor([0.0191, 0.0207, 0.0109, 0.0162, 0.0224, 0.0239, 0.0267, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 00:25:57,203 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9159, 4.8590, 4.9353, 4.6440, 4.5560, 4.7821, 5.1128, 5.0324], + device='cuda:1'), covar=tensor([0.0054, 0.0076, 0.0084, 0.0103, 0.0072, 0.0102, 0.0075, 0.0096], + device='cuda:1'), in_proj_covar=tensor([0.0075, 0.0055, 0.0056, 0.0070, 0.0060, 0.0081, 0.0068, 0.0069], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 00:26:02,542 INFO [train.py:898] (1/4) Epoch 10, batch 100, loss[loss=0.2276, simple_loss=0.3049, pruned_loss=0.07521, over 18505.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2806, pruned_loss=0.05673, over 1421009.66 frames. 
], batch size: 53, lr: 1.17e-02, grad_scale: 8.0 +2023-03-09 00:26:44,169 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.347e+02 3.358e+02 3.908e+02 4.733e+02 8.989e+02, threshold=7.816e+02, percent-clipped=2.0 +2023-03-09 00:26:49,131 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1344, 2.5542, 2.2796, 2.5776, 3.2955, 3.2474, 2.8313, 2.6591], + device='cuda:1'), covar=tensor([0.0203, 0.0295, 0.0649, 0.0375, 0.0200, 0.0194, 0.0379, 0.0342], + device='cuda:1'), in_proj_covar=tensor([0.0110, 0.0099, 0.0146, 0.0132, 0.0094, 0.0081, 0.0127, 0.0119], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 00:27:01,162 INFO [train.py:898] (1/4) Epoch 10, batch 150, loss[loss=0.1808, simple_loss=0.2686, pruned_loss=0.04648, over 18504.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2821, pruned_loss=0.05754, over 1877978.12 frames. ], batch size: 53, lr: 1.17e-02, grad_scale: 8.0 +2023-03-09 00:28:00,323 INFO [train.py:898] (1/4) Epoch 10, batch 200, loss[loss=0.2273, simple_loss=0.3008, pruned_loss=0.07684, over 18456.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2799, pruned_loss=0.05658, over 2266817.94 frames. ], batch size: 59, lr: 1.17e-02, grad_scale: 8.0 +2023-03-09 00:28:42,826 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.147e+02 3.153e+02 3.905e+02 4.894e+02 1.439e+03, threshold=7.811e+02, percent-clipped=2.0 +2023-03-09 00:28:51,852 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3219, 5.4157, 2.7110, 5.2065, 5.1507, 5.4895, 5.3371, 2.9394], + device='cuda:1'), covar=tensor([0.0123, 0.0048, 0.0686, 0.0061, 0.0051, 0.0039, 0.0069, 0.0787], + device='cuda:1'), in_proj_covar=tensor([0.0073, 0.0061, 0.0085, 0.0076, 0.0070, 0.0060, 0.0072, 0.0087], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 00:28:59,424 INFO [train.py:898] (1/4) Epoch 10, batch 250, loss[loss=0.1892, simple_loss=0.2813, pruned_loss=0.04856, over 18574.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2776, pruned_loss=0.0553, over 2567822.80 frames. ], batch size: 54, lr: 1.17e-02, grad_scale: 4.0 +2023-03-09 00:29:10,002 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32966.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:29:15,946 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6089, 1.9848, 2.7083, 2.6074, 3.3418, 5.0107, 4.5715, 3.9675], + device='cuda:1'), covar=tensor([0.1089, 0.1988, 0.2120, 0.1340, 0.1688, 0.0079, 0.0342, 0.0409], + device='cuda:1'), in_proj_covar=tensor([0.0226, 0.0283, 0.0293, 0.0241, 0.0350, 0.0171, 0.0245, 0.0192], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 00:29:41,208 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5956, 3.6730, 5.3151, 4.3486, 3.4634, 3.2565, 4.5901, 5.3975], + device='cuda:1'), covar=tensor([0.0771, 0.1656, 0.0064, 0.0320, 0.0744, 0.0890, 0.0269, 0.0136], + device='cuda:1'), in_proj_covar=tensor([0.0134, 0.0234, 0.0093, 0.0155, 0.0173, 0.0172, 0.0164, 0.0131], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 00:29:58,474 INFO [train.py:898] (1/4) Epoch 10, batch 300, loss[loss=0.1861, simple_loss=0.2626, pruned_loss=0.05474, over 18222.00 frames. 
], tot_loss[loss=0.1945, simple_loss=0.278, pruned_loss=0.05555, over 2800055.68 frames. ], batch size: 45, lr: 1.16e-02, grad_scale: 4.0 +2023-03-09 00:30:21,567 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33027.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:30:26,630 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9147, 3.7532, 5.0930, 2.9924, 4.1479, 2.5749, 3.1530, 1.8296], + device='cuda:1'), covar=tensor([0.0840, 0.0764, 0.0066, 0.0615, 0.0590, 0.2173, 0.2226, 0.1773], + device='cuda:1'), in_proj_covar=tensor([0.0189, 0.0206, 0.0109, 0.0160, 0.0221, 0.0238, 0.0265, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 00:30:33,468 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4910, 2.6335, 3.9087, 3.8216, 2.3822, 4.3081, 3.9405, 2.7468], + device='cuda:1'), covar=tensor([0.0376, 0.1300, 0.0272, 0.0234, 0.1564, 0.0181, 0.0309, 0.0929], + device='cuda:1'), in_proj_covar=tensor([0.0177, 0.0214, 0.0140, 0.0134, 0.0206, 0.0176, 0.0195, 0.0190], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 00:30:40,680 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.108e+02 3.425e+02 4.260e+02 4.893e+02 1.115e+03, threshold=8.520e+02, percent-clipped=1.0 +2023-03-09 00:30:57,414 INFO [train.py:898] (1/4) Epoch 10, batch 350, loss[loss=0.2116, simple_loss=0.2962, pruned_loss=0.06349, over 18352.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.278, pruned_loss=0.05577, over 2987671.14 frames. ], batch size: 56, lr: 1.16e-02, grad_scale: 4.0 +2023-03-09 00:31:11,183 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33069.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:31:27,174 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5967, 6.0915, 5.5422, 5.8790, 5.7231, 5.6298, 6.2451, 6.1391], + device='cuda:1'), covar=tensor([0.1170, 0.0617, 0.0377, 0.0664, 0.1257, 0.0691, 0.0464, 0.0528], + device='cuda:1'), in_proj_covar=tensor([0.0501, 0.0407, 0.0304, 0.0453, 0.0608, 0.0453, 0.0575, 0.0423], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 00:31:38,440 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-09 00:31:55,109 INFO [train.py:898] (1/4) Epoch 10, batch 400, loss[loss=0.201, simple_loss=0.2861, pruned_loss=0.058, over 18374.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.278, pruned_loss=0.05587, over 3116036.42 frames. ], batch size: 50, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:32:22,384 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33130.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:32:37,086 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.379e+02 3.296e+02 4.038e+02 4.885e+02 1.161e+03, threshold=8.076e+02, percent-clipped=2.0 +2023-03-09 00:32:51,697 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33155.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:32:53,556 INFO [train.py:898] (1/4) Epoch 10, batch 450, loss[loss=0.198, simple_loss=0.2755, pruned_loss=0.06026, over 18490.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2777, pruned_loss=0.05554, over 3227528.79 frames. 
], batch size: 51, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:33:52,244 INFO [train.py:898] (1/4) Epoch 10, batch 500, loss[loss=0.2172, simple_loss=0.3007, pruned_loss=0.06683, over 18228.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2771, pruned_loss=0.05538, over 3310775.92 frames. ], batch size: 60, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:34:03,218 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33216.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:34:33,502 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.970e+02 3.300e+02 3.836e+02 4.921e+02 1.033e+03, threshold=7.671e+02, percent-clipped=3.0 +2023-03-09 00:34:49,828 INFO [train.py:898] (1/4) Epoch 10, batch 550, loss[loss=0.1677, simple_loss=0.2426, pruned_loss=0.04637, over 18492.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2765, pruned_loss=0.05541, over 3374826.74 frames. ], batch size: 44, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:35:48,947 INFO [train.py:898] (1/4) Epoch 10, batch 600, loss[loss=0.1737, simple_loss=0.2627, pruned_loss=0.04235, over 18545.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.276, pruned_loss=0.05489, over 3434561.92 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:35:49,471 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1188, 4.0714, 5.3919, 3.1926, 4.5397, 3.0205, 3.2536, 2.0767], + device='cuda:1'), covar=tensor([0.0794, 0.0681, 0.0053, 0.0593, 0.0470, 0.1885, 0.2013, 0.1630], + device='cuda:1'), in_proj_covar=tensor([0.0194, 0.0211, 0.0111, 0.0164, 0.0227, 0.0245, 0.0271, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 00:35:50,354 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3267, 5.1038, 5.5968, 5.5725, 5.2350, 6.1522, 5.7482, 5.4168], + device='cuda:1'), covar=tensor([0.0928, 0.0636, 0.0585, 0.0591, 0.1373, 0.0621, 0.0505, 0.1499], + device='cuda:1'), in_proj_covar=tensor([0.0293, 0.0224, 0.0235, 0.0240, 0.0275, 0.0330, 0.0217, 0.0328], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-09 00:36:06,532 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33322.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:36:08,319 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3702, 5.1214, 5.5679, 5.5843, 5.2809, 6.1216, 5.7366, 5.5481], + device='cuda:1'), covar=tensor([0.0984, 0.0592, 0.0578, 0.0558, 0.1361, 0.0661, 0.0581, 0.1490], + device='cuda:1'), in_proj_covar=tensor([0.0294, 0.0224, 0.0236, 0.0240, 0.0275, 0.0332, 0.0218, 0.0328], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-09 00:36:30,345 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.217e+02 3.224e+02 3.633e+02 4.391e+02 9.720e+02, threshold=7.266e+02, percent-clipped=2.0 +2023-03-09 00:36:35,560 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5750, 6.0842, 5.5525, 5.9074, 5.7302, 5.6091, 6.1597, 6.1003], + device='cuda:1'), covar=tensor([0.1031, 0.0593, 0.0400, 0.0591, 0.1210, 0.0620, 0.0494, 0.0536], + device='cuda:1'), in_proj_covar=tensor([0.0497, 0.0408, 0.0305, 0.0452, 0.0604, 0.0454, 0.0578, 0.0432], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 
00:36:46,440 INFO [train.py:898] (1/4) Epoch 10, batch 650, loss[loss=0.2261, simple_loss=0.3111, pruned_loss=0.07058, over 18238.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2762, pruned_loss=0.05482, over 3467358.05 frames. ], batch size: 60, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:37:02,042 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-03-09 00:37:09,276 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3761, 5.3407, 4.8873, 5.3400, 5.3177, 4.6773, 5.1824, 4.9354], + device='cuda:1'), covar=tensor([0.0369, 0.0419, 0.1337, 0.0627, 0.0478, 0.0410, 0.0387, 0.1060], + device='cuda:1'), in_proj_covar=tensor([0.0383, 0.0446, 0.0585, 0.0346, 0.0329, 0.0403, 0.0424, 0.0558], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 00:37:18,030 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1715, 4.3602, 2.2634, 4.3387, 5.1799, 2.5006, 3.7405, 3.8622], + device='cuda:1'), covar=tensor([0.0076, 0.0880, 0.1521, 0.0492, 0.0040, 0.1218, 0.0639, 0.0705], + device='cuda:1'), in_proj_covar=tensor([0.0107, 0.0212, 0.0181, 0.0182, 0.0083, 0.0168, 0.0193, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 00:37:45,674 INFO [train.py:898] (1/4) Epoch 10, batch 700, loss[loss=0.1758, simple_loss=0.2573, pruned_loss=0.04712, over 18366.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2762, pruned_loss=0.0549, over 3498305.19 frames. ], batch size: 46, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:37:50,543 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1813, 5.2623, 4.5143, 5.2070, 5.1748, 4.5768, 5.0416, 4.7847], + device='cuda:1'), covar=tensor([0.0671, 0.0598, 0.2152, 0.0961, 0.0753, 0.0559, 0.0638, 0.1131], + device='cuda:1'), in_proj_covar=tensor([0.0384, 0.0442, 0.0583, 0.0346, 0.0328, 0.0401, 0.0423, 0.0556], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 00:38:07,276 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33425.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:38:17,174 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.26 vs. limit=5.0 +2023-03-09 00:38:27,846 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.134e+02 3.209e+02 3.744e+02 4.761e+02 1.041e+03, threshold=7.488e+02, percent-clipped=6.0 +2023-03-09 00:38:44,042 INFO [train.py:898] (1/4) Epoch 10, batch 750, loss[loss=0.1961, simple_loss=0.291, pruned_loss=0.05053, over 18332.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2768, pruned_loss=0.05484, over 3519242.57 frames. 
], batch size: 56, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:39:00,676 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3733, 5.0821, 5.5242, 5.4298, 5.3126, 6.0630, 5.7262, 5.4769], + device='cuda:1'), covar=tensor([0.0892, 0.0642, 0.0625, 0.0674, 0.1150, 0.0631, 0.0546, 0.1467], + device='cuda:1'), in_proj_covar=tensor([0.0292, 0.0226, 0.0237, 0.0237, 0.0276, 0.0333, 0.0217, 0.0323], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-09 00:39:01,850 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33472.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:39:42,273 INFO [train.py:898] (1/4) Epoch 10, batch 800, loss[loss=0.2044, simple_loss=0.29, pruned_loss=0.05946, over 18559.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2764, pruned_loss=0.05477, over 3540608.18 frames. ], batch size: 54, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:39:47,423 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33511.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:40:13,167 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9599, 4.5508, 4.9256, 3.5961, 4.0056, 3.7756, 2.7742, 2.2446], + device='cuda:1'), covar=tensor([0.0190, 0.0156, 0.0054, 0.0249, 0.0275, 0.0193, 0.0622, 0.0862], + device='cuda:1'), in_proj_covar=tensor([0.0056, 0.0046, 0.0044, 0.0056, 0.0076, 0.0054, 0.0069, 0.0075], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-09 00:40:13,209 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33533.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:40:24,515 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.088e+02 3.240e+02 4.102e+02 4.783e+02 1.200e+03, threshold=8.205e+02, percent-clipped=1.0 +2023-03-09 00:40:40,563 INFO [train.py:898] (1/4) Epoch 10, batch 850, loss[loss=0.1811, simple_loss=0.2689, pruned_loss=0.04662, over 18536.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2774, pruned_loss=0.05512, over 3555486.87 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:41:39,909 INFO [train.py:898] (1/4) Epoch 10, batch 900, loss[loss=0.2084, simple_loss=0.2916, pruned_loss=0.06267, over 18307.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2771, pruned_loss=0.05483, over 3571552.91 frames. ], batch size: 54, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:41:44,931 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7419, 3.7345, 5.2146, 4.4695, 3.3640, 3.2933, 4.6424, 5.4194], + device='cuda:1'), covar=tensor([0.0790, 0.1805, 0.0121, 0.0318, 0.0833, 0.0960, 0.0297, 0.0137], + device='cuda:1'), in_proj_covar=tensor([0.0134, 0.0231, 0.0093, 0.0155, 0.0172, 0.0172, 0.0165, 0.0134], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 00:41:57,619 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33622.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:42:22,394 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.227e+02 3.532e+02 4.038e+02 4.740e+02 8.485e+02, threshold=8.075e+02, percent-clipped=1.0 +2023-03-09 00:42:38,558 INFO [train.py:898] (1/4) Epoch 10, batch 950, loss[loss=0.1739, simple_loss=0.2625, pruned_loss=0.04263, over 18374.00 frames. 
], tot_loss[loss=0.1927, simple_loss=0.2764, pruned_loss=0.0545, over 3577999.49 frames. ], batch size: 50, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:42:50,622 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4697, 5.3845, 5.0003, 5.3925, 5.3719, 4.7543, 5.3060, 4.9469], + device='cuda:1'), covar=tensor([0.0351, 0.0412, 0.1301, 0.0685, 0.0511, 0.0377, 0.0351, 0.0959], + device='cuda:1'), in_proj_covar=tensor([0.0390, 0.0454, 0.0605, 0.0358, 0.0334, 0.0413, 0.0435, 0.0569], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 00:42:53,742 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33670.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:42:56,732 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-03-09 00:43:01,737 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33677.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:43:36,487 INFO [train.py:898] (1/4) Epoch 10, batch 1000, loss[loss=0.2107, simple_loss=0.2885, pruned_loss=0.06646, over 18313.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2769, pruned_loss=0.05505, over 3582269.08 frames. ], batch size: 54, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:43:57,343 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33725.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:44:13,392 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33738.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 00:44:18,700 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.319e+02 3.380e+02 3.936e+02 4.816e+02 9.502e+02, threshold=7.872e+02, percent-clipped=1.0 +2023-03-09 00:44:35,202 INFO [train.py:898] (1/4) Epoch 10, batch 1050, loss[loss=0.1771, simple_loss=0.2671, pruned_loss=0.04351, over 18407.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2772, pruned_loss=0.05521, over 3575838.55 frames. ], batch size: 50, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:44:53,665 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33773.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:44:56,636 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.51 vs. limit=5.0 +2023-03-09 00:45:04,501 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.80 vs. limit=5.0 +2023-03-09 00:45:18,930 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5738, 5.5217, 5.0123, 5.5297, 5.4369, 4.7372, 5.3639, 5.0353], + device='cuda:1'), covar=tensor([0.0390, 0.0431, 0.1426, 0.0751, 0.0556, 0.0453, 0.0422, 0.1027], + device='cuda:1'), in_proj_covar=tensor([0.0387, 0.0453, 0.0598, 0.0356, 0.0331, 0.0411, 0.0431, 0.0565], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 00:45:19,424 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.02 vs. limit=2.0 +2023-03-09 00:45:26,649 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-03-09 00:45:34,466 INFO [train.py:898] (1/4) Epoch 10, batch 1100, loss[loss=0.2009, simple_loss=0.2901, pruned_loss=0.05589, over 15980.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2764, pruned_loss=0.05502, over 3578051.49 frames. 
], batch size: 95, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:45:39,287 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33811.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:45:58,240 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-09 00:45:58,710 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33828.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:46:16,690 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.260e+02 3.207e+02 4.126e+02 4.735e+02 1.316e+03, threshold=8.252e+02, percent-clipped=4.0 +2023-03-09 00:46:32,947 INFO [train.py:898] (1/4) Epoch 10, batch 1150, loss[loss=0.1949, simple_loss=0.2819, pruned_loss=0.05395, over 18264.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2769, pruned_loss=0.05495, over 3584731.04 frames. ], batch size: 60, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:46:36,038 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33859.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:47:27,283 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3583, 6.0264, 5.4855, 5.8336, 5.5362, 5.5926, 6.0680, 6.0418], + device='cuda:1'), covar=tensor([0.1187, 0.0752, 0.0406, 0.0765, 0.1520, 0.0682, 0.0545, 0.0608], + device='cuda:1'), in_proj_covar=tensor([0.0506, 0.0417, 0.0311, 0.0463, 0.0621, 0.0456, 0.0580, 0.0432], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 00:47:31,495 INFO [train.py:898] (1/4) Epoch 10, batch 1200, loss[loss=0.1933, simple_loss=0.2852, pruned_loss=0.05072, over 18504.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2768, pruned_loss=0.05474, over 3598964.08 frames. ], batch size: 51, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:48:09,188 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33939.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:48:13,370 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.163e+02 3.321e+02 3.896e+02 4.849e+02 9.830e+02, threshold=7.793e+02, percent-clipped=3.0 +2023-03-09 00:48:20,898 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8629, 3.8132, 5.3301, 4.6171, 3.3710, 3.0413, 4.4221, 5.4305], + device='cuda:1'), covar=tensor([0.0787, 0.1548, 0.0071, 0.0263, 0.0829, 0.1076, 0.0363, 0.0139], + device='cuda:1'), in_proj_covar=tensor([0.0137, 0.0235, 0.0096, 0.0157, 0.0176, 0.0174, 0.0167, 0.0136], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 00:48:29,980 INFO [train.py:898] (1/4) Epoch 10, batch 1250, loss[loss=0.2231, simple_loss=0.3039, pruned_loss=0.07111, over 16972.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2766, pruned_loss=0.05486, over 3591805.74 frames. ], batch size: 78, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:49:24,356 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34000.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:49:32,611 INFO [train.py:898] (1/4) Epoch 10, batch 1300, loss[loss=0.2172, simple_loss=0.3014, pruned_loss=0.06651, over 16360.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2783, pruned_loss=0.05562, over 3587981.61 frames. ], batch size: 94, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:49:36,879 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. 
limit=2.0 +2023-03-09 00:50:02,649 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34033.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 00:50:14,343 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.927e+02 3.198e+02 3.729e+02 4.411e+02 1.072e+03, threshold=7.459e+02, percent-clipped=4.0 +2023-03-09 00:50:15,947 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34044.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:50:30,798 INFO [train.py:898] (1/4) Epoch 10, batch 1350, loss[loss=0.1788, simple_loss=0.2672, pruned_loss=0.04524, over 18370.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2784, pruned_loss=0.05546, over 3597155.79 frames. ], batch size: 55, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:51:17,704 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-03-09 00:51:27,363 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34105.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:51:29,274 INFO [train.py:898] (1/4) Epoch 10, batch 1400, loss[loss=0.1951, simple_loss=0.2825, pruned_loss=0.05384, over 18400.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2789, pruned_loss=0.05565, over 3599498.35 frames. ], batch size: 52, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:51:54,992 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34128.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:52:11,992 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.227e+02 3.479e+02 4.188e+02 5.259e+02 1.052e+03, threshold=8.376e+02, percent-clipped=5.0 +2023-03-09 00:52:15,170 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34145.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:52:28,411 INFO [train.py:898] (1/4) Epoch 10, batch 1450, loss[loss=0.1832, simple_loss=0.2568, pruned_loss=0.05474, over 17613.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2791, pruned_loss=0.05603, over 3595279.36 frames. ], batch size: 39, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:52:50,504 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34176.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:53:26,187 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34206.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:53:26,962 INFO [train.py:898] (1/4) Epoch 10, batch 1500, loss[loss=0.1893, simple_loss=0.2836, pruned_loss=0.04745, over 18398.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2784, pruned_loss=0.0557, over 3597720.19 frames. ], batch size: 52, lr: 1.14e-02, grad_scale: 8.0 +2023-03-09 00:53:53,295 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.13 vs. limit=5.0 +2023-03-09 00:54:08,718 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.894e+02 3.400e+02 4.165e+02 5.471e+02 1.005e+03, threshold=8.329e+02, percent-clipped=3.0 +2023-03-09 00:54:25,066 INFO [train.py:898] (1/4) Epoch 10, batch 1550, loss[loss=0.1815, simple_loss=0.2719, pruned_loss=0.04554, over 18400.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2787, pruned_loss=0.05597, over 3580576.79 frames. 
], batch size: 52, lr: 1.14e-02, grad_scale: 8.0 +2023-03-09 00:55:09,443 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34295.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:55:23,417 INFO [train.py:898] (1/4) Epoch 10, batch 1600, loss[loss=0.2248, simple_loss=0.3092, pruned_loss=0.07017, over 18372.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2792, pruned_loss=0.05579, over 3589462.48 frames. ], batch size: 56, lr: 1.14e-02, grad_scale: 8.0 +2023-03-09 00:55:41,245 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2996, 4.3418, 4.3740, 4.2085, 4.1150, 4.2176, 4.5050, 4.4876], + device='cuda:1'), covar=tensor([0.0068, 0.0074, 0.0069, 0.0087, 0.0077, 0.0115, 0.0065, 0.0094], + device='cuda:1'), in_proj_covar=tensor([0.0077, 0.0055, 0.0057, 0.0072, 0.0061, 0.0084, 0.0070, 0.0070], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 00:55:53,844 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34333.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:56:05,597 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.752e+02 3.318e+02 3.747e+02 4.447e+02 1.015e+03, threshold=7.495e+02, percent-clipped=2.0 +2023-03-09 00:56:21,873 INFO [train.py:898] (1/4) Epoch 10, batch 1650, loss[loss=0.2085, simple_loss=0.3085, pruned_loss=0.05423, over 18359.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2781, pruned_loss=0.05544, over 3585018.59 frames. ], batch size: 55, lr: 1.14e-02, grad_scale: 8.0 +2023-03-09 00:56:49,658 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34381.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:57:11,874 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34400.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:57:20,095 INFO [train.py:898] (1/4) Epoch 10, batch 1700, loss[loss=0.2453, simple_loss=0.3142, pruned_loss=0.08825, over 12597.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2787, pruned_loss=0.05573, over 3580705.45 frames. ], batch size: 129, lr: 1.14e-02, grad_scale: 4.0 +2023-03-09 00:57:27,480 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0598, 3.9171, 5.3190, 2.9472, 4.3680, 2.8154, 3.1711, 2.1172], + device='cuda:1'), covar=tensor([0.0843, 0.0699, 0.0061, 0.0727, 0.0607, 0.2091, 0.2374, 0.1640], + device='cuda:1'), in_proj_covar=tensor([0.0191, 0.0212, 0.0112, 0.0162, 0.0225, 0.0242, 0.0272, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 00:57:49,573 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-03-09 00:58:03,528 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.339e+02 3.311e+02 3.796e+02 4.465e+02 1.114e+03, threshold=7.593e+02, percent-clipped=2.0 +2023-03-09 00:58:18,864 INFO [train.py:898] (1/4) Epoch 10, batch 1750, loss[loss=0.1714, simple_loss=0.2461, pruned_loss=0.04837, over 18408.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2781, pruned_loss=0.05523, over 3582560.44 frames. 
], batch size: 43, lr: 1.14e-02, grad_scale: 4.0 +2023-03-09 00:59:11,441 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34501.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:59:18,090 INFO [train.py:898] (1/4) Epoch 10, batch 1800, loss[loss=0.1835, simple_loss=0.271, pruned_loss=0.04801, over 18362.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2774, pruned_loss=0.05507, over 3587408.42 frames. ], batch size: 50, lr: 1.14e-02, grad_scale: 4.0 +2023-03-09 01:00:01,443 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.848e+02 3.049e+02 3.588e+02 4.503e+02 1.027e+03, threshold=7.176e+02, percent-clipped=5.0 +2023-03-09 01:00:16,832 INFO [train.py:898] (1/4) Epoch 10, batch 1850, loss[loss=0.1897, simple_loss=0.2782, pruned_loss=0.05054, over 18320.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2756, pruned_loss=0.05424, over 3592561.11 frames. ], batch size: 54, lr: 1.14e-02, grad_scale: 4.0 +2023-03-09 01:00:17,278 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6441, 4.2964, 4.4744, 3.1570, 3.5190, 3.4977, 2.4333, 2.1840], + device='cuda:1'), covar=tensor([0.0235, 0.0140, 0.0072, 0.0286, 0.0365, 0.0185, 0.0776, 0.0856], + device='cuda:1'), in_proj_covar=tensor([0.0058, 0.0046, 0.0045, 0.0057, 0.0078, 0.0055, 0.0071, 0.0076], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-09 01:00:33,036 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-03-09 01:00:35,120 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-09 01:00:59,504 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34593.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:01:00,615 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3719, 5.9790, 5.4191, 5.7108, 5.4785, 5.4739, 6.0261, 5.9754], + device='cuda:1'), covar=tensor([0.1235, 0.0628, 0.0473, 0.0719, 0.1470, 0.0675, 0.0521, 0.0530], + device='cuda:1'), in_proj_covar=tensor([0.0523, 0.0425, 0.0325, 0.0469, 0.0641, 0.0471, 0.0599, 0.0440], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 01:01:01,830 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34595.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:01:15,598 INFO [train.py:898] (1/4) Epoch 10, batch 1900, loss[loss=0.2469, simple_loss=0.3151, pruned_loss=0.08934, over 12630.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2765, pruned_loss=0.05489, over 3567459.31 frames. ], batch size: 129, lr: 1.14e-02, grad_scale: 4.0 +2023-03-09 01:01:33,788 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34622.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:01:37,336 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. 
limit=2.0 +2023-03-09 01:01:38,214 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34626.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:01:58,495 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34643.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:01:59,387 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.282e+02 3.244e+02 3.774e+02 4.780e+02 1.001e+03, threshold=7.549e+02, percent-clipped=4.0 +2023-03-09 01:02:11,278 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34654.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:02:12,836 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-09 01:02:14,314 INFO [train.py:898] (1/4) Epoch 10, batch 1950, loss[loss=0.1924, simple_loss=0.284, pruned_loss=0.05038, over 18313.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2765, pruned_loss=0.05475, over 3587460.81 frames. ], batch size: 54, lr: 1.14e-02, grad_scale: 4.0 +2023-03-09 01:02:22,443 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.70 vs. limit=2.0 +2023-03-09 01:02:38,450 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.59 vs. limit=5.0 +2023-03-09 01:02:44,918 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34683.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 01:02:49,882 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34687.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:02:50,014 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0919, 4.1742, 5.2036, 3.1910, 4.4113, 2.7349, 3.2086, 2.0091], + device='cuda:1'), covar=tensor([0.0774, 0.0616, 0.0069, 0.0592, 0.0511, 0.2107, 0.2100, 0.1746], + device='cuda:1'), in_proj_covar=tensor([0.0191, 0.0211, 0.0111, 0.0163, 0.0226, 0.0244, 0.0274, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 01:03:05,162 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34700.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:03:12,494 INFO [train.py:898] (1/4) Epoch 10, batch 2000, loss[loss=0.2213, simple_loss=0.3081, pruned_loss=0.0673, over 18483.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2776, pruned_loss=0.05542, over 3575544.71 frames. 
], batch size: 59, lr: 1.14e-02, grad_scale: 8.0 +2023-03-09 01:03:16,407 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34710.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 01:03:56,810 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.230e+02 3.366e+02 4.037e+02 4.949e+02 9.171e+02, threshold=8.073e+02, percent-clipped=4.0 +2023-03-09 01:04:01,496 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34748.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:04:10,624 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2370, 4.9895, 5.4166, 5.2772, 5.1571, 5.9496, 5.5093, 5.2777], + device='cuda:1'), covar=tensor([0.0857, 0.0591, 0.0641, 0.0580, 0.1303, 0.0677, 0.0543, 0.1522], + device='cuda:1'), in_proj_covar=tensor([0.0297, 0.0223, 0.0241, 0.0237, 0.0278, 0.0336, 0.0223, 0.0330], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-09 01:04:11,471 INFO [train.py:898] (1/4) Epoch 10, batch 2050, loss[loss=0.2017, simple_loss=0.2849, pruned_loss=0.05927, over 18375.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2778, pruned_loss=0.0556, over 3581067.44 frames. ], batch size: 56, lr: 1.14e-02, grad_scale: 8.0 +2023-03-09 01:04:14,355 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6635, 1.9928, 2.6073, 2.7394, 3.5167, 5.1230, 4.6322, 3.8329], + device='cuda:1'), covar=tensor([0.1134, 0.1934, 0.2339, 0.1297, 0.1539, 0.0092, 0.0333, 0.0501], + device='cuda:1'), in_proj_covar=tensor([0.0232, 0.0288, 0.0298, 0.0244, 0.0356, 0.0180, 0.0253, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 01:04:28,630 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34771.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 01:05:00,508 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0130, 4.6825, 2.6317, 4.5632, 4.5064, 4.7257, 4.5740, 2.6534], + device='cuda:1'), covar=tensor([0.0132, 0.0065, 0.0683, 0.0096, 0.0066, 0.0064, 0.0088, 0.0895], + device='cuda:1'), in_proj_covar=tensor([0.0073, 0.0064, 0.0086, 0.0079, 0.0073, 0.0063, 0.0074, 0.0089], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 01:05:04,250 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34801.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:05:10,832 INFO [train.py:898] (1/4) Epoch 10, batch 2100, loss[loss=0.1948, simple_loss=0.2864, pruned_loss=0.05164, over 17738.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2785, pruned_loss=0.05576, over 3580789.16 frames. 
], batch size: 70, lr: 1.14e-02, grad_scale: 8.0 +2023-03-09 01:05:14,670 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2087, 4.8976, 5.3818, 5.2722, 5.1025, 5.9713, 5.5227, 5.1599], + device='cuda:1'), covar=tensor([0.0978, 0.0647, 0.0644, 0.0685, 0.1469, 0.0720, 0.0705, 0.1722], + device='cuda:1'), in_proj_covar=tensor([0.0301, 0.0225, 0.0244, 0.0240, 0.0280, 0.0337, 0.0225, 0.0336], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-09 01:05:50,138 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3937, 4.9956, 5.0039, 4.9723, 4.5448, 4.8574, 4.2754, 4.8408], + device='cuda:1'), covar=tensor([0.0263, 0.0279, 0.0185, 0.0364, 0.0343, 0.0230, 0.1155, 0.0304], + device='cuda:1'), in_proj_covar=tensor([0.0165, 0.0210, 0.0195, 0.0229, 0.0208, 0.0213, 0.0272, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:1') +2023-03-09 01:05:54,600 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.339e+02 3.283e+02 3.862e+02 4.779e+02 1.024e+03, threshold=7.723e+02, percent-clipped=2.0 +2023-03-09 01:05:57,912 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2557, 5.3376, 2.8985, 5.1534, 5.0604, 5.3590, 5.1836, 2.6061], + device='cuda:1'), covar=tensor([0.0130, 0.0050, 0.0702, 0.0079, 0.0059, 0.0068, 0.0082, 0.0985], + device='cuda:1'), in_proj_covar=tensor([0.0074, 0.0063, 0.0086, 0.0079, 0.0073, 0.0063, 0.0075, 0.0089], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 01:06:01,073 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34849.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:06:09,704 INFO [train.py:898] (1/4) Epoch 10, batch 2150, loss[loss=0.174, simple_loss=0.2522, pruned_loss=0.04789, over 18413.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2792, pruned_loss=0.05602, over 3571542.18 frames. 
], batch size: 48, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:06:30,352 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6757, 2.0066, 2.7014, 2.7095, 3.3859, 5.2583, 4.5904, 3.9998], + device='cuda:1'), covar=tensor([0.1133, 0.1967, 0.2323, 0.1334, 0.1650, 0.0076, 0.0353, 0.0437], + device='cuda:1'), in_proj_covar=tensor([0.0232, 0.0289, 0.0301, 0.0244, 0.0355, 0.0180, 0.0254, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 01:06:42,250 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5503, 2.5979, 2.7055, 2.6876, 3.5391, 3.4322, 3.1153, 2.8880], + device='cuda:1'), covar=tensor([0.0179, 0.0315, 0.0580, 0.0384, 0.0158, 0.0171, 0.0311, 0.0308], + device='cuda:1'), in_proj_covar=tensor([0.0116, 0.0104, 0.0153, 0.0136, 0.0100, 0.0086, 0.0132, 0.0128], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 01:06:48,842 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4422, 5.0159, 5.5715, 5.5382, 5.2892, 6.1885, 5.8039, 5.5373], + device='cuda:1'), covar=tensor([0.0883, 0.0630, 0.0617, 0.0591, 0.1432, 0.0630, 0.0512, 0.1525], + device='cuda:1'), in_proj_covar=tensor([0.0297, 0.0222, 0.0241, 0.0237, 0.0278, 0.0331, 0.0220, 0.0328], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-09 01:07:07,668 INFO [train.py:898] (1/4) Epoch 10, batch 2200, loss[loss=0.1968, simple_loss=0.2814, pruned_loss=0.05609, over 18223.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2787, pruned_loss=0.05574, over 3574471.45 frames. ], batch size: 60, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:07:18,371 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-09 01:07:50,020 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.072e+02 3.371e+02 4.058e+02 4.999e+02 1.282e+03, threshold=8.115e+02, percent-clipped=5.0 +2023-03-09 01:07:51,447 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5049, 5.1841, 5.6818, 5.4813, 5.3843, 6.2170, 5.8183, 5.5962], + device='cuda:1'), covar=tensor([0.0954, 0.0654, 0.0614, 0.0577, 0.1359, 0.0611, 0.0478, 0.1532], + device='cuda:1'), in_proj_covar=tensor([0.0298, 0.0225, 0.0242, 0.0239, 0.0278, 0.0336, 0.0221, 0.0328], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-09 01:07:57,100 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34949.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:08:05,879 INFO [train.py:898] (1/4) Epoch 10, batch 2250, loss[loss=0.1607, simple_loss=0.2437, pruned_loss=0.03878, over 18503.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2783, pruned_loss=0.0557, over 3564999.93 frames. ], batch size: 44, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:08:29,633 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34978.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 01:08:34,774 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34982.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:09:05,056 INFO [train.py:898] (1/4) Epoch 10, batch 2300, loss[loss=0.2419, simple_loss=0.3105, pruned_loss=0.08668, over 13078.00 frames. 
], tot_loss[loss=0.1936, simple_loss=0.2768, pruned_loss=0.05516, over 3575764.68 frames. ], batch size: 130, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:09:36,341 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-03-09 01:09:39,448 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7491, 4.9260, 3.9192, 4.7376, 4.8805, 4.3782, 4.6483, 4.3492], + device='cuda:1'), covar=tensor([0.1059, 0.0791, 0.3337, 0.1322, 0.0811, 0.0625, 0.0920, 0.1530], + device='cuda:1'), in_proj_covar=tensor([0.0394, 0.0450, 0.0594, 0.0351, 0.0333, 0.0409, 0.0435, 0.0563], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 01:09:48,165 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.223e+02 3.414e+02 4.118e+02 5.165e+02 1.398e+03, threshold=8.236e+02, percent-clipped=7.0 +2023-03-09 01:09:56,635 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4595, 6.0792, 5.4835, 5.8121, 5.6283, 5.4712, 6.1128, 6.0512], + device='cuda:1'), covar=tensor([0.1217, 0.0647, 0.0488, 0.0719, 0.1440, 0.0715, 0.0522, 0.0676], + device='cuda:1'), in_proj_covar=tensor([0.0513, 0.0419, 0.0322, 0.0461, 0.0631, 0.0466, 0.0593, 0.0446], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 01:10:04,319 INFO [train.py:898] (1/4) Epoch 10, batch 2350, loss[loss=0.1642, simple_loss=0.2416, pruned_loss=0.04342, over 17706.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2772, pruned_loss=0.05502, over 3575358.47 frames. ], batch size: 39, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:10:14,493 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35066.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 01:10:38,178 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5460, 3.6035, 5.1339, 4.3445, 3.0955, 2.7600, 4.2800, 5.1601], + device='cuda:1'), covar=tensor([0.0846, 0.1586, 0.0077, 0.0318, 0.0949, 0.1166, 0.0395, 0.0197], + device='cuda:1'), in_proj_covar=tensor([0.0136, 0.0238, 0.0098, 0.0158, 0.0176, 0.0174, 0.0170, 0.0138], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 01:10:45,256 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35092.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:11:03,090 INFO [train.py:898] (1/4) Epoch 10, batch 2400, loss[loss=0.2117, simple_loss=0.3003, pruned_loss=0.0616, over 18292.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2776, pruned_loss=0.05489, over 3579649.36 frames. ], batch size: 57, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:11:46,353 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.146e+02 3.097e+02 3.497e+02 4.481e+02 9.242e+02, threshold=6.993e+02, percent-clipped=1.0 +2023-03-09 01:11:57,254 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35153.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:12:02,046 INFO [train.py:898] (1/4) Epoch 10, batch 2450, loss[loss=0.1563, simple_loss=0.2336, pruned_loss=0.03948, over 18416.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2767, pruned_loss=0.05421, over 3575469.06 frames. 
], batch size: 42, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:13:00,755 INFO [train.py:898] (1/4) Epoch 10, batch 2500, loss[loss=0.1894, simple_loss=0.2713, pruned_loss=0.05375, over 18387.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2768, pruned_loss=0.05443, over 3564695.49 frames. ], batch size: 50, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:13:43,833 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.285e+02 3.181e+02 4.060e+02 4.793e+02 9.479e+02, threshold=8.119e+02, percent-clipped=6.0 +2023-03-09 01:13:49,203 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8383, 4.4437, 4.5109, 3.1853, 3.7828, 3.7847, 2.5794, 2.2135], + device='cuda:1'), covar=tensor([0.0199, 0.0179, 0.0074, 0.0321, 0.0269, 0.0165, 0.0756, 0.0943], + device='cuda:1'), in_proj_covar=tensor([0.0057, 0.0046, 0.0046, 0.0057, 0.0078, 0.0055, 0.0072, 0.0077], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-09 01:13:50,188 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35249.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:13:58,869 INFO [train.py:898] (1/4) Epoch 10, batch 2550, loss[loss=0.1543, simple_loss=0.2361, pruned_loss=0.03628, over 18391.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2774, pruned_loss=0.05486, over 3572269.42 frames. ], batch size: 42, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:14:23,525 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35278.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 01:14:27,921 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35282.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:14:45,830 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35297.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:14:57,521 INFO [train.py:898] (1/4) Epoch 10, batch 2600, loss[loss=0.2217, simple_loss=0.3013, pruned_loss=0.07105, over 16080.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2782, pruned_loss=0.05526, over 3561221.75 frames. ], batch size: 94, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:15:20,685 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35326.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:15:25,251 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35330.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:15:40,654 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.265e+02 3.407e+02 3.893e+02 4.729e+02 1.117e+03, threshold=7.786e+02, percent-clipped=1.0 +2023-03-09 01:15:42,243 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35345.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 01:15:56,591 INFO [train.py:898] (1/4) Epoch 10, batch 2650, loss[loss=0.1848, simple_loss=0.265, pruned_loss=0.05233, over 18294.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2772, pruned_loss=0.05472, over 3568758.03 frames. 
], batch size: 49, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:16:08,175 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35366.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 01:16:17,135 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35374.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:16:54,655 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35406.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 01:16:55,353 INFO [train.py:898] (1/4) Epoch 10, batch 2700, loss[loss=0.1828, simple_loss=0.2636, pruned_loss=0.05094, over 18240.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2771, pruned_loss=0.0545, over 3583199.90 frames. ], batch size: 45, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:16:55,753 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9400, 5.0378, 4.9955, 4.6575, 4.6452, 4.8235, 5.1352, 4.9711], + device='cuda:1'), covar=tensor([0.0061, 0.0068, 0.0074, 0.0116, 0.0071, 0.0093, 0.0081, 0.0126], + device='cuda:1'), in_proj_covar=tensor([0.0079, 0.0056, 0.0059, 0.0075, 0.0062, 0.0085, 0.0071, 0.0071], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 01:17:03,986 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35414.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 01:17:28,431 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35435.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:17:37,962 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.429e+02 3.322e+02 4.284e+02 5.081e+02 8.413e+02, threshold=8.569e+02, percent-clipped=3.0 +2023-03-09 01:17:42,779 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35448.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:17:53,627 INFO [train.py:898] (1/4) Epoch 10, batch 2750, loss[loss=0.1675, simple_loss=0.244, pruned_loss=0.04548, over 18439.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2763, pruned_loss=0.05428, over 3595092.12 frames. ], batch size: 43, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:18:51,852 INFO [train.py:898] (1/4) Epoch 10, batch 2800, loss[loss=0.1826, simple_loss=0.2623, pruned_loss=0.05144, over 18379.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2757, pruned_loss=0.05431, over 3607241.28 frames. ], batch size: 50, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:19:01,305 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. 
limit=2.0 +2023-03-09 01:19:18,863 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6217, 2.7605, 4.3350, 3.7425, 2.5415, 4.6062, 3.8833, 2.8306], + device='cuda:1'), covar=tensor([0.0392, 0.1279, 0.0153, 0.0311, 0.1530, 0.0149, 0.0348, 0.0993], + device='cuda:1'), in_proj_covar=tensor([0.0178, 0.0211, 0.0144, 0.0137, 0.0209, 0.0177, 0.0198, 0.0186], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 01:19:18,876 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35530.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:19:34,531 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.967e+02 3.480e+02 4.448e+02 5.529e+02 1.797e+03, threshold=8.895e+02, percent-clipped=6.0 +2023-03-09 01:19:49,773 INFO [train.py:898] (1/4) Epoch 10, batch 2850, loss[loss=0.1789, simple_loss=0.2683, pruned_loss=0.04478, over 18546.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2768, pruned_loss=0.0547, over 3596961.11 frames. ], batch size: 49, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:20:22,511 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.34 vs. limit=5.0 +2023-03-09 01:20:29,895 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35591.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:20:47,591 INFO [train.py:898] (1/4) Epoch 10, batch 2900, loss[loss=0.2199, simple_loss=0.3011, pruned_loss=0.0693, over 17243.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2773, pruned_loss=0.05485, over 3601189.75 frames. ], batch size: 78, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:21:17,464 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1481, 5.3050, 3.0239, 5.1597, 5.0024, 5.3584, 5.1975, 2.7494], + device='cuda:1'), covar=tensor([0.0153, 0.0063, 0.0662, 0.0077, 0.0067, 0.0065, 0.0083, 0.0980], + device='cuda:1'), in_proj_covar=tensor([0.0074, 0.0064, 0.0087, 0.0079, 0.0074, 0.0063, 0.0075, 0.0091], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 01:21:32,210 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.306e+02 3.157e+02 3.765e+02 4.717e+02 8.382e+02, threshold=7.531e+02, percent-clipped=0.0 +2023-03-09 01:21:47,244 INFO [train.py:898] (1/4) Epoch 10, batch 2950, loss[loss=0.1822, simple_loss=0.2792, pruned_loss=0.04263, over 18387.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2769, pruned_loss=0.05461, over 3595168.73 frames. ], batch size: 52, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:21:55,467 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-03-09 01:22:39,462 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35701.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 01:22:46,136 INFO [train.py:898] (1/4) Epoch 10, batch 3000, loss[loss=0.1802, simple_loss=0.258, pruned_loss=0.05123, over 18516.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2765, pruned_loss=0.05438, over 3589825.47 frames. ], batch size: 47, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:22:46,137 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 01:22:58,181 INFO [train.py:932] (1/4) Epoch 10, validation: loss=0.1597, simple_loss=0.2619, pruned_loss=0.0287, over 944034.00 frames. 
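A note on the recurring [optim.py:369] lines: the five numbers after "grad-norm quartiles" are the min/25%/median/75%/max of recently observed gradient norms, and in every entry the logged threshold equals Clipping_scale times the median quartile (e.g. 2.0 x 4.448e+02 gives the threshold=8.895e+02 in the entry just above, up to display rounding), with percent-clipped apparently being the share of recent batches whose norm exceeded it. Below is a minimal sketch of that bookkeeping, assuming a fixed-size sliding window of per-batch norms; the class and method names are illustrative, not icefall's actual optim.py API.

from collections import deque

class GradNormTracker:
    # Sketch under stated assumptions, not icefall's implementation:
    # keep a sliding window of gradient norms and derive an adaptive
    # clipping threshold as clipping_scale * median, matching the
    # quartiles/threshold arithmetic visible in the log lines.
    def __init__(self, clipping_scale=2.0, window=200):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)
        self.clipped = 0

    def quartiles(self):
        s = sorted(self.norms)
        # Quartiles as logged: min, 25%, median, 75%, max.
        return [s[int(p * (len(s) - 1))] for p in (0.0, 0.25, 0.5, 0.75, 1.0)]

    def threshold(self):
        s = sorted(self.norms)
        return self.clipping_scale * s[len(s) // 2]  # scale * median

    def update(self, grad_norm):
        # Returns True if this batch's norm exceeds the adaptive threshold.
        self.norms.append(grad_norm)
        clip = len(self.norms) > 1 and grad_norm > self.threshold()
        self.clipped += clip
        return clip

tracker = GradNormTracker()
for norm in (196.7, 348.0, 444.8, 552.9, 1797.0):  # values from the entry above
    tracker.update(norm)
print(tracker.quartiles(), tracker.threshold())  # threshold ~ 8.895e+02

An adaptive threshold of this form tracks the evolving norm distribution, which is consistent with percent-clipped staying in the low single digits across these epochs even as the absolute norms drift.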
+2023-03-09 01:22:58,182 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 01:23:12,573 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.1916, 3.2836, 4.4894, 3.8987, 2.9685, 2.8204, 3.9214, 4.5990], + device='cuda:1'), covar=tensor([0.0946, 0.1454, 0.0133, 0.0356, 0.0860, 0.1093, 0.0383, 0.0250], + device='cuda:1'), in_proj_covar=tensor([0.0134, 0.0234, 0.0098, 0.0156, 0.0173, 0.0171, 0.0168, 0.0138], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 01:23:25,161 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35730.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:23:40,660 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.222e+02 3.466e+02 3.980e+02 4.724e+02 8.688e+02, threshold=7.961e+02, percent-clipped=2.0 +2023-03-09 01:23:45,587 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35748.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:23:56,278 INFO [train.py:898] (1/4) Epoch 10, batch 3050, loss[loss=0.1721, simple_loss=0.2549, pruned_loss=0.04468, over 18504.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2757, pruned_loss=0.05374, over 3598973.80 frames. ], batch size: 47, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:24:13,435 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35771.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:24:42,263 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35796.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:24:43,924 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.52 vs. limit=5.0 +2023-03-09 01:24:45,843 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1775, 5.2446, 4.7326, 5.1526, 5.1704, 4.5092, 5.0690, 4.8288], + device='cuda:1'), covar=tensor([0.0494, 0.0433, 0.1549, 0.0842, 0.0605, 0.0471, 0.0443, 0.1010], + device='cuda:1'), in_proj_covar=tensor([0.0402, 0.0450, 0.0599, 0.0349, 0.0340, 0.0411, 0.0443, 0.0575], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 01:24:55,229 INFO [train.py:898] (1/4) Epoch 10, batch 3100, loss[loss=0.1786, simple_loss=0.2628, pruned_loss=0.04718, over 18339.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2751, pruned_loss=0.05331, over 3606199.73 frames. ], batch size: 46, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:25:00,744 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35811.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:25:26,974 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35832.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:25:39,853 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.088e+02 3.519e+02 4.020e+02 4.842e+02 1.442e+03, threshold=8.041e+02, percent-clipped=6.0 +2023-03-09 01:25:54,167 INFO [train.py:898] (1/4) Epoch 10, batch 3150, loss[loss=0.2203, simple_loss=0.2992, pruned_loss=0.0707, over 18129.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2768, pruned_loss=0.0544, over 3591079.05 frames. 
], batch size: 62, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:26:04,169 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8904, 3.8213, 3.5887, 3.3356, 3.5344, 2.9252, 2.9137, 3.7657], + device='cuda:1'), covar=tensor([0.0028, 0.0067, 0.0067, 0.0086, 0.0064, 0.0130, 0.0142, 0.0056], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0116, 0.0104, 0.0152, 0.0105, 0.0149, 0.0156, 0.0089], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 01:26:12,962 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35872.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:26:28,853 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35886.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:26:52,114 INFO [train.py:898] (1/4) Epoch 10, batch 3200, loss[loss=0.2074, simple_loss=0.3039, pruned_loss=0.05546, over 18314.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2762, pruned_loss=0.05405, over 3583201.40 frames. ], batch size: 54, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:27:00,668 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-09 01:27:23,720 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-09 01:27:35,151 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.239e+02 3.585e+02 4.251e+02 5.261e+02 1.507e+03, threshold=8.503e+02, percent-clipped=6.0 +2023-03-09 01:27:49,558 INFO [train.py:898] (1/4) Epoch 10, batch 3250, loss[loss=0.1903, simple_loss=0.2793, pruned_loss=0.05067, over 18308.00 frames. ], tot_loss[loss=0.192, simple_loss=0.276, pruned_loss=0.05403, over 3580238.62 frames. ], batch size: 54, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:28:46,188 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36001.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 01:28:52,530 INFO [train.py:898] (1/4) Epoch 10, batch 3300, loss[loss=0.1514, simple_loss=0.2369, pruned_loss=0.03292, over 18164.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2755, pruned_loss=0.0537, over 3593243.16 frames. ], batch size: 44, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:29:17,205 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.01 vs. limit=2.0 +2023-03-09 01:29:19,623 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36030.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:29:35,213 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.117e+02 3.405e+02 4.202e+02 5.092e+02 8.448e+02, threshold=8.405e+02, percent-clipped=0.0 +2023-03-09 01:29:40,844 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36049.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 01:29:49,540 INFO [train.py:898] (1/4) Epoch 10, batch 3350, loss[loss=0.2219, simple_loss=0.3026, pruned_loss=0.07056, over 18562.00 frames. ], tot_loss[loss=0.192, simple_loss=0.276, pruned_loss=0.05397, over 3598135.37 frames. ], batch size: 54, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:30:13,382 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36078.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:30:47,908 INFO [train.py:898] (1/4) Epoch 10, batch 3400, loss[loss=0.2166, simple_loss=0.2933, pruned_loss=0.07, over 18258.00 frames. 
], tot_loss[loss=0.1921, simple_loss=0.2761, pruned_loss=0.054, over 3586685.35 frames. ], batch size: 60, lr: 1.11e-02, grad_scale: 8.0 +2023-03-09 01:31:10,808 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36127.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:31:31,612 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.609e+02 3.400e+02 4.178e+02 5.234e+02 8.202e+02, threshold=8.355e+02, percent-clipped=0.0 +2023-03-09 01:31:42,093 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-03-09 01:31:46,927 INFO [train.py:898] (1/4) Epoch 10, batch 3450, loss[loss=0.2054, simple_loss=0.2945, pruned_loss=0.05812, over 18359.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2757, pruned_loss=0.05348, over 3592579.26 frames. ], batch size: 56, lr: 1.11e-02, grad_scale: 8.0 +2023-03-09 01:31:58,373 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36167.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:32:14,289 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36181.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:32:20,842 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36186.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:32:23,680 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4568, 4.3679, 2.7987, 4.1303, 5.4579, 2.6490, 4.3409, 4.3790], + device='cuda:1'), covar=tensor([0.0063, 0.0805, 0.1224, 0.0507, 0.0032, 0.1127, 0.0458, 0.0451], + device='cuda:1'), in_proj_covar=tensor([0.0109, 0.0222, 0.0186, 0.0183, 0.0085, 0.0171, 0.0197, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 01:32:36,382 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8657, 4.4057, 4.7609, 4.4915, 4.4801, 4.7382, 4.9436, 4.8112], + device='cuda:1'), covar=tensor([0.0068, 0.0123, 0.0119, 0.0124, 0.0091, 0.0117, 0.0100, 0.0159], + device='cuda:1'), in_proj_covar=tensor([0.0080, 0.0057, 0.0059, 0.0075, 0.0062, 0.0087, 0.0072, 0.0071], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 01:32:45,292 INFO [train.py:898] (1/4) Epoch 10, batch 3500, loss[loss=0.1753, simple_loss=0.2632, pruned_loss=0.04365, over 18508.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2755, pruned_loss=0.0533, over 3592034.31 frames. ], batch size: 47, lr: 1.11e-02, grad_scale: 8.0 +2023-03-09 01:33:06,671 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.88 vs. limit=2.0 +2023-03-09 01:33:16,084 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36234.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:33:25,077 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36242.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:33:26,938 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.239e+02 3.308e+02 3.805e+02 4.814e+02 1.268e+03, threshold=7.610e+02, percent-clipped=2.0 +2023-03-09 01:33:41,593 INFO [train.py:898] (1/4) Epoch 10, batch 3550, loss[loss=0.1732, simple_loss=0.2517, pruned_loss=0.04733, over 18232.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2757, pruned_loss=0.05348, over 3587273.68 frames. 
], batch size: 45, lr: 1.11e-02, grad_scale: 8.0 +2023-03-09 01:33:43,976 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4418, 5.3734, 4.9971, 5.3301, 5.3406, 4.6937, 5.2814, 5.0340], + device='cuda:1'), covar=tensor([0.0358, 0.0386, 0.1352, 0.0723, 0.0495, 0.0431, 0.0393, 0.0861], + device='cuda:1'), in_proj_covar=tensor([0.0407, 0.0449, 0.0599, 0.0353, 0.0346, 0.0417, 0.0451, 0.0573], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 01:34:33,455 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1332, 4.9323, 5.1736, 5.0589, 4.9587, 5.7417, 5.3756, 5.0840], + device='cuda:1'), covar=tensor([0.0947, 0.0693, 0.0793, 0.0714, 0.1291, 0.0759, 0.0634, 0.1695], + device='cuda:1'), in_proj_covar=tensor([0.0300, 0.0233, 0.0249, 0.0246, 0.0282, 0.0348, 0.0228, 0.0336], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-09 01:34:36,278 INFO [train.py:898] (1/4) Epoch 10, batch 3600, loss[loss=0.1954, simple_loss=0.2704, pruned_loss=0.06013, over 18246.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.275, pruned_loss=0.05325, over 3602831.82 frames. ], batch size: 45, lr: 1.11e-02, grad_scale: 8.0 +2023-03-09 01:34:47,462 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6888, 4.2098, 2.7391, 3.9587, 4.0312, 4.1924, 4.1074, 2.7296], + device='cuda:1'), covar=tensor([0.0167, 0.0054, 0.0598, 0.0200, 0.0073, 0.0072, 0.0092, 0.0754], + device='cuda:1'), in_proj_covar=tensor([0.0075, 0.0064, 0.0086, 0.0079, 0.0073, 0.0063, 0.0074, 0.0090], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 01:34:54,584 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1555, 4.1222, 3.8925, 4.0420, 4.1394, 3.5875, 4.0950, 3.9600], + device='cuda:1'), covar=tensor([0.0486, 0.0670, 0.1393, 0.0813, 0.0555, 0.0529, 0.0475, 0.0936], + device='cuda:1'), in_proj_covar=tensor([0.0410, 0.0455, 0.0605, 0.0356, 0.0350, 0.0421, 0.0454, 0.0575], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 01:35:41,699 INFO [train.py:898] (1/4) Epoch 11, batch 0, loss[loss=0.2177, simple_loss=0.3025, pruned_loss=0.06645, over 16172.00 frames. ], tot_loss[loss=0.2177, simple_loss=0.3025, pruned_loss=0.06645, over 16172.00 frames. ], batch size: 94, lr: 1.06e-02, grad_scale: 8.0 +2023-03-09 01:35:41,699 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 01:35:53,392 INFO [train.py:932] (1/4) Epoch 11, validation: loss=0.1597, simple_loss=0.2625, pruned_loss=0.0284, over 944034.00 frames. 
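Likewise, the [zipformer.py:625] entries record per-stack stochastic layer dropout: each encoder stack has its own warmup window in batches (warmup_begin/warmup_end), a random subset of its layers is skipped on some steps, and by this point in training (batch_count around 36000, far past every window) num_to_drop is usually 0 with an occasional 1. A rough sketch of such a schedule follows, assuming a high per-layer drop probability through the warmup window that decays linearly to a small persistent baseline; the rates, the decay law, and the function name are assumptions for illustration, not zipformer.py's exact rule.

import random

def pick_layers_to_drop(num_layers, batch_count, warmup_begin, warmup_end,
                        warmup_rate=0.5, baseline_rate=0.05,
                        rng=random.Random(0)):
    # Per-layer drop probability: high while this stack is warming up,
    # decaying linearly to a small baseline that persists afterwards
    # (the log shows num_to_drop=1 occasionally even at batch_count~37000).
    if batch_count >= warmup_end:
        rate = baseline_rate
    else:
        frac = max(0.0, (batch_count - warmup_begin) / (warmup_end - warmup_begin))
        rate = warmup_rate + (baseline_rate - warmup_rate) * frac
    return {i for i in range(num_layers) if rng.random() < rate}

# Values taken from a nearby log entry (warmup window already finished):
layers = pick_layers_to_drop(4, batch_count=36427.0,
                             warmup_begin=2000.0, warmup_end=2666.7)
print(f"num_to_drop={len(layers)}, layers_to_drop={layers if layers else set()}")

Dropping whole layers early regularizes the deep stacks while they are still poorly conditioned; keeping a small baseline rate afterwards preserves some of that regularization, which would explain why the logged num_to_drop rarely rises above 1 this late in training.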
+2023-03-09 01:35:53,393 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 01:35:56,699 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.034e+02 3.291e+02 3.800e+02 4.653e+02 8.329e+02, threshold=7.601e+02, percent-clipped=2.0 +2023-03-09 01:36:26,606 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7123, 4.6075, 4.7746, 4.5145, 4.5226, 4.5461, 4.9278, 4.8326], + device='cuda:1'), covar=tensor([0.0061, 0.0076, 0.0076, 0.0098, 0.0068, 0.0117, 0.0060, 0.0095], + device='cuda:1'), in_proj_covar=tensor([0.0079, 0.0056, 0.0058, 0.0074, 0.0061, 0.0086, 0.0071, 0.0071], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 01:36:51,732 INFO [train.py:898] (1/4) Epoch 11, batch 50, loss[loss=0.1904, simple_loss=0.2853, pruned_loss=0.04776, over 18633.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2742, pruned_loss=0.0517, over 818373.52 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 16.0 +2023-03-09 01:37:09,904 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.45 vs. limit=5.0 +2023-03-09 01:37:35,827 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36427.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:37:51,439 INFO [train.py:898] (1/4) Epoch 11, batch 100, loss[loss=0.1783, simple_loss=0.2655, pruned_loss=0.0455, over 18484.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2752, pruned_loss=0.05221, over 1442296.82 frames. ], batch size: 51, lr: 1.06e-02, grad_scale: 16.0 +2023-03-09 01:37:54,912 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.346e+02 3.421e+02 4.363e+02 5.513e+02 1.312e+03, threshold=8.726e+02, percent-clipped=4.0 +2023-03-09 01:38:22,605 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36467.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:38:32,141 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36475.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:38:50,306 INFO [train.py:898] (1/4) Epoch 11, batch 150, loss[loss=0.1932, simple_loss=0.2831, pruned_loss=0.05163, over 15983.00 frames. ], tot_loss[loss=0.189, simple_loss=0.274, pruned_loss=0.05196, over 1919037.13 frames. ], batch size: 94, lr: 1.06e-02, grad_scale: 16.0 +2023-03-09 01:39:18,198 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36515.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:39:44,315 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36537.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:39:44,480 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2499, 5.5665, 2.9405, 5.2835, 5.1796, 5.5753, 5.4387, 2.9039], + device='cuda:1'), covar=tensor([0.0152, 0.0031, 0.0665, 0.0054, 0.0063, 0.0048, 0.0064, 0.0897], + device='cuda:1'), in_proj_covar=tensor([0.0075, 0.0064, 0.0087, 0.0079, 0.0074, 0.0063, 0.0074, 0.0090], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 01:39:48,739 INFO [train.py:898] (1/4) Epoch 11, batch 200, loss[loss=0.1623, simple_loss=0.2448, pruned_loss=0.03989, over 17710.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2725, pruned_loss=0.05198, over 2286574.11 frames. 
], batch size: 39, lr: 1.06e-02, grad_scale: 16.0 +2023-03-09 01:39:52,128 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.119e+02 3.228e+02 3.660e+02 4.259e+02 9.099e+02, threshold=7.320e+02, percent-clipped=1.0 +2023-03-09 01:40:47,363 INFO [train.py:898] (1/4) Epoch 11, batch 250, loss[loss=0.2025, simple_loss=0.2885, pruned_loss=0.05828, over 16159.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2737, pruned_loss=0.05262, over 2574431.59 frames. ], batch size: 94, lr: 1.06e-02, grad_scale: 8.0 +2023-03-09 01:41:04,160 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36605.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:41:07,716 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36608.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:41:12,308 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36612.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:41:33,338 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6985, 4.0512, 5.1610, 3.9846, 2.7647, 2.5306, 4.4446, 5.2840], + device='cuda:1'), covar=tensor([0.0766, 0.1131, 0.0080, 0.0416, 0.1083, 0.1244, 0.0339, 0.0233], + device='cuda:1'), in_proj_covar=tensor([0.0135, 0.0240, 0.0101, 0.0158, 0.0176, 0.0174, 0.0170, 0.0141], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 01:41:39,785 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.78 vs. limit=5.0 +2023-03-09 01:41:41,651 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3581, 5.2135, 5.5491, 5.5066, 5.3964, 6.1131, 5.7965, 5.4849], + device='cuda:1'), covar=tensor([0.0850, 0.0590, 0.0655, 0.0586, 0.1244, 0.0727, 0.0592, 0.1378], + device='cuda:1'), in_proj_covar=tensor([0.0303, 0.0235, 0.0251, 0.0249, 0.0281, 0.0349, 0.0231, 0.0338], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:1') +2023-03-09 01:41:47,072 INFO [train.py:898] (1/4) Epoch 11, batch 300, loss[loss=0.2138, simple_loss=0.2975, pruned_loss=0.06505, over 18071.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2733, pruned_loss=0.05242, over 2812133.97 frames. ], batch size: 65, lr: 1.06e-02, grad_scale: 8.0 +2023-03-09 01:41:51,508 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.170e+02 3.364e+02 4.244e+02 4.969e+02 8.450e+02, threshold=8.489e+02, percent-clipped=1.0 +2023-03-09 01:42:15,708 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36666.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:42:19,030 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36669.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:42:23,488 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36673.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:42:45,390 INFO [train.py:898] (1/4) Epoch 11, batch 350, loss[loss=0.1971, simple_loss=0.2804, pruned_loss=0.05689, over 18489.00 frames. ], tot_loss[loss=0.19, simple_loss=0.274, pruned_loss=0.053, over 2971366.99 frames. ], batch size: 51, lr: 1.06e-02, grad_scale: 8.0 +2023-03-09 01:42:55,938 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36700.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 01:43:32,435 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. 
limit=2.0 +2023-03-09 01:43:44,245 INFO [train.py:898] (1/4) Epoch 11, batch 400, loss[loss=0.1864, simple_loss=0.2823, pruned_loss=0.04521, over 17778.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2736, pruned_loss=0.05262, over 3108256.27 frames. ], batch size: 70, lr: 1.06e-02, grad_scale: 8.0 +2023-03-09 01:43:48,754 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.208e+02 3.230e+02 3.792e+02 4.617e+02 9.263e+02, threshold=7.584e+02, percent-clipped=1.0 +2023-03-09 01:43:49,050 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4461, 6.0974, 5.4790, 5.8481, 5.6371, 5.5557, 6.1301, 6.0727], + device='cuda:1'), covar=tensor([0.1023, 0.0571, 0.0419, 0.0584, 0.1165, 0.0599, 0.0483, 0.0506], + device='cuda:1'), in_proj_covar=tensor([0.0520, 0.0424, 0.0332, 0.0473, 0.0642, 0.0472, 0.0605, 0.0451], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 01:44:07,372 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36761.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 01:44:42,721 INFO [train.py:898] (1/4) Epoch 11, batch 450, loss[loss=0.199, simple_loss=0.2898, pruned_loss=0.05413, over 18498.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.273, pruned_loss=0.0523, over 3221394.69 frames. ], batch size: 59, lr: 1.05e-02, grad_scale: 8.0 +2023-03-09 01:44:45,856 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5375, 4.5172, 4.6085, 4.2978, 4.3407, 4.3699, 4.7099, 4.6798], + device='cuda:1'), covar=tensor([0.0070, 0.0081, 0.0060, 0.0108, 0.0068, 0.0131, 0.0090, 0.0102], + device='cuda:1'), in_proj_covar=tensor([0.0079, 0.0056, 0.0058, 0.0074, 0.0061, 0.0086, 0.0071, 0.0072], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 01:44:56,905 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.95 vs. limit=5.0 +2023-03-09 01:44:59,847 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36805.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 01:45:36,420 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36837.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:45:41,926 INFO [train.py:898] (1/4) Epoch 11, batch 500, loss[loss=0.1728, simple_loss=0.2571, pruned_loss=0.04422, over 18537.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2738, pruned_loss=0.05231, over 3308660.27 frames. 
], batch size: 49, lr: 1.05e-02, grad_scale: 8.0 +2023-03-09 01:45:47,158 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.052e+02 3.251e+02 4.103e+02 5.001e+02 1.385e+03, threshold=8.205e+02, percent-clipped=3.0 +2023-03-09 01:46:12,018 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36866.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 01:46:14,139 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9273, 3.1437, 4.3081, 3.8257, 2.8598, 4.7470, 4.0716, 3.1755], + device='cuda:1'), covar=tensor([0.0412, 0.1335, 0.0264, 0.0368, 0.1508, 0.0201, 0.0403, 0.0944], + device='cuda:1'), in_proj_covar=tensor([0.0185, 0.0223, 0.0149, 0.0143, 0.0212, 0.0184, 0.0207, 0.0192], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 01:46:33,289 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36885.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:46:39,966 INFO [train.py:898] (1/4) Epoch 11, batch 550, loss[loss=0.211, simple_loss=0.2941, pruned_loss=0.06394, over 17019.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2735, pruned_loss=0.05205, over 3364939.68 frames. ], batch size: 78, lr: 1.05e-02, grad_scale: 8.0 +2023-03-09 01:47:04,715 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36911.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:47:13,788 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2901, 5.1360, 5.4039, 5.3934, 5.2280, 5.9647, 5.6394, 5.3336], + device='cuda:1'), covar=tensor([0.0816, 0.0594, 0.0687, 0.0680, 0.1412, 0.0747, 0.0635, 0.1678], + device='cuda:1'), in_proj_covar=tensor([0.0307, 0.0239, 0.0254, 0.0251, 0.0287, 0.0354, 0.0232, 0.0344], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0003], + device='cuda:1') +2023-03-09 01:47:28,915 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6490, 3.3847, 1.9061, 4.4784, 3.3242, 4.3215, 2.1792, 3.9016], + device='cuda:1'), covar=tensor([0.0470, 0.0708, 0.1547, 0.0328, 0.0682, 0.0252, 0.1340, 0.0364], + device='cuda:1'), in_proj_covar=tensor([0.0190, 0.0211, 0.0176, 0.0233, 0.0181, 0.0238, 0.0190, 0.0183], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 01:47:38,615 INFO [train.py:898] (1/4) Epoch 11, batch 600, loss[loss=0.2153, simple_loss=0.2942, pruned_loss=0.06816, over 18479.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2735, pruned_loss=0.05209, over 3408258.65 frames. 
], batch size: 59, lr: 1.05e-02, grad_scale: 8.0 +2023-03-09 01:47:43,325 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.417e+02 3.303e+02 3.773e+02 4.556e+02 8.624e+02, threshold=7.545e+02, percent-clipped=1.0 +2023-03-09 01:48:00,214 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36958.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:48:03,430 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36961.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:48:06,918 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36964.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:48:11,435 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36968.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:48:16,117 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7095, 2.8040, 4.2274, 3.8712, 2.4903, 4.6760, 3.9560, 2.9254], + device='cuda:1'), covar=tensor([0.0420, 0.1432, 0.0291, 0.0317, 0.1704, 0.0169, 0.0445, 0.0962], + device='cuda:1'), in_proj_covar=tensor([0.0186, 0.0224, 0.0149, 0.0143, 0.0212, 0.0184, 0.0208, 0.0192], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 01:48:16,167 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36972.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:48:34,484 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.4114, 5.5594, 3.1999, 5.3790, 5.2475, 5.6537, 5.4472, 2.9135], + device='cuda:1'), covar=tensor([0.0126, 0.0046, 0.0572, 0.0055, 0.0063, 0.0043, 0.0065, 0.0806], + device='cuda:1'), in_proj_covar=tensor([0.0075, 0.0065, 0.0087, 0.0080, 0.0074, 0.0063, 0.0075, 0.0089], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 01:48:37,478 INFO [train.py:898] (1/4) Epoch 11, batch 650, loss[loss=0.1944, simple_loss=0.2842, pruned_loss=0.05229, over 18407.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2739, pruned_loss=0.05208, over 3459815.80 frames. ], batch size: 52, lr: 1.05e-02, grad_scale: 8.0 +2023-03-09 01:48:54,530 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37004.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:49:06,434 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0055, 5.3078, 2.7296, 5.1968, 5.0386, 5.4172, 5.2049, 2.4158], + device='cuda:1'), covar=tensor([0.0182, 0.0051, 0.0779, 0.0062, 0.0066, 0.0049, 0.0079, 0.1054], + device='cuda:1'), in_proj_covar=tensor([0.0075, 0.0065, 0.0087, 0.0080, 0.0074, 0.0064, 0.0076, 0.0090], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 01:49:11,970 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37019.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:49:36,618 INFO [train.py:898] (1/4) Epoch 11, batch 700, loss[loss=0.1816, simple_loss=0.2671, pruned_loss=0.04803, over 16105.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2744, pruned_loss=0.05229, over 3493021.00 frames. 
], batch size: 94, lr: 1.05e-02, grad_scale: 8.0 +2023-03-09 01:49:40,956 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.338e+02 3.308e+02 3.875e+02 4.751e+02 1.116e+03, threshold=7.751e+02, percent-clipped=5.0 +2023-03-09 01:49:54,681 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37056.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 01:49:55,332 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-03-09 01:50:05,406 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37065.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:50:34,407 INFO [train.py:898] (1/4) Epoch 11, batch 750, loss[loss=0.1857, simple_loss=0.2729, pruned_loss=0.04925, over 18243.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2749, pruned_loss=0.05279, over 3513253.86 frames. ], batch size: 45, lr: 1.05e-02, grad_scale: 8.0 +2023-03-09 01:50:50,442 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-09 01:51:33,950 INFO [train.py:898] (1/4) Epoch 11, batch 800, loss[loss=0.1635, simple_loss=0.2418, pruned_loss=0.04259, over 17639.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2737, pruned_loss=0.05251, over 3539374.79 frames. ], batch size: 39, lr: 1.05e-02, grad_scale: 8.0 +2023-03-09 01:51:38,505 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.114e+02 3.217e+02 3.576e+02 4.437e+02 1.024e+03, threshold=7.151e+02, percent-clipped=5.0 +2023-03-09 01:51:58,756 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37161.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 01:52:33,371 INFO [train.py:898] (1/4) Epoch 11, batch 850, loss[loss=0.1609, simple_loss=0.2532, pruned_loss=0.03429, over 18297.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2743, pruned_loss=0.05263, over 3539219.41 frames. ], batch size: 49, lr: 1.05e-02, grad_scale: 8.0 +2023-03-09 01:53:32,674 INFO [train.py:898] (1/4) Epoch 11, batch 900, loss[loss=0.1745, simple_loss=0.2559, pruned_loss=0.04658, over 18522.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2735, pruned_loss=0.05253, over 3536584.11 frames. ], batch size: 49, lr: 1.05e-02, grad_scale: 4.0 +2023-03-09 01:53:38,419 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.869e+02 3.048e+02 3.504e+02 4.549e+02 1.028e+03, threshold=7.008e+02, percent-clipped=4.0 +2023-03-09 01:53:56,465 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37261.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:54:00,420 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37264.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:54:04,630 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37267.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:54:05,928 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37268.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:54:05,970 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37268.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:54:32,483 INFO [train.py:898] (1/4) Epoch 11, batch 950, loss[loss=0.1884, simple_loss=0.259, pruned_loss=0.05889, over 18415.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2738, pruned_loss=0.05281, over 3547971.61 frames. 
], batch size: 42, lr: 1.05e-02, grad_scale: 4.0 +2023-03-09 01:54:49,465 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3396, 2.8169, 2.5485, 2.6785, 3.5131, 3.2548, 2.9980, 2.7536], + device='cuda:1'), covar=tensor([0.0162, 0.0275, 0.0600, 0.0358, 0.0151, 0.0170, 0.0334, 0.0306], + device='cuda:1'), in_proj_covar=tensor([0.0114, 0.0103, 0.0149, 0.0133, 0.0099, 0.0086, 0.0132, 0.0126], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 01:54:52,610 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37309.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:54:56,034 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37312.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:54:58,358 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37314.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:55:01,083 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37316.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:55:18,072 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37329.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:55:31,181 INFO [train.py:898] (1/4) Epoch 11, batch 1000, loss[loss=0.1804, simple_loss=0.2516, pruned_loss=0.05457, over 18477.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.274, pruned_loss=0.05281, over 3553519.76 frames. ], batch size: 44, lr: 1.05e-02, grad_scale: 4.0 +2023-03-09 01:55:36,617 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.267e+02 3.387e+02 4.010e+02 5.026e+02 9.863e+02, threshold=8.020e+02, percent-clipped=4.0 +2023-03-09 01:55:48,138 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37356.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 01:55:52,762 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37360.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:55:57,470 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37364.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:56:29,968 INFO [train.py:898] (1/4) Epoch 11, batch 1050, loss[loss=0.2015, simple_loss=0.2847, pruned_loss=0.05916, over 17796.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.275, pruned_loss=0.05336, over 3555221.97 frames. ], batch size: 70, lr: 1.05e-02, grad_scale: 4.0 +2023-03-09 01:56:44,662 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37404.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 01:57:04,090 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3047, 5.4557, 2.5836, 5.2755, 5.2130, 5.4944, 5.3205, 2.7633], + device='cuda:1'), covar=tensor([0.0135, 0.0045, 0.0740, 0.0054, 0.0055, 0.0060, 0.0076, 0.0867], + device='cuda:1'), in_proj_covar=tensor([0.0076, 0.0066, 0.0088, 0.0079, 0.0074, 0.0065, 0.0076, 0.0090], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 01:57:09,666 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37425.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:57:15,366 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. 
limit=2.0 +2023-03-09 01:57:15,886 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2003, 5.0237, 5.3357, 5.3951, 5.0909, 5.8854, 5.5512, 5.2388], + device='cuda:1'), covar=tensor([0.0834, 0.0631, 0.0724, 0.0603, 0.1416, 0.0791, 0.0602, 0.1472], + device='cuda:1'), in_proj_covar=tensor([0.0308, 0.0236, 0.0255, 0.0254, 0.0290, 0.0360, 0.0236, 0.0347], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0003], + device='cuda:1') +2023-03-09 01:57:28,534 INFO [train.py:898] (1/4) Epoch 11, batch 1100, loss[loss=0.2311, simple_loss=0.305, pruned_loss=0.07861, over 12740.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2742, pruned_loss=0.05298, over 3566641.20 frames. ], batch size: 129, lr: 1.05e-02, grad_scale: 4.0 +2023-03-09 01:57:34,021 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.186e+02 3.484e+02 3.927e+02 4.853e+02 9.182e+02, threshold=7.853e+02, percent-clipped=3.0 +2023-03-09 01:57:51,293 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37461.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 01:57:56,425 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-03-09 01:58:27,576 INFO [train.py:898] (1/4) Epoch 11, batch 1150, loss[loss=0.1638, simple_loss=0.2499, pruned_loss=0.03883, over 18515.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2745, pruned_loss=0.05314, over 3573674.93 frames. ], batch size: 44, lr: 1.04e-02, grad_scale: 4.0 +2023-03-09 01:58:48,403 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37509.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 01:59:26,268 INFO [train.py:898] (1/4) Epoch 11, batch 1200, loss[loss=0.1829, simple_loss=0.2722, pruned_loss=0.0468, over 18488.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2738, pruned_loss=0.05281, over 3581142.99 frames. ], batch size: 51, lr: 1.04e-02, grad_scale: 8.0 +2023-03-09 01:59:31,795 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.211e+02 3.034e+02 3.620e+02 4.493e+02 1.296e+03, threshold=7.239e+02, percent-clipped=3.0 +2023-03-09 01:59:32,329 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6309, 2.2132, 2.7895, 2.9393, 3.4283, 5.2046, 4.6807, 3.9312], + device='cuda:1'), covar=tensor([0.1102, 0.1708, 0.2150, 0.1139, 0.1566, 0.0085, 0.0304, 0.0454], + device='cuda:1'), in_proj_covar=tensor([0.0236, 0.0293, 0.0306, 0.0243, 0.0355, 0.0184, 0.0256, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 01:59:55,201 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. 
limit=2.0 +2023-03-09 01:59:55,795 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37567.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:59:55,988 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4646, 2.7254, 2.5524, 2.6182, 3.5342, 3.4777, 2.9450, 2.7845], + device='cuda:1'), covar=tensor([0.0169, 0.0274, 0.0588, 0.0381, 0.0163, 0.0129, 0.0322, 0.0332], + device='cuda:1'), in_proj_covar=tensor([0.0115, 0.0103, 0.0148, 0.0134, 0.0101, 0.0086, 0.0132, 0.0126], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 02:00:24,873 INFO [train.py:898] (1/4) Epoch 11, batch 1250, loss[loss=0.176, simple_loss=0.2482, pruned_loss=0.05196, over 18454.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2741, pruned_loss=0.05302, over 3578433.81 frames. ], batch size: 43, lr: 1.04e-02, grad_scale: 8.0 +2023-03-09 02:00:52,577 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37614.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:00:53,589 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37615.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:01:04,118 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37624.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:01:24,578 INFO [train.py:898] (1/4) Epoch 11, batch 1300, loss[loss=0.206, simple_loss=0.2908, pruned_loss=0.06065, over 18352.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2748, pruned_loss=0.05326, over 3576977.45 frames. ], batch size: 56, lr: 1.04e-02, grad_scale: 8.0 +2023-03-09 02:01:31,765 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.066e+02 3.446e+02 4.010e+02 4.726e+02 9.288e+02, threshold=8.020e+02, percent-clipped=3.0 +2023-03-09 02:01:46,683 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37660.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:01:48,851 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37662.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:02:23,187 INFO [train.py:898] (1/4) Epoch 11, batch 1350, loss[loss=0.155, simple_loss=0.2413, pruned_loss=0.03433, over 17704.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2749, pruned_loss=0.0529, over 3594282.50 frames. ], batch size: 39, lr: 1.04e-02, grad_scale: 4.0 +2023-03-09 02:02:43,280 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37708.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:02:56,757 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37720.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:03:04,874 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6014, 2.0959, 2.4361, 2.5735, 3.0190, 4.4436, 4.0299, 3.5560], + device='cuda:1'), covar=tensor([0.1172, 0.1938, 0.2453, 0.1454, 0.1828, 0.0166, 0.0436, 0.0470], + device='cuda:1'), in_proj_covar=tensor([0.0242, 0.0299, 0.0313, 0.0249, 0.0361, 0.0187, 0.0261, 0.0203], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 02:03:21,772 INFO [train.py:898] (1/4) Epoch 11, batch 1400, loss[loss=0.1944, simple_loss=0.2807, pruned_loss=0.05408, over 17138.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2746, pruned_loss=0.05254, over 3594285.63 frames. 
], batch size: 78, lr: 1.04e-02, grad_scale: 4.0 +2023-03-09 02:03:29,181 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.922e+02 3.017e+02 3.586e+02 4.269e+02 9.001e+02, threshold=7.171e+02, percent-clipped=1.0 +2023-03-09 02:03:32,404 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-09 02:04:20,147 INFO [train.py:898] (1/4) Epoch 11, batch 1450, loss[loss=0.2129, simple_loss=0.2876, pruned_loss=0.06907, over 12711.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2744, pruned_loss=0.05222, over 3592486.64 frames. ], batch size: 130, lr: 1.04e-02, grad_scale: 4.0 +2023-03-09 02:04:26,322 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3715, 4.4340, 2.8367, 4.5303, 5.3058, 2.5942, 4.2405, 4.2116], + device='cuda:1'), covar=tensor([0.0049, 0.0752, 0.1155, 0.0398, 0.0046, 0.1136, 0.0461, 0.0521], + device='cuda:1'), in_proj_covar=tensor([0.0111, 0.0226, 0.0186, 0.0184, 0.0088, 0.0170, 0.0195, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 02:05:06,843 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-09 02:05:19,789 INFO [train.py:898] (1/4) Epoch 11, batch 1500, loss[loss=0.1999, simple_loss=0.2802, pruned_loss=0.05975, over 15940.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2747, pruned_loss=0.05251, over 3579910.13 frames. ], batch size: 94, lr: 1.04e-02, grad_scale: 4.0 +2023-03-09 02:05:27,762 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.812e+02 3.086e+02 3.620e+02 4.300e+02 1.406e+03, threshold=7.239e+02, percent-clipped=4.0 +2023-03-09 02:06:18,412 INFO [train.py:898] (1/4) Epoch 11, batch 1550, loss[loss=0.1989, simple_loss=0.2877, pruned_loss=0.05506, over 18485.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2742, pruned_loss=0.05227, over 3589598.96 frames. ], batch size: 51, lr: 1.04e-02, grad_scale: 4.0 +2023-03-09 02:06:43,932 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7911, 4.5366, 4.8163, 4.4992, 4.5365, 4.8530, 4.9777, 4.7794], + device='cuda:1'), covar=tensor([0.0171, 0.0154, 0.0130, 0.0152, 0.0109, 0.0143, 0.0131, 0.0181], + device='cuda:1'), in_proj_covar=tensor([0.0079, 0.0055, 0.0058, 0.0074, 0.0061, 0.0085, 0.0071, 0.0070], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 02:06:45,287 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-09 02:06:58,473 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37924.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:07:17,456 INFO [train.py:898] (1/4) Epoch 11, batch 1600, loss[loss=0.2044, simple_loss=0.2883, pruned_loss=0.06029, over 17116.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2731, pruned_loss=0.05196, over 3582793.48 frames. ], batch size: 78, lr: 1.04e-02, grad_scale: 8.0 +2023-03-09 02:07:24,291 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.933e+02 3.097e+02 3.749e+02 4.631e+02 9.709e+02, threshold=7.497e+02, percent-clipped=4.0 +2023-03-09 02:07:37,501 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. 
+2023-03-09 02:07:53,539 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9468, 5.2490, 2.7479, 5.1308, 4.9894, 5.3160, 5.0023, 2.4117],
+       device='cuda:1'), covar=tensor([0.0181, 0.0053, 0.0734, 0.0064, 0.0061, 0.0050, 0.0102, 0.1066],
+       device='cuda:1'), in_proj_covar=tensor([0.0076, 0.0066, 0.0088, 0.0079, 0.0075, 0.0065, 0.0077, 0.0091],
+       device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0004, 0.0004, 0.0005],
+       device='cuda:1')
+2023-03-09 02:07:54,471 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37972.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:08:02,515 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37979.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:08:15,894 INFO [train.py:898] (1/4) Epoch 11, batch 1650, loss[loss=0.2013, simple_loss=0.2842, pruned_loss=0.05918, over 18487.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2736, pruned_loss=0.05213, over 3586853.44 frames. ], batch size: 53, lr: 1.04e-02, grad_scale: 8.0
+2023-03-09 02:08:55,150 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38020.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:09:18,070 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38040.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:09:18,735 INFO [train.py:898] (1/4) Epoch 11, batch 1700, loss[loss=0.1876, simple_loss=0.282, pruned_loss=0.04659, over 18466.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2732, pruned_loss=0.05206, over 3589201.76 frames. ], batch size: 53, lr: 1.04e-02, grad_scale: 8.0
+2023-03-09 02:09:25,205 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.262e+02 3.409e+02 3.922e+02 5.492e+02 2.210e+03, threshold=7.843e+02, percent-clipped=9.0
+2023-03-09 02:09:50,079 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=38068.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:09:51,757 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0
+2023-03-09 02:10:16,099 INFO [train.py:898] (1/4) Epoch 11, batch 1750, loss[loss=0.1832, simple_loss=0.2729, pruned_loss=0.04673, over 18565.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2743, pruned_loss=0.05283, over 3595655.01 frames. ], batch size: 54, lr: 1.04e-02, grad_scale: 8.0
+2023-03-09 02:10:39,249 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.32 vs. limit=5.0
+2023-03-09 02:10:40,438 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0
+2023-03-09 02:10:44,292 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5775, 3.5278, 4.9761, 4.2947, 3.0705, 2.7963, 4.3375, 5.1216],
+       device='cuda:1'), covar=tensor([0.0907, 0.1577, 0.0102, 0.0366, 0.1039, 0.1200, 0.0409, 0.0190],
+       device='cuda:1'), in_proj_covar=tensor([0.0136, 0.0242, 0.0101, 0.0159, 0.0176, 0.0173, 0.0171, 0.0143],
+       device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002],
+       device='cuda:1')
+2023-03-09 02:11:15,145 INFO [train.py:898] (1/4) Epoch 11, batch 1800, loss[loss=0.1579, simple_loss=0.23, pruned_loss=0.04284, over 18426.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2741, pruned_loss=0.05292, over 3581082.16 frames. ], batch size: 43, lr: 1.04e-02, grad_scale: 8.0
+2023-03-09 02:11:21,544 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.087e+02 3.079e+02 3.713e+02 4.653e+02 8.656e+02, threshold=7.427e+02, percent-clipped=3.0
+2023-03-09 02:12:12,799 INFO [train.py:898] (1/4) Epoch 11, batch 1850, loss[loss=0.1846, simple_loss=0.2738, pruned_loss=0.04765, over 18386.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2749, pruned_loss=0.05346, over 3575903.62 frames. ], batch size: 52, lr: 1.04e-02, grad_scale: 8.0
+2023-03-09 02:12:33,272 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7360, 5.2446, 5.2332, 5.2792, 4.7768, 5.1142, 4.5681, 5.0656],
+       device='cuda:1'), covar=tensor([0.0214, 0.0288, 0.0190, 0.0299, 0.0330, 0.0222, 0.1071, 0.0306],
+       device='cuda:1'), in_proj_covar=tensor([0.0174, 0.0219, 0.0206, 0.0247, 0.0221, 0.0224, 0.0282, 0.0207],
+       device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005],
+       device='cuda:1')
+2023-03-09 02:12:38,914 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.74 vs. limit=5.0
+2023-03-09 02:13:12,770 INFO [train.py:898] (1/4) Epoch 11, batch 1900, loss[loss=0.1694, simple_loss=0.2583, pruned_loss=0.04024, over 18350.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2738, pruned_loss=0.05263, over 3584150.80 frames. ], batch size: 46, lr: 1.03e-02, grad_scale: 8.0
+2023-03-09 02:13:19,700 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.086e+02 3.362e+02 3.965e+02 4.724e+02 1.180e+03, threshold=7.931e+02, percent-clipped=5.0
+2023-03-09 02:14:11,578 INFO [train.py:898] (1/4) Epoch 11, batch 1950, loss[loss=0.2222, simple_loss=0.2992, pruned_loss=0.07266, over 18393.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2734, pruned_loss=0.0522, over 3585039.23 frames. ], batch size: 52, lr: 1.03e-02, grad_scale: 8.0
+2023-03-09 02:14:17,937 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0
+2023-03-09 02:15:04,204 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38335.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:15:10,797 INFO [train.py:898] (1/4) Epoch 11, batch 2000, loss[loss=0.1978, simple_loss=0.2786, pruned_loss=0.05854, over 16312.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.273, pruned_loss=0.05196, over 3582273.01 frames. ], batch size: 94, lr: 1.03e-02, grad_scale: 8.0
+2023-03-09 02:15:17,653 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.188e+02 3.163e+02 3.706e+02 4.529e+02 9.366e+02, threshold=7.411e+02, percent-clipped=1.0
+2023-03-09 02:15:32,789 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38360.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:15:35,847 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7607, 2.9596, 4.2672, 4.0858, 2.6177, 4.6366, 4.0969, 3.1060],
+       device='cuda:1'), covar=tensor([0.0310, 0.1210, 0.0177, 0.0198, 0.1381, 0.0162, 0.0292, 0.0774],
+       device='cuda:1'), in_proj_covar=tensor([0.0184, 0.0216, 0.0147, 0.0140, 0.0207, 0.0179, 0.0203, 0.0184],
+       device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:1')
+2023-03-09 02:16:08,896 INFO [train.py:898] (1/4) Epoch 11, batch 2050, loss[loss=0.202, simple_loss=0.2887, pruned_loss=0.05768, over 18136.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2725, pruned_loss=0.05192, over 3588098.20 frames. ], batch size: 62, lr: 1.03e-02, grad_scale: 8.0
+2023-03-09 02:16:44,636 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38421.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:16:53,537 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.75 vs. limit=5.0
+2023-03-09 02:17:01,289 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1869, 4.2482, 2.3662, 4.2847, 5.1864, 2.5333, 3.6062, 3.7044],
+       device='cuda:1'), covar=tensor([0.0088, 0.0996, 0.1640, 0.0516, 0.0053, 0.1302, 0.0802, 0.0841],
+       device='cuda:1'), in_proj_covar=tensor([0.0113, 0.0227, 0.0186, 0.0183, 0.0090, 0.0171, 0.0197, 0.0201],
+       device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003],
+       device='cuda:1')
+2023-03-09 02:17:08,306 INFO [train.py:898] (1/4) Epoch 11, batch 2100, loss[loss=0.2002, simple_loss=0.2855, pruned_loss=0.05749, over 17088.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2727, pruned_loss=0.05182, over 3595036.74 frames. ], batch size: 78, lr: 1.03e-02, grad_scale: 4.0
+2023-03-09 02:17:15,909 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.369e+02 3.250e+02 4.019e+02 4.989e+02 1.105e+03, threshold=8.037e+02, percent-clipped=2.0
+2023-03-09 02:17:21,210 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.47 vs. limit=5.0
+2023-03-09 02:17:42,199 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9912, 4.5780, 4.7147, 3.4772, 3.9014, 3.6184, 2.6626, 2.4180],
+       device='cuda:1'), covar=tensor([0.0150, 0.0113, 0.0051, 0.0244, 0.0247, 0.0181, 0.0751, 0.0829],
+       device='cuda:1'), in_proj_covar=tensor([0.0056, 0.0047, 0.0046, 0.0058, 0.0079, 0.0055, 0.0071, 0.0077],
+       device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005],
+       device='cuda:1')
+2023-03-09 02:18:06,739 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0
+2023-03-09 02:18:07,077 INFO [train.py:898] (1/4) Epoch 11, batch 2150, loss[loss=0.1557, simple_loss=0.2369, pruned_loss=0.03723, over 18372.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2724, pruned_loss=0.05162, over 3596003.94 frames. ], batch size: 42, lr: 1.03e-02, grad_scale: 4.0
+2023-03-09 02:19:05,991 INFO [train.py:898] (1/4) Epoch 11, batch 2200, loss[loss=0.1964, simple_loss=0.2895, pruned_loss=0.05165, over 18000.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2736, pruned_loss=0.05218, over 3591423.27 frames. ], batch size: 65, lr: 1.03e-02, grad_scale: 4.0
+2023-03-09 02:19:13,834 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.988e+02 3.259e+02 3.995e+02 5.001e+02 1.029e+03, threshold=7.990e+02, percent-clipped=4.0
+2023-03-09 02:19:48,336 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8139, 2.9830, 4.4280, 4.1922, 2.5999, 4.7569, 4.0639, 2.9452],
+       device='cuda:1'), covar=tensor([0.0359, 0.1266, 0.0178, 0.0220, 0.1484, 0.0159, 0.0411, 0.0935],
+       device='cuda:1'), in_proj_covar=tensor([0.0185, 0.0215, 0.0147, 0.0139, 0.0208, 0.0178, 0.0203, 0.0187],
+       device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:1')
+2023-03-09 02:20:04,935 INFO [train.py:898] (1/4) Epoch 11, batch 2250, loss[loss=0.2363, simple_loss=0.3039, pruned_loss=0.08438, over 12608.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2737, pruned_loss=0.05216, over 3584803.05 frames. ], batch size: 130, lr: 1.03e-02, grad_scale: 4.0
+2023-03-09 02:20:55,071 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7174, 2.8749, 4.3306, 3.8806, 2.6170, 4.6120, 3.9257, 2.7052],
+       device='cuda:1'), covar=tensor([0.0410, 0.1349, 0.0209, 0.0320, 0.1447, 0.0154, 0.0409, 0.1038],
+       device='cuda:1'), in_proj_covar=tensor([0.0185, 0.0217, 0.0148, 0.0140, 0.0210, 0.0180, 0.0205, 0.0189],
+       device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:1')
+2023-03-09 02:20:57,294 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38635.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:21:04,276 INFO [train.py:898] (1/4) Epoch 11, batch 2300, loss[loss=0.1612, simple_loss=0.2382, pruned_loss=0.04214, over 18441.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2739, pruned_loss=0.05244, over 3570679.79 frames. ], batch size: 43, lr: 1.03e-02, grad_scale: 4.0
+2023-03-09 02:21:12,481 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.254e+02 3.045e+02 3.798e+02 4.329e+02 8.065e+02, threshold=7.597e+02, percent-clipped=1.0
+2023-03-09 02:21:53,530 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=38683.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:22:03,149 INFO [train.py:898] (1/4) Epoch 11, batch 2350, loss[loss=0.1824, simple_loss=0.2547, pruned_loss=0.05501, over 17701.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2738, pruned_loss=0.0522, over 3578354.94 frames. ], batch size: 39, lr: 1.03e-02, grad_scale: 4.0
+2023-03-09 02:22:32,484 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38716.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:22:41,977 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.83 vs. limit=5.0
+2023-03-09 02:23:01,651 INFO [train.py:898] (1/4) Epoch 11, batch 2400, loss[loss=0.2062, simple_loss=0.2903, pruned_loss=0.06106, over 17044.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2743, pruned_loss=0.05234, over 3576031.63 frames. ], batch size: 78, lr: 1.03e-02, grad_scale: 8.0
+2023-03-09 02:23:10,152 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.250e+02 3.115e+02 4.067e+02 4.907e+02 9.173e+02, threshold=8.134e+02, percent-clipped=4.0
+2023-03-09 02:23:28,396 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5127, 6.0632, 5.4726, 5.8808, 5.6275, 5.5554, 6.1211, 6.0404],
+       device='cuda:1'), covar=tensor([0.1203, 0.0645, 0.0466, 0.0595, 0.1334, 0.0687, 0.0490, 0.0581],
+       device='cuda:1'), in_proj_covar=tensor([0.0530, 0.0439, 0.0333, 0.0472, 0.0647, 0.0476, 0.0621, 0.0461],
+       device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004],
+       device='cuda:1')
+2023-03-09 02:23:28,789 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0
+2023-03-09 02:24:00,850 INFO [train.py:898] (1/4) Epoch 11, batch 2450, loss[loss=0.1716, simple_loss=0.2606, pruned_loss=0.04123, over 18407.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.275, pruned_loss=0.05258, over 3578495.40 frames. ], batch size: 48, lr: 1.03e-02, grad_scale: 8.0
+2023-03-09 02:24:38,782 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38823.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:24:59,957 INFO [train.py:898] (1/4) Epoch 11, batch 2500, loss[loss=0.2015, simple_loss=0.2816, pruned_loss=0.0607, over 18362.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2753, pruned_loss=0.05282, over 3580403.86 frames. ], batch size: 56, lr: 1.03e-02, grad_scale: 8.0
+2023-03-09 02:25:08,486 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.178e+02 3.118e+02 3.887e+02 4.654e+02 1.248e+03, threshold=7.775e+02, percent-clipped=2.0
+2023-03-09 02:25:50,690 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38884.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:25:58,816 INFO [train.py:898] (1/4) Epoch 11, batch 2550, loss[loss=0.1581, simple_loss=0.2374, pruned_loss=0.03944, over 18371.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2739, pruned_loss=0.05235, over 3586296.63 frames. ], batch size: 42, lr: 1.03e-02, grad_scale: 8.0
+2023-03-09 02:26:09,629 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6662, 3.6014, 3.3641, 3.0577, 3.4610, 2.6596, 2.7236, 3.6965],
+       device='cuda:1'), covar=tensor([0.0037, 0.0068, 0.0090, 0.0123, 0.0063, 0.0175, 0.0177, 0.0043],
+       device='cuda:1'), in_proj_covar=tensor([0.0098, 0.0120, 0.0108, 0.0155, 0.0106, 0.0151, 0.0157, 0.0089],
+       device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001],
+       device='cuda:1')
+2023-03-09 02:26:36,445 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9226, 5.5191, 5.4456, 5.4432, 4.9747, 5.3217, 4.7642, 5.3367],
+       device='cuda:1'), covar=tensor([0.0183, 0.0214, 0.0161, 0.0252, 0.0312, 0.0194, 0.0949, 0.0249],
+       device='cuda:1'), in_proj_covar=tensor([0.0174, 0.0220, 0.0206, 0.0247, 0.0220, 0.0224, 0.0281, 0.0209],
+       device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005],
+       device='cuda:1')
+2023-03-09 02:26:57,548 INFO [train.py:898] (1/4) Epoch 11, batch 2600, loss[loss=0.1809, simple_loss=0.2667, pruned_loss=0.04754, over 18281.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2744, pruned_loss=0.05258, over 3586201.13 frames. ], batch size: 49, lr: 1.03e-02, grad_scale: 8.0
+2023-03-09 02:27:06,468 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.982e+02 2.998e+02 3.498e+02 4.234e+02 9.480e+02, threshold=6.995e+02, percent-clipped=2.0
+2023-03-09 02:27:26,200 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.19 vs. limit=5.0
+2023-03-09 02:27:56,939 INFO [train.py:898] (1/4) Epoch 11, batch 2650, loss[loss=0.1861, simple_loss=0.2726, pruned_loss=0.04981, over 18416.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2745, pruned_loss=0.05245, over 3587163.00 frames. ], batch size: 52, lr: 1.02e-02, grad_scale: 8.0
+2023-03-09 02:28:15,983 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0
+2023-03-09 02:28:27,542 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39016.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:28:56,248 INFO [train.py:898] (1/4) Epoch 11, batch 2700, loss[loss=0.2317, simple_loss=0.3068, pruned_loss=0.07832, over 13061.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2747, pruned_loss=0.05279, over 3570012.26 frames. ], batch size: 129, lr: 1.02e-02, grad_scale: 8.0
+2023-03-09 02:29:04,928 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.335e+02 3.292e+02 3.968e+02 4.768e+02 1.831e+03, threshold=7.936e+02, percent-clipped=8.0
+2023-03-09 02:29:24,688 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39064.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:29:55,891 INFO [train.py:898] (1/4) Epoch 11, batch 2750, loss[loss=0.1889, simple_loss=0.2812, pruned_loss=0.0483, over 18177.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2743, pruned_loss=0.05259, over 3558499.04 frames. ], batch size: 62, lr: 1.02e-02, grad_scale: 8.0
+2023-03-09 02:30:55,605 INFO [train.py:898] (1/4) Epoch 11, batch 2800, loss[loss=0.1785, simple_loss=0.2597, pruned_loss=0.04871, over 18566.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2738, pruned_loss=0.05237, over 3570915.26 frames. ], batch size: 49, lr: 1.02e-02, grad_scale: 8.0
+2023-03-09 02:31:04,047 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.053e+02 3.386e+02 4.032e+02 4.876e+02 1.472e+03, threshold=8.064e+02, percent-clipped=5.0
+2023-03-09 02:31:42,089 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39179.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:31:48,996 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39185.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:31:55,188 INFO [train.py:898] (1/4) Epoch 11, batch 2850, loss[loss=0.2083, simple_loss=0.2953, pruned_loss=0.06063, over 18274.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2741, pruned_loss=0.05255, over 3578028.80 frames. ], batch size: 60, lr: 1.02e-02, grad_scale: 8.0
+2023-03-09 02:32:54,403 INFO [train.py:898] (1/4) Epoch 11, batch 2900, loss[loss=0.2265, simple_loss=0.3051, pruned_loss=0.07393, over 12253.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2741, pruned_loss=0.05248, over 3572093.02 frames. ], batch size: 130, lr: 1.02e-02, grad_scale: 8.0
+2023-03-09 02:33:00,515 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39246.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:33:02,134 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.400e+02 3.149e+02 3.663e+02 4.555e+02 1.238e+03, threshold=7.326e+02, percent-clipped=2.0
+2023-03-09 02:33:45,289 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39284.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:33:53,615 INFO [train.py:898] (1/4) Epoch 11, batch 2950, loss[loss=0.1917, simple_loss=0.2819, pruned_loss=0.05069, over 18356.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2736, pruned_loss=0.05187, over 3572157.42 frames. ], batch size: 55, lr: 1.02e-02, grad_scale: 8.0
+2023-03-09 02:33:57,558 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4901, 3.2076, 2.2585, 4.3492, 2.9542, 4.2786, 2.2832, 3.7563],
+       device='cuda:1'), covar=tensor([0.0566, 0.0888, 0.1343, 0.0349, 0.0857, 0.0272, 0.1161, 0.0414],
+       device='cuda:1'), in_proj_covar=tensor([0.0192, 0.0211, 0.0179, 0.0240, 0.0180, 0.0240, 0.0191, 0.0187],
+       device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:1')
+2023-03-09 02:34:53,168 INFO [train.py:898] (1/4) Epoch 11, batch 3000, loss[loss=0.1949, simple_loss=0.277, pruned_loss=0.05638, over 18294.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2731, pruned_loss=0.05171, over 3572933.18 frames. ], batch size: 49, lr: 1.02e-02, grad_scale: 8.0
+2023-03-09 02:34:53,169 INFO [train.py:923] (1/4) Computing validation loss
+2023-03-09 02:35:05,601 INFO [train.py:932] (1/4) Epoch 11, validation: loss=0.1587, simple_loss=0.2603, pruned_loss=0.02852, over 944034.00 frames.
+2023-03-09 02:35:05,602 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB
+2023-03-09 02:35:10,778 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39345.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:35:13,865 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.978e+02 3.242e+02 3.927e+02 4.658e+02 9.416e+02, threshold=7.854e+02, percent-clipped=4.0
+2023-03-09 02:35:33,720 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.01 vs. limit=2.0
+2023-03-09 02:36:04,359 INFO [train.py:898] (1/4) Epoch 11, batch 3050, loss[loss=0.1699, simple_loss=0.2446, pruned_loss=0.04757, over 17664.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2732, pruned_loss=0.05185, over 3565408.11 frames. ], batch size: 39, lr: 1.02e-02, grad_scale: 8.0
+2023-03-09 02:36:18,501 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1328, 5.6373, 5.2061, 5.4758, 5.2072, 5.2386, 5.7433, 5.6481],
+       device='cuda:1'), covar=tensor([0.1256, 0.0762, 0.0602, 0.0681, 0.1475, 0.0588, 0.0492, 0.0641],
+       device='cuda:1'), in_proj_covar=tensor([0.0523, 0.0434, 0.0326, 0.0462, 0.0639, 0.0466, 0.0607, 0.0452],
+       device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004],
+       device='cuda:1')
+2023-03-09 02:37:04,060 INFO [train.py:898] (1/4) Epoch 11, batch 3100, loss[loss=0.1852, simple_loss=0.2632, pruned_loss=0.0536, over 18393.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.273, pruned_loss=0.05171, over 3575378.41 frames. ], batch size: 50, lr: 1.02e-02, grad_scale: 8.0
+2023-03-09 02:37:12,129 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.117e+02 3.287e+02 3.708e+02 4.469e+02 1.141e+03, threshold=7.415e+02, percent-clipped=2.0
+2023-03-09 02:37:30,839 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39463.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:37:46,778 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.4552, 3.5817, 4.8693, 4.0752, 3.1470, 2.8711, 4.2795, 5.0761],
+       device='cuda:1'), covar=tensor([0.0872, 0.1530, 0.0142, 0.0397, 0.0889, 0.1128, 0.0367, 0.0241],
+       device='cuda:1'), in_proj_covar=tensor([0.0138, 0.0246, 0.0105, 0.0165, 0.0179, 0.0178, 0.0174, 0.0148],
+       device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002],
+       device='cuda:1')
+2023-03-09 02:37:49,624 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39479.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:37:50,927 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.51 vs. limit=5.0
+2023-03-09 02:38:02,612 INFO [train.py:898] (1/4) Epoch 11, batch 3150, loss[loss=0.2026, simple_loss=0.2899, pruned_loss=0.05763, over 18324.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2733, pruned_loss=0.05196, over 3580291.58 frames. ], batch size: 54, lr: 1.02e-02, grad_scale: 8.0
+2023-03-09 02:38:42,101 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39524.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:38:45,258 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39527.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:38:58,446 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0
+2023-03-09 02:39:01,993 INFO [train.py:898] (1/4) Epoch 11, batch 3200, loss[loss=0.1824, simple_loss=0.2739, pruned_loss=0.04542, over 18626.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2722, pruned_loss=0.05125, over 3591852.82 frames. ], batch size: 52, lr: 1.02e-02, grad_scale: 8.0
+2023-03-09 02:39:02,143 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39541.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:39:09,610 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.046e+02 3.172e+02 3.769e+02 4.644e+02 9.591e+02, threshold=7.537e+02, percent-clipped=4.0
+2023-03-09 02:39:53,844 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8972, 4.4699, 4.6159, 3.3177, 3.6849, 3.6521, 2.2859, 2.2607],
+       device='cuda:1'), covar=tensor([0.0159, 0.0158, 0.0068, 0.0260, 0.0293, 0.0140, 0.0815, 0.0884],
+       device='cuda:1'), in_proj_covar=tensor([0.0058, 0.0048, 0.0047, 0.0059, 0.0078, 0.0055, 0.0071, 0.0077],
+       device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005],
+       device='cuda:1')
+2023-03-09 02:40:01,204 INFO [train.py:898] (1/4) Epoch 11, batch 3250, loss[loss=0.1938, simple_loss=0.2787, pruned_loss=0.05443, over 18628.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2725, pruned_loss=0.05156, over 3588407.35 frames. ], batch size: 52, lr: 1.02e-02, grad_scale: 8.0
+2023-03-09 02:40:20,697 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8435, 3.1577, 4.2825, 3.8510, 2.8471, 4.7252, 4.0488, 3.0506],
+       device='cuda:1'), covar=tensor([0.0417, 0.1231, 0.0240, 0.0397, 0.1301, 0.0155, 0.0423, 0.0938],
+       device='cuda:1'), in_proj_covar=tensor([0.0185, 0.0218, 0.0150, 0.0142, 0.0207, 0.0181, 0.0206, 0.0188],
+       device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:1')
+2023-03-09 02:40:48,267 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6227, 4.2030, 2.6986, 3.9042, 3.9916, 4.1801, 4.0344, 2.5792],
+       device='cuda:1'), covar=tensor([0.0174, 0.0060, 0.0662, 0.0201, 0.0081, 0.0077, 0.0096, 0.0882],
+       device='cuda:1'), in_proj_covar=tensor([0.0077, 0.0066, 0.0086, 0.0080, 0.0075, 0.0065, 0.0075, 0.0090],
+       device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0004, 0.0004, 0.0005],
+       device='cuda:1')
+2023-03-09 02:40:58,395 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39639.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 02:40:59,366 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39640.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:41:00,287 INFO [train.py:898] (1/4) Epoch 11, batch 3300, loss[loss=0.1771, simple_loss=0.2673, pruned_loss=0.04342, over 18374.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2716, pruned_loss=0.05148, over 3589122.42 frames. ], batch size: 50, lr: 1.02e-02, grad_scale: 8.0
+2023-03-09 02:41:08,716 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.189e+02 3.111e+02 3.675e+02 4.353e+02 7.934e+02, threshold=7.351e+02, percent-clipped=2.0
+2023-03-09 02:41:59,387 INFO [train.py:898] (1/4) Epoch 11, batch 3350, loss[loss=0.1979, simple_loss=0.281, pruned_loss=0.05745, over 18365.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2723, pruned_loss=0.05177, over 3582166.72 frames. ], batch size: 50, lr: 1.02e-02, grad_scale: 8.0
+2023-03-09 02:42:10,658 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39700.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 02:42:58,360 INFO [train.py:898] (1/4) Epoch 11, batch 3400, loss[loss=0.1902, simple_loss=0.2832, pruned_loss=0.04854, over 18296.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2724, pruned_loss=0.05155, over 3581212.58 frames. ], batch size: 57, lr: 1.02e-02, grad_scale: 8.0
+2023-03-09 02:43:06,472 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.021e+02 3.201e+02 3.760e+02 4.727e+02 8.419e+02, threshold=7.521e+02, percent-clipped=1.0
+2023-03-09 02:43:29,995 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39767.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:43:58,013 INFO [train.py:898] (1/4) Epoch 11, batch 3450, loss[loss=0.1696, simple_loss=0.2545, pruned_loss=0.04235, over 18262.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2718, pruned_loss=0.05116, over 3577681.98 frames. ], batch size: 47, lr: 1.01e-02, grad_scale: 8.0
+2023-03-09 02:44:02,285 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.78 vs. limit=5.0
+2023-03-09 02:44:26,908 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39815.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:44:31,380 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39819.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:44:35,285 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.22 vs. limit=5.0
+2023-03-09 02:44:42,908 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39828.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:44:57,115 INFO [train.py:898] (1/4) Epoch 11, batch 3500, loss[loss=0.2248, simple_loss=0.299, pruned_loss=0.07527, over 18351.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2713, pruned_loss=0.05085, over 3590417.30 frames. ], batch size: 56, lr: 1.01e-02, grad_scale: 8.0
+2023-03-09 02:44:57,419 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39841.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:45:05,097 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.228e+02 3.240e+02 3.890e+02 4.468e+02 8.251e+02, threshold=7.780e+02, percent-clipped=2.0
+2023-03-09 02:45:24,044 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.77 vs. limit=5.0
+2023-03-09 02:45:37,780 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39876.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:45:41,633 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3269, 4.2112, 2.3334, 4.3029, 5.3041, 2.8042, 3.6391, 3.4799],
+       device='cuda:1'), covar=tensor([0.0085, 0.1198, 0.1481, 0.0424, 0.0046, 0.1071, 0.0635, 0.0961],
+       device='cuda:1'), in_proj_covar=tensor([0.0119, 0.0234, 0.0193, 0.0186, 0.0091, 0.0175, 0.0202, 0.0203],
+       device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003],
+       device='cuda:1')
+2023-03-09 02:45:52,191 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39889.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:45:54,263 INFO [train.py:898] (1/4) Epoch 11, batch 3550, loss[loss=0.217, simple_loss=0.3058, pruned_loss=0.0641, over 18400.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2718, pruned_loss=0.05105, over 3572447.61 frames. ], batch size: 52, lr: 1.01e-02, grad_scale: 8.0
+2023-03-09 02:46:12,870 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0
+2023-03-09 02:46:48,071 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39940.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:46:48,863 INFO [train.py:898] (1/4) Epoch 11, batch 3600, loss[loss=0.1981, simple_loss=0.2879, pruned_loss=0.05421, over 18299.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2725, pruned_loss=0.05124, over 3581802.93 frames. ], batch size: 57, lr: 1.01e-02, grad_scale: 8.0
+2023-03-09 02:46:55,874 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.247e+02 3.210e+02 3.696e+02 4.808e+02 8.251e+02, threshold=7.392e+02, percent-clipped=2.0
+2023-03-09 02:47:53,914 INFO [train.py:898] (1/4) Epoch 12, batch 0, loss[loss=0.1745, simple_loss=0.2516, pruned_loss=0.04865, over 18431.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2516, pruned_loss=0.04865, over 18431.00 frames. ], batch size: 43, lr: 9.70e-03, grad_scale: 8.0
+2023-03-09 02:47:53,914 INFO [train.py:923] (1/4) Computing validation loss
+2023-03-09 02:48:01,094 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7390, 4.4340, 2.6795, 4.4230, 4.1309, 4.4515, 4.2986, 2.4955],
+       device='cuda:1'), covar=tensor([0.0190, 0.0067, 0.0716, 0.0083, 0.0101, 0.0076, 0.0105, 0.0987],
+       device='cuda:1'), in_proj_covar=tensor([0.0079, 0.0067, 0.0088, 0.0082, 0.0077, 0.0066, 0.0076, 0.0092],
+       device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0004, 0.0004, 0.0004, 0.0004, 0.0005],
+       device='cuda:1')
+2023-03-09 02:48:04,619 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5357, 5.9702, 5.4747, 5.8413, 5.5885, 5.5928, 6.0340, 5.9471],
+       device='cuda:1'), covar=tensor([0.0952, 0.0537, 0.0258, 0.0452, 0.1158, 0.0572, 0.0405, 0.0518],
+       device='cuda:1'), in_proj_covar=tensor([0.0517, 0.0429, 0.0324, 0.0455, 0.0630, 0.0461, 0.0605, 0.0452],
+       device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004],
+       device='cuda:1')
+2023-03-09 02:48:05,854 INFO [train.py:932] (1/4) Epoch 12, validation: loss=0.1577, simple_loss=0.2601, pruned_loss=0.02771, over 944034.00 frames.
+2023-03-09 02:48:05,855 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB
+2023-03-09 02:48:21,419 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39988.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:48:29,613 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39995.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 02:49:09,607 INFO [train.py:898] (1/4) Epoch 12, batch 50, loss[loss=0.1895, simple_loss=0.2695, pruned_loss=0.05472, over 18398.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2708, pruned_loss=0.05093, over 815960.44 frames. ], batch size: 42, lr: 9.69e-03, grad_scale: 8.0
+2023-03-09 02:49:35,920 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.813e+02 3.496e+02 4.076e+02 5.396e+02 1.029e+03, threshold=8.152e+02, percent-clipped=4.0
+2023-03-09 02:50:08,399 INFO [train.py:898] (1/4) Epoch 12, batch 100, loss[loss=0.1906, simple_loss=0.2835, pruned_loss=0.04883, over 18457.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2711, pruned_loss=0.05055, over 1439874.23 frames. ], batch size: 59, lr: 9.69e-03, grad_scale: 4.0
+2023-03-09 02:50:15,831 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8851, 4.3983, 4.6178, 3.2233, 3.7426, 3.4602, 2.4257, 2.2980],
+       device='cuda:1'), covar=tensor([0.0187, 0.0145, 0.0059, 0.0281, 0.0287, 0.0203, 0.0734, 0.0881],
+       device='cuda:1'), in_proj_covar=tensor([0.0059, 0.0047, 0.0047, 0.0059, 0.0078, 0.0056, 0.0071, 0.0078],
+       device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005],
+       device='cuda:1')
+2023-03-09 02:50:41,676 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0
+2023-03-09 02:51:00,683 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40119.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:51:05,145 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40123.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:51:07,306 INFO [train.py:898] (1/4) Epoch 12, batch 150, loss[loss=0.1882, simple_loss=0.2703, pruned_loss=0.05305, over 18375.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2715, pruned_loss=0.05104, over 1919969.11 frames. ], batch size: 50, lr: 9.68e-03, grad_scale: 4.0
+2023-03-09 02:51:36,219 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.055e+02 3.093e+02 3.775e+02 4.453e+02 9.107e+02, threshold=7.551e+02, percent-clipped=1.0
+2023-03-09 02:51:57,416 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40167.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:52:01,989 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40171.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:52:06,340 INFO [train.py:898] (1/4) Epoch 12, batch 200, loss[loss=0.1998, simple_loss=0.289, pruned_loss=0.05533, over 18304.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2721, pruned_loss=0.05077, over 2300607.17 frames. ], batch size: 54, lr: 9.68e-03, grad_scale: 4.0
+2023-03-09 02:52:10,297 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3919, 3.2353, 1.9504, 4.1427, 2.8259, 4.0893, 1.9416, 3.5656],
+       device='cuda:1'), covar=tensor([0.0573, 0.0744, 0.1514, 0.0439, 0.0904, 0.0317, 0.1369, 0.0439],
+       device='cuda:1'), in_proj_covar=tensor([0.0193, 0.0212, 0.0180, 0.0239, 0.0181, 0.0243, 0.0194, 0.0186],
+       device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:1')
+2023-03-09 02:52:17,945 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0
+2023-03-09 02:52:29,943 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40195.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:52:58,338 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40218.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:53:05,982 INFO [train.py:898] (1/4) Epoch 12, batch 250, loss[loss=0.2007, simple_loss=0.2878, pruned_loss=0.05677, over 16074.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2721, pruned_loss=0.05053, over 2592087.13 frames. ], batch size: 94, lr: 9.67e-03, grad_scale: 4.0
+2023-03-09 02:53:33,645 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.098e+02 3.173e+02 3.973e+02 4.861e+02 1.364e+03, threshold=7.946e+02, percent-clipped=3.0
+2023-03-09 02:53:42,398 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40256.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:54:01,793 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40272.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:54:05,013 INFO [train.py:898] (1/4) Epoch 12, batch 300, loss[loss=0.2455, simple_loss=0.3171, pruned_loss=0.08694, over 12576.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2722, pruned_loss=0.05064, over 2811209.03 frames. ], batch size: 129, lr: 9.66e-03, grad_scale: 4.0
+2023-03-09 02:54:09,936 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40279.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:54:23,502 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8828, 3.0835, 4.3371, 4.0689, 2.7572, 4.8759, 4.0712, 2.9903],
+       device='cuda:1'), covar=tensor([0.0372, 0.1372, 0.0222, 0.0308, 0.1621, 0.0155, 0.0518, 0.1049],
+       device='cuda:1'), in_proj_covar=tensor([0.0192, 0.0228, 0.0158, 0.0148, 0.0217, 0.0190, 0.0215, 0.0196],
+       device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:1')
+2023-03-09 02:54:28,446 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40295.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 02:54:42,591 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.77 vs. limit=5.0
+2023-03-09 02:54:57,073 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5085, 5.2078, 5.7549, 5.6064, 5.4005, 6.2980, 5.9150, 5.6583],
+       device='cuda:1'), covar=tensor([0.0890, 0.0670, 0.0642, 0.0590, 0.1335, 0.0603, 0.0554, 0.1505],
+       device='cuda:1'), in_proj_covar=tensor([0.0310, 0.0241, 0.0251, 0.0256, 0.0290, 0.0356, 0.0239, 0.0350],
+       device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0003],
+       device='cuda:1')
+2023-03-09 02:55:02,199 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8507, 4.8074, 4.9470, 4.6632, 4.6318, 4.7224, 5.1025, 5.0193],
+       device='cuda:1'), covar=tensor([0.0059, 0.0063, 0.0057, 0.0087, 0.0057, 0.0107, 0.0057, 0.0070],
+       device='cuda:1'), in_proj_covar=tensor([0.0080, 0.0056, 0.0060, 0.0075, 0.0061, 0.0087, 0.0072, 0.0072],
+       device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:1')
+2023-03-09 02:55:04,180 INFO [train.py:898] (1/4) Epoch 12, batch 350, loss[loss=0.1786, simple_loss=0.2693, pruned_loss=0.04394, over 18525.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2729, pruned_loss=0.05151, over 2968666.63 frames. ], batch size: 49, lr: 9.66e-03, grad_scale: 4.0
+2023-03-09 02:55:10,147 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40330.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:55:13,438 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40333.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:55:24,770 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40343.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 02:55:31,751 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.235e+02 3.021e+02 3.795e+02 4.582e+02 1.168e+03, threshold=7.590e+02, percent-clipped=5.0
+2023-03-09 02:55:52,851 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0
+2023-03-09 02:56:01,944 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0
+2023-03-09 02:56:02,404 INFO [train.py:898] (1/4) Epoch 12, batch 400, loss[loss=0.1817, simple_loss=0.2683, pruned_loss=0.04761, over 18395.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2731, pruned_loss=0.05157, over 3101954.68 frames. ], batch size: 48, lr: 9.65e-03, grad_scale: 8.0
+2023-03-09 02:56:21,602 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40391.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:56:59,649 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40423.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:57:01,657 INFO [train.py:898] (1/4) Epoch 12, batch 450, loss[loss=0.1955, simple_loss=0.288, pruned_loss=0.0515, over 18213.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2721, pruned_loss=0.05079, over 3222263.27 frames. ], batch size: 60, lr: 9.65e-03, grad_scale: 8.0
+2023-03-09 02:57:29,713 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.841e+02 3.105e+02 3.603e+02 4.102e+02 7.155e+02, threshold=7.205e+02, percent-clipped=0.0
+2023-03-09 02:57:51,757 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0
+2023-03-09 02:57:55,815 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40471.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:57:56,001 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40471.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:58:00,091 INFO [train.py:898] (1/4) Epoch 12, batch 500, loss[loss=0.1797, simple_loss=0.2621, pruned_loss=0.04862, over 18411.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2711, pruned_loss=0.05048, over 3310971.95 frames. ], batch size: 48, lr: 9.64e-03, grad_scale: 8.0
+2023-03-09 02:58:01,676 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6210, 2.9603, 2.4515, 2.8636, 3.6393, 3.6157, 3.0283, 3.0573],
+       device='cuda:1'), covar=tensor([0.0184, 0.0248, 0.0587, 0.0337, 0.0170, 0.0146, 0.0331, 0.0335],
+       device='cuda:1'), in_proj_covar=tensor([0.0122, 0.0109, 0.0149, 0.0138, 0.0105, 0.0093, 0.0136, 0.0131],
+       device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003],
+       device='cuda:1')
+2023-03-09 02:58:20,954 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40492.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:58:52,460 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40519.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:58:59,224 INFO [train.py:898] (1/4) Epoch 12, batch 550, loss[loss=0.1775, simple_loss=0.2656, pruned_loss=0.04465, over 18578.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2699, pruned_loss=0.04981, over 3385251.93 frames. ], batch size: 54, lr: 9.63e-03, grad_scale: 8.0
+2023-03-09 02:59:15,439 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7442, 3.5848, 3.4920, 3.0747, 3.3012, 2.7070, 2.7929, 3.7212],
+       device='cuda:1'), covar=tensor([0.0034, 0.0064, 0.0059, 0.0110, 0.0087, 0.0161, 0.0145, 0.0044],
+       device='cuda:1'), in_proj_covar=tensor([0.0099, 0.0122, 0.0105, 0.0155, 0.0107, 0.0152, 0.0156, 0.0089],
+       device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001],
+       device='cuda:1')
+2023-03-09 02:59:26,877 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.116e+02 3.269e+02 4.036e+02 4.725e+02 9.923e+02, threshold=8.073e+02, percent-clipped=3.0
+2023-03-09 02:59:29,405 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40551.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:59:31,888 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40553.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:59:56,944 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40574.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 02:59:57,875 INFO [train.py:898] (1/4) Epoch 12, batch 600, loss[loss=0.1682, simple_loss=0.2464, pruned_loss=0.04501, over 18509.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2699, pruned_loss=0.04973, over 3433873.45 frames. ], batch size: 44, lr: 9.63e-03, grad_scale: 4.0
+2023-03-09 03:00:29,560 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0
+2023-03-09 03:00:56,593 INFO [train.py:898] (1/4) Epoch 12, batch 650, loss[loss=0.1749, simple_loss=0.2642, pruned_loss=0.04282, over 18311.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2703, pruned_loss=0.04973, over 3469188.18 frames. ], batch size: 54, lr: 9.62e-03, grad_scale: 4.0
+2023-03-09 03:01:00,859 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40628.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:01:26,540 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.880e+02 2.745e+02 3.552e+02 4.267e+02 1.309e+03, threshold=7.104e+02, percent-clipped=1.0
+2023-03-09 03:01:28,021 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40651.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:01:55,475 INFO [train.py:898] (1/4) Epoch 12, batch 700, loss[loss=0.2042, simple_loss=0.2966, pruned_loss=0.05587, over 18236.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2709, pruned_loss=0.05006, over 3499200.83 frames. ], batch size: 60, lr: 9.62e-03, grad_scale: 4.0
+2023-03-09 03:02:08,733 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40686.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:02:39,430 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40712.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:02:54,261 INFO [train.py:898] (1/4) Epoch 12, batch 750, loss[loss=0.2023, simple_loss=0.2865, pruned_loss=0.05904, over 18094.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2706, pruned_loss=0.04985, over 3531687.81 frames. ], batch size: 62, lr: 9.61e-03, grad_scale: 4.0
+2023-03-09 03:03:25,288 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.144e+02 3.250e+02 3.776e+02 4.415e+02 7.567e+02, threshold=7.551e+02, percent-clipped=1.0
+2023-03-09 03:03:47,058 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1408, 3.0682, 1.8997, 3.7757, 2.5948, 3.5739, 2.0749, 3.1099],
+       device='cuda:1'), covar=tensor([0.0552, 0.0784, 0.1440, 0.0520, 0.0841, 0.0334, 0.1290, 0.0471],
+       device='cuda:1'), in_proj_covar=tensor([0.0192, 0.0213, 0.0179, 0.0242, 0.0180, 0.0245, 0.0192, 0.0187],
+       device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:1')
+2023-03-09 03:03:52,298 INFO [train.py:898] (1/4) Epoch 12, batch 800, loss[loss=0.2158, simple_loss=0.2933, pruned_loss=0.06922, over 12360.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2706, pruned_loss=0.04995, over 3534912.85 frames. ], batch size: 129, lr: 9.61e-03, grad_scale: 4.0
+2023-03-09 03:04:51,199 INFO [train.py:898] (1/4) Epoch 12, batch 850, loss[loss=0.1896, simple_loss=0.2749, pruned_loss=0.05213, over 18245.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2698, pruned_loss=0.04972, over 3561364.70 frames. ], batch size: 47, lr: 9.60e-03, grad_scale: 4.0
+2023-03-09 03:05:17,938 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40848.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:05:21,110 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.184e+02 3.104e+02 3.641e+02 4.538e+02 1.053e+03, threshold=7.281e+02, percent-clipped=3.0
+2023-03-09 03:05:21,446 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40851.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:05:38,597 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40865.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:05:48,796 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40874.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:05:49,595 INFO [train.py:898] (1/4) Epoch 12, batch 900, loss[loss=0.1617, simple_loss=0.2547, pruned_loss=0.03437, over 18495.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2692, pruned_loss=0.04958, over 3575307.67 frames. ], batch size: 51, lr: 9.59e-03, grad_scale: 4.0
+2023-03-09 03:06:18,249 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40899.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:06:19,575 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40900.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:06:20,576 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2270, 5.4196, 2.8355, 5.2781, 5.2325, 5.5243, 5.2096, 2.5584],
+       device='cuda:1'), covar=tensor([0.0146, 0.0069, 0.0711, 0.0074, 0.0061, 0.0068, 0.0102, 0.0973],
+       device='cuda:1'), in_proj_covar=tensor([0.0079, 0.0068, 0.0088, 0.0082, 0.0076, 0.0067, 0.0078, 0.0092],
+       device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005],
+       device='cuda:1')
+2023-03-09 03:06:45,189 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40922.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:06:48,430 INFO [train.py:898] (1/4) Epoch 12, batch 950, loss[loss=0.1655, simple_loss=0.2615, pruned_loss=0.03475, over 18571.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2688, pruned_loss=0.04946, over 3584701.36 frames. ], batch size: 54, lr: 9.59e-03, grad_scale: 4.0
+2023-03-09 03:06:49,923 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40926.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:06:52,129 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40928.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:07:04,302 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9237, 5.4452, 5.4953, 5.4820, 5.0013, 5.3576, 4.7509, 5.3454],
+       device='cuda:1'), covar=tensor([0.0238, 0.0253, 0.0165, 0.0274, 0.0313, 0.0193, 0.0989, 0.0263],
+       device='cuda:1'), in_proj_covar=tensor([0.0181, 0.0224, 0.0215, 0.0259, 0.0228, 0.0231, 0.0289, 0.0216],
+       device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005],
+       device='cuda:1')
+2023-03-09 03:07:18,903 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.424e+02 3.344e+02 4.191e+02 5.324e+02 1.167e+03, threshold=8.382e+02, percent-clipped=6.0
+2023-03-09 03:07:31,614 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40961.0, num_to_drop=1, layers_to_drop={3}
+2023-03-09 03:07:37,122 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1805, 5.4672, 2.9008, 5.2631, 5.1769, 5.4960, 5.2608, 2.9254],
+       device='cuda:1'), covar=tensor([0.0152, 0.0057, 0.0728, 0.0067, 0.0064, 0.0066, 0.0082, 0.0855],
+       device='cuda:1'), in_proj_covar=tensor([0.0078, 0.0068, 0.0088, 0.0083, 0.0076, 0.0067, 0.0078, 0.0092],
+       device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005],
+       device='cuda:1')
+2023-03-09 03:07:46,857 INFO [train.py:898] (1/4) Epoch 12, batch 1000, loss[loss=0.1933, simple_loss=0.2856, pruned_loss=0.05054, over 18630.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2683, pruned_loss=0.04939, over 3587667.97 frames. ], batch size: 52, lr: 9.58e-03, grad_scale: 4.0
+2023-03-09 03:07:47,970 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40976.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:07:59,458 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40986.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:08:11,941 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40997.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:08:24,355 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41007.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:08:45,140 INFO [train.py:898] (1/4) Epoch 12, batch 1050, loss[loss=0.1929, simple_loss=0.2835, pruned_loss=0.0511, over 18093.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2696, pruned_loss=0.04985, over 3579881.11 frames. ], batch size: 62, lr: 9.58e-03, grad_scale: 4.0
+2023-03-09 03:08:55,723 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41034.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:09:08,689 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41045.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:09:14,993 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.197e+02 3.189e+02 3.617e+02 4.210e+02 9.012e+02, threshold=7.234e+02, percent-clipped=1.0
+2023-03-09 03:09:23,986 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41058.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:09:43,936 INFO [train.py:898] (1/4) Epoch 12, batch 1100, loss[loss=0.1723, simple_loss=0.2485, pruned_loss=0.04808, over 18392.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2698, pruned_loss=0.04989, over 3589376.95 frames. ], batch size: 42, lr: 9.57e-03, grad_scale: 4.0
+2023-03-09 03:09:54,683 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0
+2023-03-09 03:10:15,209 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41102.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:10:19,987 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41106.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:10:24,332 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6735, 2.5926, 2.5885, 2.3700, 2.6108, 2.1879, 2.2165, 2.6726],
+       device='cuda:1'), covar=tensor([0.0046, 0.0072, 0.0056, 0.0099, 0.0071, 0.0132, 0.0147, 0.0055],
+       device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0124, 0.0108, 0.0159, 0.0110, 0.0155, 0.0161, 0.0091],
+       device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+       device='cuda:1')
+2023-03-09 03:10:41,770 INFO [train.py:898] (1/4) Epoch 12, batch 1150, loss[loss=0.1623, simple_loss=0.2446, pruned_loss=0.03998, over 18186.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2702, pruned_loss=0.05004, over 3587807.65 frames. ], batch size: 44, lr: 9.56e-03, grad_scale: 4.0
+2023-03-09 03:10:56,448 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.1779, 2.9319, 3.9691, 3.5596, 2.9549, 2.8476, 3.5354, 4.0187],
+       device='cuda:1'), covar=tensor([0.0845, 0.1402, 0.0170, 0.0381, 0.0802, 0.0954, 0.0397, 0.0363],
+       device='cuda:1'), in_proj_covar=tensor([0.0137, 0.0248, 0.0107, 0.0162, 0.0180, 0.0177, 0.0175, 0.0152],
+       device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:1')
+2023-03-09 03:10:56,659 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0
+2023-03-09 03:11:07,714 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41148.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:11:11,014 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.262e+02 3.020e+02 3.568e+02 4.440e+02 1.142e+03, threshold=7.137e+02, percent-clipped=4.0
+2023-03-09 03:11:25,739 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41163.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 03:11:39,959 INFO [train.py:898] (1/4) Epoch 12, batch 1200, loss[loss=0.2025, simple_loss=0.2881, pruned_loss=0.0584, over 18357.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2708, pruned_loss=0.05065, over 3584328.18 frames. ], batch size: 56, lr: 9.56e-03, grad_scale: 8.0
], batch size: 56, lr: 9.56e-03, grad_scale: 8.0 +2023-03-09 03:12:03,585 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41196.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:12:10,832 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0004, 2.5467, 2.1527, 2.3977, 3.1341, 3.1111, 2.7738, 2.5750], + device='cuda:1'), covar=tensor([0.0188, 0.0233, 0.0584, 0.0362, 0.0175, 0.0134, 0.0358, 0.0323], + device='cuda:1'), in_proj_covar=tensor([0.0121, 0.0110, 0.0150, 0.0139, 0.0107, 0.0093, 0.0137, 0.0130], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 03:12:18,177 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5296, 2.0703, 2.5896, 2.4921, 3.3527, 5.1247, 4.6324, 4.1056], + device='cuda:1'), covar=tensor([0.1315, 0.2049, 0.2435, 0.1547, 0.1781, 0.0117, 0.0361, 0.0452], + device='cuda:1'), in_proj_covar=tensor([0.0248, 0.0306, 0.0327, 0.0252, 0.0367, 0.0190, 0.0265, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 03:12:29,065 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.3398, 3.5575, 5.0705, 4.3095, 3.3297, 2.9928, 4.2304, 5.2204], + device='cuda:1'), covar=tensor([0.0868, 0.1661, 0.0092, 0.0372, 0.0871, 0.1094, 0.0393, 0.0142], + device='cuda:1'), in_proj_covar=tensor([0.0138, 0.0250, 0.0107, 0.0163, 0.0181, 0.0178, 0.0176, 0.0153], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 03:12:34,361 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41221.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:12:38,731 INFO [train.py:898] (1/4) Epoch 12, batch 1250, loss[loss=0.1615, simple_loss=0.2399, pruned_loss=0.04152, over 17661.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2703, pruned_loss=0.05027, over 3591810.44 frames. ], batch size: 39, lr: 9.55e-03, grad_scale: 8.0 +2023-03-09 03:12:39,567 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.76 vs. limit=5.0 +2023-03-09 03:13:08,569 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.122e+02 3.147e+02 3.624e+02 4.404e+02 8.408e+02, threshold=7.247e+02, percent-clipped=2.0 +2023-03-09 03:13:14,380 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41256.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 03:13:37,220 INFO [train.py:898] (1/4) Epoch 12, batch 1300, loss[loss=0.2361, simple_loss=0.3207, pruned_loss=0.07572, over 18397.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2713, pruned_loss=0.05043, over 3594124.44 frames. 
], batch size: 52, lr: 9.55e-03, grad_scale: 8.0 +2023-03-09 03:14:14,009 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41307.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:14:19,366 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4728, 3.3013, 2.0048, 4.2494, 2.7593, 4.2413, 2.1575, 3.5848], + device='cuda:1'), covar=tensor([0.0537, 0.0755, 0.1358, 0.0383, 0.0883, 0.0290, 0.1203, 0.0402], + device='cuda:1'), in_proj_covar=tensor([0.0193, 0.0214, 0.0180, 0.0245, 0.0183, 0.0247, 0.0191, 0.0187], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 03:14:31,774 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-09 03:14:35,261 INFO [train.py:898] (1/4) Epoch 12, batch 1350, loss[loss=0.1999, simple_loss=0.2849, pruned_loss=0.05743, over 17287.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2711, pruned_loss=0.05028, over 3590471.09 frames. ], batch size: 78, lr: 9.54e-03, grad_scale: 8.0 +2023-03-09 03:15:05,736 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.782e+02 3.011e+02 3.906e+02 4.787e+02 1.227e+03, threshold=7.812e+02, percent-clipped=6.0 +2023-03-09 03:15:08,214 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41353.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:15:10,500 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41355.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:15:34,042 INFO [train.py:898] (1/4) Epoch 12, batch 1400, loss[loss=0.1609, simple_loss=0.2444, pruned_loss=0.03869, over 18472.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2718, pruned_loss=0.0505, over 3585863.78 frames. ], batch size: 44, lr: 9.54e-03, grad_scale: 8.0 +2023-03-09 03:16:04,800 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41401.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:16:15,132 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0349, 5.0039, 5.0352, 4.8675, 4.7999, 4.9328, 5.2233, 5.2003], + device='cuda:1'), covar=tensor([0.0054, 0.0055, 0.0059, 0.0079, 0.0060, 0.0095, 0.0067, 0.0081], + device='cuda:1'), in_proj_covar=tensor([0.0081, 0.0057, 0.0061, 0.0076, 0.0064, 0.0089, 0.0073, 0.0073], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 03:16:32,800 INFO [train.py:898] (1/4) Epoch 12, batch 1450, loss[loss=0.1664, simple_loss=0.2444, pruned_loss=0.04415, over 18492.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2709, pruned_loss=0.05003, over 3595622.70 frames. 
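
The zipformer.py:1455 dumps above are attention diagnostics: the first tensor appears to hold the mean entropy of each head's attention distribution, and the covar/in_proj_covar/out_proj_covar tensors appear to summarize the associated projection statistics. Entropy here is the standard -sum(p * log p) over attended positions, so values near log(num_keys) flag near-uniform heads and values near zero flag heads locked onto a single position. A small sketch of the computation, with the tensor layout assumed:

    import torch

    def attention_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
        # attn_weights: (num_heads, num_queries, num_keys), rows already
        # softmax-normalized; this layout is an assumption.  Returns the
        # mean entropy per head, one value per head as printed above.
        p = attn_weights.clamp(min=1e-20)
        ent = -(p * p.log()).sum(dim=-1)  # (num_heads, num_queries)
        return ent.mean(dim=-1)

    w = torch.softmax(torch.randn(8, 50, 50), dim=-1)
    print(attention_entropy(w))  # near log(50) ~ 3.9 for diffuse heads
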
], batch size: 44, lr: 9.53e-03, grad_scale: 8.0 +2023-03-09 03:16:35,356 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6579, 3.6697, 4.9089, 4.2937, 3.1999, 2.9050, 4.1476, 5.1226], + device='cuda:1'), covar=tensor([0.0792, 0.1501, 0.0147, 0.0367, 0.0906, 0.1089, 0.0404, 0.0167], + device='cuda:1'), in_proj_covar=tensor([0.0138, 0.0248, 0.0109, 0.0163, 0.0180, 0.0178, 0.0176, 0.0152], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 03:17:03,288 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.105e+02 2.980e+02 3.633e+02 4.455e+02 8.287e+02, threshold=7.266e+02, percent-clipped=1.0 +2023-03-09 03:17:10,803 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=5.28 vs. limit=5.0 +2023-03-09 03:17:11,395 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41458.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:17:30,615 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7507, 2.2823, 2.7586, 2.8336, 3.4223, 5.1617, 4.7427, 4.1569], + device='cuda:1'), covar=tensor([0.1216, 0.2009, 0.2390, 0.1357, 0.1858, 0.0104, 0.0338, 0.0455], + device='cuda:1'), in_proj_covar=tensor([0.0250, 0.0307, 0.0327, 0.0251, 0.0368, 0.0191, 0.0265, 0.0209], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 03:17:31,244 INFO [train.py:898] (1/4) Epoch 12, batch 1500, loss[loss=0.19, simple_loss=0.2685, pruned_loss=0.05571, over 18267.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2714, pruned_loss=0.0503, over 3590744.61 frames. ], batch size: 45, lr: 9.52e-03, grad_scale: 8.0 +2023-03-09 03:18:24,461 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41521.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:18:28,654 INFO [train.py:898] (1/4) Epoch 12, batch 1550, loss[loss=0.186, simple_loss=0.2744, pruned_loss=0.04878, over 16020.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2703, pruned_loss=0.04997, over 3581337.10 frames. ], batch size: 94, lr: 9.52e-03, grad_scale: 8.0 +2023-03-09 03:18:55,099 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4442, 3.3085, 1.9252, 4.1971, 2.8312, 4.0714, 2.4860, 3.5609], + device='cuda:1'), covar=tensor([0.0545, 0.0769, 0.1534, 0.0469, 0.0867, 0.0314, 0.1092, 0.0420], + device='cuda:1'), in_proj_covar=tensor([0.0193, 0.0213, 0.0180, 0.0243, 0.0180, 0.0245, 0.0192, 0.0186], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 03:19:00,406 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.006e+02 3.193e+02 3.688e+02 4.708e+02 1.427e+03, threshold=7.376e+02, percent-clipped=3.0 +2023-03-09 03:19:06,246 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41556.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 03:19:20,725 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41569.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:19:27,335 INFO [train.py:898] (1/4) Epoch 12, batch 1600, loss[loss=0.153, simple_loss=0.2377, pruned_loss=0.03419, over 18394.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2691, pruned_loss=0.04973, over 3576865.39 frames. 
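
The zipformer.py:625 records track stochastic layer dropping per encoder stack: each stack has its own warmup window measured in batches (warmup_begin/warmup_end), and on each batch some of its layers may be bypassed. Long after warmup, num_to_drop is usually 0 but occasionally 1, which suggests the drop rate decays to a small floor rather than to zero. A sketch under that assumption (the rates are illustrative, not read from this log):

    import random

    def choose_layers_to_drop(batch_count: float, num_layers: int,
                              warmup_begin: float, warmup_end: float,
                              initial_rate: float = 0.5,
                              final_rate: float = 0.05) -> set:
        # Per-layer drop probability decays linearly across the stack's
        # warmup window, then stays at a small floor; initial_rate and
        # final_rate are assumptions.
        if batch_count < warmup_begin:
            rate = initial_rate
        elif batch_count < warmup_end:
            frac = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
            rate = initial_rate + frac * (final_rate - initial_rate)
        else:
            rate = final_rate
        return {i for i in range(num_layers) if random.random() < rate}

    # e.g. the stack logged with warmup_begin=3333.3, warmup_end=4000.0:
    print(choose_layers_to_drop(41058.0, 4, 3333.3, 4000.0))  # usually set()
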
], batch size: 42, lr: 9.51e-03, grad_scale: 8.0 +2023-03-09 03:20:03,013 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41604.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:20:11,045 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2134, 5.2655, 4.3586, 5.0802, 5.1981, 4.6301, 5.0310, 4.7729], + device='cuda:1'), covar=tensor([0.0757, 0.0694, 0.2721, 0.1252, 0.0762, 0.0616, 0.0748, 0.1301], + device='cuda:1'), in_proj_covar=tensor([0.0419, 0.0489, 0.0634, 0.0375, 0.0361, 0.0436, 0.0471, 0.0600], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 03:20:18,045 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9566, 3.8380, 5.1297, 3.1638, 4.3262, 2.7177, 3.0969, 1.9038], + device='cuda:1'), covar=tensor([0.0898, 0.0721, 0.0078, 0.0662, 0.0542, 0.2169, 0.2343, 0.1804], + device='cuda:1'), in_proj_covar=tensor([0.0198, 0.0221, 0.0123, 0.0175, 0.0233, 0.0248, 0.0292, 0.0213], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 03:20:26,594 INFO [train.py:898] (1/4) Epoch 12, batch 1650, loss[loss=0.1649, simple_loss=0.2463, pruned_loss=0.04175, over 18376.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2697, pruned_loss=0.05003, over 3572759.52 frames. ], batch size: 46, lr: 9.51e-03, grad_scale: 8.0 +2023-03-09 03:20:58,273 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.922e+02 3.190e+02 3.689e+02 4.449e+02 1.002e+03, threshold=7.378e+02, percent-clipped=1.0 +2023-03-09 03:21:00,757 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41653.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:21:02,050 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41654.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:21:25,757 INFO [train.py:898] (1/4) Epoch 12, batch 1700, loss[loss=0.1805, simple_loss=0.2686, pruned_loss=0.04625, over 18375.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2698, pruned_loss=0.05004, over 3580151.69 frames. ], batch size: 50, lr: 9.50e-03, grad_scale: 8.0 +2023-03-09 03:21:43,244 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6790, 2.3697, 2.6466, 2.7959, 3.5197, 5.1644, 4.8302, 3.9431], + device='cuda:1'), covar=tensor([0.1268, 0.1915, 0.2465, 0.1378, 0.1696, 0.0108, 0.0298, 0.0538], + device='cuda:1'), in_proj_covar=tensor([0.0249, 0.0307, 0.0325, 0.0250, 0.0366, 0.0191, 0.0263, 0.0209], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 03:21:54,900 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.90 vs. 
limit=2.0 +2023-03-09 03:21:56,914 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8118, 3.8205, 5.0353, 2.9153, 4.2859, 2.7561, 3.0468, 1.9354], + device='cuda:1'), covar=tensor([0.0957, 0.0735, 0.0098, 0.0741, 0.0532, 0.2128, 0.2439, 0.1720], + device='cuda:1'), in_proj_covar=tensor([0.0195, 0.0217, 0.0122, 0.0171, 0.0228, 0.0243, 0.0286, 0.0209], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 03:21:57,754 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41701.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:21:57,828 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41701.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:22:06,977 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4968, 5.2417, 5.6722, 5.4601, 5.3718, 6.1577, 5.8015, 5.4549], + device='cuda:1'), covar=tensor([0.0892, 0.0501, 0.0566, 0.0599, 0.1362, 0.0653, 0.0583, 0.1404], + device='cuda:1'), in_proj_covar=tensor([0.0311, 0.0237, 0.0254, 0.0258, 0.0296, 0.0363, 0.0241, 0.0352], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0003], + device='cuda:1') +2023-03-09 03:22:09,130 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7414, 5.2668, 5.2387, 5.2969, 4.7985, 5.1283, 4.4934, 5.1482], + device='cuda:1'), covar=tensor([0.0210, 0.0281, 0.0189, 0.0276, 0.0334, 0.0206, 0.1176, 0.0272], + device='cuda:1'), in_proj_covar=tensor([0.0180, 0.0227, 0.0216, 0.0261, 0.0228, 0.0231, 0.0292, 0.0219], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:1') +2023-03-09 03:22:13,851 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41715.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 03:22:24,613 INFO [train.py:898] (1/4) Epoch 12, batch 1750, loss[loss=0.1634, simple_loss=0.2484, pruned_loss=0.03919, over 18294.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2695, pruned_loss=0.04992, over 3581849.21 frames. ], batch size: 49, lr: 9.50e-03, grad_scale: 8.0 +2023-03-09 03:22:53,562 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41749.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:22:56,197 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.910e+02 3.102e+02 3.582e+02 4.165e+02 6.416e+02, threshold=7.165e+02, percent-clipped=1.0 +2023-03-09 03:23:04,318 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41758.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:23:22,839 INFO [train.py:898] (1/4) Epoch 12, batch 1800, loss[loss=0.1735, simple_loss=0.2619, pruned_loss=0.04252, over 18377.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2696, pruned_loss=0.04977, over 3582587.96 frames. ], batch size: 50, lr: 9.49e-03, grad_scale: 8.0 +2023-03-09 03:23:59,577 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41806.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:24:20,792 INFO [train.py:898] (1/4) Epoch 12, batch 1850, loss[loss=0.1721, simple_loss=0.2613, pruned_loss=0.04145, over 18396.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2696, pruned_loss=0.04987, over 3568641.58 frames. ], batch size: 52, lr: 9.49e-03, grad_scale: 8.0 +2023-03-09 03:24:41,324 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. 
limit=2.0 +2023-03-09 03:24:51,661 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.068e+02 3.165e+02 3.584e+02 4.367e+02 1.390e+03, threshold=7.168e+02, percent-clipped=5.0 +2023-03-09 03:25:19,163 INFO [train.py:898] (1/4) Epoch 12, batch 1900, loss[loss=0.2076, simple_loss=0.2972, pruned_loss=0.05899, over 17940.00 frames. ], tot_loss[loss=0.185, simple_loss=0.27, pruned_loss=0.05002, over 3575453.60 frames. ], batch size: 65, lr: 9.48e-03, grad_scale: 8.0 +2023-03-09 03:25:56,904 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41906.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:26:17,857 INFO [train.py:898] (1/4) Epoch 12, batch 1950, loss[loss=0.1779, simple_loss=0.2543, pruned_loss=0.05073, over 18252.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2704, pruned_loss=0.04995, over 3586058.21 frames. ], batch size: 45, lr: 9.47e-03, grad_scale: 8.0 +2023-03-09 03:26:44,531 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41948.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 03:26:47,531 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.082e+02 3.043e+02 3.852e+02 4.648e+02 1.107e+03, threshold=7.704e+02, percent-clipped=2.0 +2023-03-09 03:27:07,398 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41967.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:27:16,277 INFO [train.py:898] (1/4) Epoch 12, batch 2000, loss[loss=0.1669, simple_loss=0.2514, pruned_loss=0.04123, over 18314.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2706, pruned_loss=0.04996, over 3587306.48 frames. ], batch size: 49, lr: 9.47e-03, grad_scale: 8.0 +2023-03-09 03:28:02,163 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42009.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 03:28:03,161 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42010.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 03:28:20,378 INFO [train.py:898] (1/4) Epoch 12, batch 2050, loss[loss=0.2201, simple_loss=0.2987, pruned_loss=0.07074, over 12817.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2706, pruned_loss=0.04982, over 3587617.96 frames. ], batch size: 130, lr: 9.46e-03, grad_scale: 8.0 +2023-03-09 03:28:50,578 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.053e+02 3.014e+02 3.595e+02 4.305e+02 8.922e+02, threshold=7.191e+02, percent-clipped=2.0 +2023-03-09 03:29:19,695 INFO [train.py:898] (1/4) Epoch 12, batch 2100, loss[loss=0.2404, simple_loss=0.313, pruned_loss=0.08389, over 12871.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2717, pruned_loss=0.05042, over 3583126.81 frames. ], batch size: 129, lr: 9.46e-03, grad_scale: 4.0 +2023-03-09 03:30:18,703 INFO [train.py:898] (1/4) Epoch 12, batch 2150, loss[loss=0.1872, simple_loss=0.2728, pruned_loss=0.05083, over 18275.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2709, pruned_loss=0.04978, over 3583277.68 frames. ], batch size: 57, lr: 9.45e-03, grad_scale: 4.0 +2023-03-09 03:30:49,404 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.910e+02 3.158e+02 3.739e+02 4.529e+02 7.533e+02, threshold=7.477e+02, percent-clipped=1.0 +2023-03-09 03:31:17,039 INFO [train.py:898] (1/4) Epoch 12, batch 2200, loss[loss=0.1868, simple_loss=0.2796, pruned_loss=0.04701, over 18571.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2713, pruned_loss=0.05004, over 3585115.02 frames. 
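
Note how "batch size" swings between roughly 40 and 130 cuts across these records: batches are assembled up to a total audio duration rather than to a fixed count, so batches of short utterances hold many more cuts than batches of long ones. A sketch of building such a sampler with Lhotse's DynamicBucketingSampler (the max_duration and num_buckets values below are assumptions for illustration):

    from lhotse import CutSet
    from lhotse.dataset import DynamicBucketingSampler

    def make_train_sampler(cuts: CutSet) -> DynamicBucketingSampler:
        # Buckets cuts by duration, then fills each batch up to a fixed
        # total duration in seconds; hence the widely varying per-batch
        # cut counts seen in this log.  Values are assumptions.
        return DynamicBucketingSampler(
            cuts,
            max_duration=750.0,
            num_buckets=30,
            shuffle=True,
            drop_last=True,
        )
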
], batch size: 54, lr: 9.45e-03, grad_scale: 4.0 +2023-03-09 03:31:28,616 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5809, 6.1750, 5.5049, 5.9216, 5.7360, 5.5878, 6.1768, 6.1793], + device='cuda:1'), covar=tensor([0.1075, 0.0564, 0.0419, 0.0600, 0.1214, 0.0698, 0.0500, 0.0554], + device='cuda:1'), in_proj_covar=tensor([0.0539, 0.0445, 0.0341, 0.0475, 0.0658, 0.0484, 0.0630, 0.0471], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 03:32:15,654 INFO [train.py:898] (1/4) Epoch 12, batch 2250, loss[loss=0.1672, simple_loss=0.2462, pruned_loss=0.04411, over 18372.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2709, pruned_loss=0.04986, over 3589502.12 frames. ], batch size: 46, lr: 9.44e-03, grad_scale: 4.0 +2023-03-09 03:32:17,305 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2791, 4.3976, 2.3827, 4.3774, 5.3573, 2.7904, 3.7776, 3.9880], + device='cuda:1'), covar=tensor([0.0095, 0.0953, 0.1718, 0.0454, 0.0053, 0.1164, 0.0690, 0.0707], + device='cuda:1'), in_proj_covar=tensor([0.0122, 0.0238, 0.0194, 0.0189, 0.0094, 0.0176, 0.0204, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 03:32:46,342 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.218e+02 3.002e+02 3.427e+02 4.398e+02 7.070e+02, threshold=6.854e+02, percent-clipped=0.0 +2023-03-09 03:32:58,291 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42262.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:33:14,286 INFO [train.py:898] (1/4) Epoch 12, batch 2300, loss[loss=0.1867, simple_loss=0.2752, pruned_loss=0.04906, over 18387.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2713, pruned_loss=0.04996, over 3579505.47 frames. ], batch size: 52, lr: 9.44e-03, grad_scale: 4.0 +2023-03-09 03:33:42,955 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7542, 3.5338, 2.2193, 4.4721, 3.1416, 4.5038, 2.6878, 4.0077], + device='cuda:1'), covar=tensor([0.0468, 0.0675, 0.1371, 0.0359, 0.0765, 0.0205, 0.0993, 0.0357], + device='cuda:1'), in_proj_covar=tensor([0.0197, 0.0215, 0.0182, 0.0247, 0.0182, 0.0246, 0.0193, 0.0187], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 03:33:47,384 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42304.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 03:33:54,924 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42310.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 03:34:13,010 INFO [train.py:898] (1/4) Epoch 12, batch 2350, loss[loss=0.2275, simple_loss=0.3074, pruned_loss=0.0738, over 12466.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.272, pruned_loss=0.05022, over 3570536.63 frames. ], batch size: 130, lr: 9.43e-03, grad_scale: 4.0 +2023-03-09 03:34:43,296 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.195e+02 3.349e+02 3.901e+02 4.944e+02 8.097e+02, threshold=7.803e+02, percent-clipped=6.0 +2023-03-09 03:34:50,327 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42358.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:35:10,383 INFO [train.py:898] (1/4) Epoch 12, batch 2400, loss[loss=0.1974, simple_loss=0.2937, pruned_loss=0.05057, over 18261.00 frames. 
], tot_loss[loss=0.1864, simple_loss=0.2721, pruned_loss=0.05038, over 3564784.03 frames. ], batch size: 57, lr: 9.42e-03, grad_scale: 8.0 +2023-03-09 03:35:15,953 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42379.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:35:19,280 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5795, 5.4866, 5.1162, 5.4626, 5.4399, 4.8404, 5.3943, 5.1518], + device='cuda:1'), covar=tensor([0.0427, 0.0422, 0.1361, 0.0849, 0.0640, 0.0454, 0.0425, 0.0977], + device='cuda:1'), in_proj_covar=tensor([0.0413, 0.0481, 0.0636, 0.0373, 0.0360, 0.0432, 0.0465, 0.0597], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 03:36:08,416 INFO [train.py:898] (1/4) Epoch 12, batch 2450, loss[loss=0.1922, simple_loss=0.2816, pruned_loss=0.0514, over 18310.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2708, pruned_loss=0.0498, over 3572926.98 frames. ], batch size: 54, lr: 9.42e-03, grad_scale: 8.0 +2023-03-09 03:36:27,202 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42440.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:36:40,614 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.166e+02 3.219e+02 3.774e+02 4.354e+02 8.177e+02, threshold=7.548e+02, percent-clipped=1.0 +2023-03-09 03:37:01,447 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.85 vs. limit=2.0 +2023-03-09 03:37:07,531 INFO [train.py:898] (1/4) Epoch 12, batch 2500, loss[loss=0.1804, simple_loss=0.2646, pruned_loss=0.04813, over 18273.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2704, pruned_loss=0.04918, over 3585767.02 frames. ], batch size: 49, lr: 9.41e-03, grad_scale: 8.0 +2023-03-09 03:37:44,351 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42506.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:38:06,074 INFO [train.py:898] (1/4) Epoch 12, batch 2550, loss[loss=0.1849, simple_loss=0.2771, pruned_loss=0.04639, over 18628.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2692, pruned_loss=0.04884, over 3597054.81 frames. ], batch size: 52, lr: 9.41e-03, grad_scale: 8.0 +2023-03-09 03:38:37,551 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.199e+02 3.182e+02 3.754e+02 4.508e+02 9.914e+02, threshold=7.507e+02, percent-clipped=4.0 +2023-03-09 03:38:49,210 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42562.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:38:54,915 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42567.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:38:59,151 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.4977, 3.5362, 4.9049, 4.3374, 3.1326, 2.7536, 4.2631, 5.0361], + device='cuda:1'), covar=tensor([0.0835, 0.1435, 0.0161, 0.0332, 0.0941, 0.1185, 0.0410, 0.0293], + device='cuda:1'), in_proj_covar=tensor([0.0137, 0.0249, 0.0110, 0.0164, 0.0179, 0.0176, 0.0176, 0.0155], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 03:39:04,474 INFO [train.py:898] (1/4) Epoch 12, batch 2600, loss[loss=0.1917, simple_loss=0.2809, pruned_loss=0.05121, over 18028.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2695, pruned_loss=0.04938, over 3581609.03 frames. 
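
The grad_scale field shows dynamic mixed-precision loss scaling at work: it sits at 8.0, falls to 4.0 shortly after batch 2050 (an inf/nan gradient forces a halving and a skipped step), and is back at 8.0 by batch 2400 once enough finite steps accumulate. This is the standard behaviour of torch.cuda.amp.GradScaler; a self-contained sketch, where the growth_interval and the toy model are assumptions:

    import torch

    model = torch.nn.Linear(10, 1).cuda()
    optimizer = torch.optim.SGD(model.parameters(), lr=0.05)
    scaler = torch.cuda.amp.GradScaler(
        init_scale=4.0,        # matches the scale early in this epoch
        growth_factor=2.0,     # doubling: 4.0 -> 8.0
        backoff_factor=0.5,    # halving on overflow: 8.0 -> 4.0
        growth_interval=2000,  # PyTorch default; the real interval is unknown
    )

    for step in range(10):
        optimizer.zero_grad()
        with torch.cuda.amp.autocast():
            loss = model(torch.randn(8, 10, device="cuda")).pow(2).mean()
        scaler.scale(loss).backward()
        scaler.step(optimizer)  # silently skipped if grads overflowed
        scaler.update()         # halves on overflow, else counts toward growth
        print("grad_scale:", scaler.get_scale())
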
], batch size: 65, lr: 9.40e-03, grad_scale: 8.0 +2023-03-09 03:39:28,026 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5102, 3.6715, 4.9652, 4.3344, 3.4017, 2.8966, 4.4600, 5.1793], + device='cuda:1'), covar=tensor([0.0814, 0.1435, 0.0131, 0.0340, 0.0797, 0.1080, 0.0337, 0.0194], + device='cuda:1'), in_proj_covar=tensor([0.0137, 0.0248, 0.0110, 0.0164, 0.0178, 0.0174, 0.0175, 0.0153], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 03:39:35,395 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.72 vs. limit=2.0 +2023-03-09 03:39:39,330 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42604.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 03:39:46,071 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42610.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:40:03,352 INFO [train.py:898] (1/4) Epoch 12, batch 2650, loss[loss=0.1856, simple_loss=0.2673, pruned_loss=0.05192, over 18352.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2695, pruned_loss=0.04931, over 3581257.80 frames. ], batch size: 46, lr: 9.40e-03, grad_scale: 8.0 +2023-03-09 03:40:08,280 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4471, 6.0412, 5.5051, 5.7504, 5.5815, 5.4748, 6.0512, 6.1031], + device='cuda:1'), covar=tensor([0.1115, 0.0692, 0.0422, 0.0754, 0.1432, 0.0761, 0.0629, 0.0653], + device='cuda:1'), in_proj_covar=tensor([0.0548, 0.0448, 0.0346, 0.0485, 0.0668, 0.0493, 0.0641, 0.0478], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 03:40:31,626 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42649.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:40:34,773 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.962e+02 3.110e+02 3.787e+02 4.423e+02 7.417e+02, threshold=7.574e+02, percent-clipped=0.0 +2023-03-09 03:40:35,007 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42652.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 03:40:54,568 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1855, 4.6013, 4.6647, 4.6508, 4.2186, 4.5182, 4.0079, 4.5477], + device='cuda:1'), covar=tensor([0.0262, 0.0318, 0.0214, 0.0435, 0.0341, 0.0264, 0.1106, 0.0300], + device='cuda:1'), in_proj_covar=tensor([0.0180, 0.0227, 0.0213, 0.0259, 0.0226, 0.0228, 0.0286, 0.0216], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:1') +2023-03-09 03:41:00,849 INFO [train.py:898] (1/4) Epoch 12, batch 2700, loss[loss=0.2081, simple_loss=0.2908, pruned_loss=0.06271, over 18296.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2693, pruned_loss=0.04917, over 3590417.75 frames. 
], batch size: 57, lr: 9.39e-03, grad_scale: 8.0 +2023-03-09 03:41:35,298 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0012, 4.9741, 5.0144, 4.8010, 4.7789, 4.8604, 5.2025, 5.1587], + device='cuda:1'), covar=tensor([0.0063, 0.0072, 0.0051, 0.0096, 0.0056, 0.0094, 0.0091, 0.0092], + device='cuda:1'), in_proj_covar=tensor([0.0082, 0.0058, 0.0061, 0.0078, 0.0064, 0.0090, 0.0075, 0.0074], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 03:41:42,311 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42710.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:41:59,013 INFO [train.py:898] (1/4) Epoch 12, batch 2750, loss[loss=0.1914, simple_loss=0.2816, pruned_loss=0.05058, over 16230.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2684, pruned_loss=0.04859, over 3582051.08 frames. ], batch size: 94, lr: 9.39e-03, grad_scale: 8.0 +2023-03-09 03:42:11,341 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42735.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:42:13,122 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-03-09 03:42:32,194 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.860e+02 3.123e+02 3.590e+02 4.311e+02 1.461e+03, threshold=7.180e+02, percent-clipped=1.0 +2023-03-09 03:42:58,258 INFO [train.py:898] (1/4) Epoch 12, batch 2800, loss[loss=0.166, simple_loss=0.2563, pruned_loss=0.03784, over 18379.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2683, pruned_loss=0.04823, over 3586052.79 frames. ], batch size: 50, lr: 9.38e-03, grad_scale: 8.0 +2023-03-09 03:43:52,958 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8225, 5.2562, 4.9414, 5.0642, 4.8348, 4.8443, 5.3251, 5.2511], + device='cuda:1'), covar=tensor([0.1229, 0.0756, 0.0782, 0.0740, 0.1654, 0.0675, 0.0703, 0.0776], + device='cuda:1'), in_proj_covar=tensor([0.0551, 0.0450, 0.0342, 0.0484, 0.0672, 0.0490, 0.0639, 0.0478], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 03:43:57,205 INFO [train.py:898] (1/4) Epoch 12, batch 2850, loss[loss=0.182, simple_loss=0.2705, pruned_loss=0.04675, over 18433.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2695, pruned_loss=0.04864, over 3576288.26 frames. ], batch size: 48, lr: 9.38e-03, grad_scale: 8.0 +2023-03-09 03:44:27,911 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.023e+02 3.014e+02 3.707e+02 4.689e+02 1.677e+03, threshold=7.413e+02, percent-clipped=4.0 +2023-03-09 03:44:37,063 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42859.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:44:40,345 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42862.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:44:45,448 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-09 03:44:54,940 INFO [train.py:898] (1/4) Epoch 12, batch 2900, loss[loss=0.2001, simple_loss=0.2905, pruned_loss=0.05488, over 17938.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2698, pruned_loss=0.04887, over 3576216.26 frames. 
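
The scaling.py:679 "Whitening" lines compare a per-group covariance statistic against a limit (metric=1.39 vs. limit=2.0 above); exceeding the limit appears to trigger a corrective gradient that nudges activations back toward a whiter, more isotropic covariance. One natural metric with the right behaviour, equal to 1.0 for a perfectly white covariance and growing with anisotropy, is d * tr(C^2) / tr(C)^2 per channel group; this exact formulation is an assumption, not taken from the code:

    import torch

    def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
        # x: (frames, channels).  Splits channels into contiguous groups,
        # forms each group's covariance C, and returns the mean of
        # d * tr(C^2) / tr(C)^2, which is 1.0 iff C is a multiple of the
        # identity.  Grouping layout and formula are assumptions.
        n, c = x.shape
        d = c // num_groups
        xg = x.reshape(n, num_groups, d).transpose(0, 1)  # (groups, n, d)
        cov = torch.einsum("gni,gnj->gij", xg, xg) / n
        tr_c = cov.diagonal(dim1=1, dim2=2).sum(-1)
        tr_c2 = (cov * cov).sum(dim=(1, 2))  # tr(C @ C) for symmetric C
        return (d * tr_c2 / tr_c.clamp(min=1e-20) ** 2).mean()

    # slightly above 1.0 for random (already nearly white) features:
    print(whitening_metric(torch.randn(1000, 96), num_groups=8))
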
], batch size: 65, lr: 9.37e-03, grad_scale: 8.0 +2023-03-09 03:44:59,718 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42879.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:45:47,645 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42920.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:45:53,059 INFO [train.py:898] (1/4) Epoch 12, batch 2950, loss[loss=0.1731, simple_loss=0.2591, pruned_loss=0.04353, over 18366.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2707, pruned_loss=0.04914, over 3571174.78 frames. ], batch size: 46, lr: 9.36e-03, grad_scale: 8.0 +2023-03-09 03:46:11,099 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42940.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:46:24,423 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.040e+02 2.922e+02 3.740e+02 4.612e+02 1.225e+03, threshold=7.481e+02, percent-clipped=6.0 +2023-03-09 03:46:51,979 INFO [train.py:898] (1/4) Epoch 12, batch 3000, loss[loss=0.1696, simple_loss=0.2587, pruned_loss=0.04025, over 18298.00 frames. ], tot_loss[loss=0.185, simple_loss=0.271, pruned_loss=0.04949, over 3570229.25 frames. ], batch size: 49, lr: 9.36e-03, grad_scale: 8.0 +2023-03-09 03:46:51,979 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 03:47:04,092 INFO [train.py:932] (1/4) Epoch 12, validation: loss=0.1557, simple_loss=0.2578, pruned_loss=0.02677, over 944034.00 frames. +2023-03-09 03:47:04,093 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 03:47:40,287 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43005.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:48:03,072 INFO [train.py:898] (1/4) Epoch 12, batch 3050, loss[loss=0.1754, simple_loss=0.2603, pruned_loss=0.04523, over 18414.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2711, pruned_loss=0.04943, over 3586473.59 frames. ], batch size: 52, lr: 9.35e-03, grad_scale: 8.0 +2023-03-09 03:48:14,785 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43035.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:48:35,553 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.270e+02 3.222e+02 3.719e+02 4.518e+02 9.955e+02, threshold=7.438e+02, percent-clipped=1.0 +2023-03-09 03:48:53,799 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6781, 2.9482, 4.2520, 3.8915, 2.4528, 4.6406, 4.0332, 2.7610], + device='cuda:1'), covar=tensor([0.0454, 0.1369, 0.0282, 0.0277, 0.1769, 0.0152, 0.0378, 0.1052], + device='cuda:1'), in_proj_covar=tensor([0.0191, 0.0222, 0.0162, 0.0146, 0.0214, 0.0186, 0.0210, 0.0190], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 03:48:54,437 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-03-09 03:49:01,445 INFO [train.py:898] (1/4) Epoch 12, batch 3100, loss[loss=0.175, simple_loss=0.2623, pruned_loss=0.04383, over 17722.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.271, pruned_loss=0.04942, over 3592475.66 frames. 
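
Validation is interleaved with training at a fixed batch interval (here it fires at epoch 12, batch 3000), and each validation block ends with a peak-memory line such as the 19889MB above. That figure comes directly from CUDA's peak-allocation counter; a sketch of emitting it (the helper name is hypothetical):

    import torch

    def log_peak_memory(device: torch.device) -> None:
        # torch.cuda.max_memory_allocated returns the peak number of
        # bytes allocated on the device since startup (or since the last
        # reset), which is what the log line reports in MB.
        peak_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
        print(f"Maximum memory allocated so far is {peak_mb}MB")

    log_peak_memory(torch.device("cuda:1"))
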
], batch size: 39, lr: 9.35e-03, grad_scale: 8.0 +2023-03-09 03:49:11,122 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43083.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:49:30,138 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0491, 4.5542, 4.7537, 3.4166, 3.6861, 3.6248, 2.6942, 2.3867], + device='cuda:1'), covar=tensor([0.0175, 0.0166, 0.0059, 0.0275, 0.0384, 0.0214, 0.0729, 0.0968], + device='cuda:1'), in_proj_covar=tensor([0.0061, 0.0048, 0.0050, 0.0061, 0.0083, 0.0060, 0.0073, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0006, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-09 03:49:53,700 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7462, 5.2731, 5.3205, 5.3169, 4.7745, 5.1790, 4.4097, 5.1256], + device='cuda:1'), covar=tensor([0.0244, 0.0324, 0.0198, 0.0413, 0.0400, 0.0258, 0.1270, 0.0307], + device='cuda:1'), in_proj_covar=tensor([0.0182, 0.0229, 0.0218, 0.0262, 0.0229, 0.0230, 0.0284, 0.0220], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:1') +2023-03-09 03:50:00,207 INFO [train.py:898] (1/4) Epoch 12, batch 3150, loss[loss=0.1619, simple_loss=0.2597, pruned_loss=0.03204, over 18484.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2689, pruned_loss=0.0486, over 3602881.51 frames. ], batch size: 53, lr: 9.34e-03, grad_scale: 8.0 +2023-03-09 03:50:31,050 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.182e+02 3.274e+02 3.785e+02 4.716e+02 1.243e+03, threshold=7.571e+02, percent-clipped=5.0 +2023-03-09 03:50:43,655 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43162.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:50:58,067 INFO [train.py:898] (1/4) Epoch 12, batch 3200, loss[loss=0.2006, simple_loss=0.2928, pruned_loss=0.05417, over 18496.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2697, pruned_loss=0.04913, over 3583321.00 frames. ], batch size: 51, lr: 9.34e-03, grad_scale: 8.0 +2023-03-09 03:51:39,585 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43210.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:51:45,955 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43215.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:51:57,234 INFO [train.py:898] (1/4) Epoch 12, batch 3250, loss[loss=0.1738, simple_loss=0.252, pruned_loss=0.0478, over 18393.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2691, pruned_loss=0.04897, over 3579360.13 frames. ], batch size: 42, lr: 9.33e-03, grad_scale: 8.0 +2023-03-09 03:52:08,861 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43235.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:52:28,373 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.179e+02 3.099e+02 3.708e+02 4.167e+02 9.547e+02, threshold=7.415e+02, percent-clipped=3.0 +2023-03-09 03:52:44,802 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-09 03:52:55,947 INFO [train.py:898] (1/4) Epoch 12, batch 3300, loss[loss=0.1525, simple_loss=0.2338, pruned_loss=0.03565, over 18364.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2689, pruned_loss=0.04853, over 3588048.41 frames. 
], batch size: 42, lr: 9.33e-03, grad_scale: 8.0 +2023-03-09 03:53:30,873 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43305.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:53:54,841 INFO [train.py:898] (1/4) Epoch 12, batch 3350, loss[loss=0.1925, simple_loss=0.283, pruned_loss=0.05096, over 18495.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2677, pruned_loss=0.04814, over 3583837.39 frames. ], batch size: 51, lr: 9.32e-03, grad_scale: 8.0 +2023-03-09 03:54:25,612 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.041e+02 3.064e+02 3.610e+02 4.297e+02 1.025e+03, threshold=7.219e+02, percent-clipped=3.0 +2023-03-09 03:54:26,037 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8096, 4.8208, 4.9133, 4.6616, 4.6416, 4.7250, 5.0988, 4.9663], + device='cuda:1'), covar=tensor([0.0061, 0.0071, 0.0061, 0.0090, 0.0064, 0.0097, 0.0066, 0.0097], + device='cuda:1'), in_proj_covar=tensor([0.0081, 0.0057, 0.0060, 0.0077, 0.0064, 0.0088, 0.0074, 0.0074], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 03:54:26,916 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43353.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:54:36,334 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-03-09 03:54:53,281 INFO [train.py:898] (1/4) Epoch 12, batch 3400, loss[loss=0.1829, simple_loss=0.276, pruned_loss=0.04491, over 18355.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.269, pruned_loss=0.0486, over 3583361.75 frames. ], batch size: 55, lr: 9.32e-03, grad_scale: 8.0 +2023-03-09 03:55:18,879 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6868, 2.5418, 2.6254, 2.4641, 2.6038, 2.2144, 2.2960, 2.6363], + device='cuda:1'), covar=tensor([0.0047, 0.0082, 0.0056, 0.0084, 0.0063, 0.0136, 0.0129, 0.0053], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0126, 0.0108, 0.0158, 0.0111, 0.0156, 0.0161, 0.0091], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 03:55:51,947 INFO [train.py:898] (1/4) Epoch 12, batch 3450, loss[loss=0.1856, simple_loss=0.2792, pruned_loss=0.04606, over 17094.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2692, pruned_loss=0.04846, over 3591078.88 frames. 
], batch size: 78, lr: 9.31e-03, grad_scale: 8.0 +2023-03-09 03:55:54,650 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4658, 3.2865, 1.9611, 4.2706, 3.0116, 4.2953, 2.0544, 3.7360], + device='cuda:1'), covar=tensor([0.0460, 0.0747, 0.1291, 0.0391, 0.0674, 0.0235, 0.1186, 0.0350], + device='cuda:1'), in_proj_covar=tensor([0.0193, 0.0212, 0.0179, 0.0245, 0.0179, 0.0241, 0.0189, 0.0184], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 03:55:59,111 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5293, 6.1598, 5.5318, 5.9100, 5.6840, 5.6094, 6.1752, 6.1431], + device='cuda:1'), covar=tensor([0.1269, 0.0589, 0.0428, 0.0630, 0.1526, 0.0735, 0.0533, 0.0603], + device='cuda:1'), in_proj_covar=tensor([0.0539, 0.0445, 0.0338, 0.0480, 0.0656, 0.0483, 0.0631, 0.0469], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 03:56:23,234 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.156e+02 3.064e+02 3.567e+02 4.230e+02 7.375e+02, threshold=7.135e+02, percent-clipped=1.0 +2023-03-09 03:56:51,213 INFO [train.py:898] (1/4) Epoch 12, batch 3500, loss[loss=0.1879, simple_loss=0.2815, pruned_loss=0.04718, over 17926.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2688, pruned_loss=0.04842, over 3593469.93 frames. ], batch size: 65, lr: 9.31e-03, grad_scale: 8.0 +2023-03-09 03:57:35,055 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43514.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:57:36,020 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43515.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:57:46,262 INFO [train.py:898] (1/4) Epoch 12, batch 3550, loss[loss=0.2497, simple_loss=0.3156, pruned_loss=0.09189, over 12556.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2689, pruned_loss=0.04884, over 3574710.68 frames. ], batch size: 129, lr: 9.30e-03, grad_scale: 8.0 +2023-03-09 03:57:57,022 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43535.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:58:15,320 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.095e+02 3.008e+02 3.651e+02 4.331e+02 1.115e+03, threshold=7.301e+02, percent-clipped=5.0 +2023-03-09 03:58:27,330 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43563.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:58:34,940 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6885, 2.0776, 2.5170, 2.7179, 3.4120, 5.1438, 4.6601, 3.8592], + device='cuda:1'), covar=tensor([0.1345, 0.2235, 0.2776, 0.1503, 0.1971, 0.0110, 0.0412, 0.0594], + device='cuda:1'), in_proj_covar=tensor([0.0254, 0.0310, 0.0330, 0.0252, 0.0367, 0.0199, 0.0267, 0.0214], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 03:58:40,415 INFO [train.py:898] (1/4) Epoch 12, batch 3600, loss[loss=0.1559, simple_loss=0.2374, pruned_loss=0.03721, over 18159.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2687, pruned_loss=0.04864, over 3588017.84 frames. 
], batch size: 44, lr: 9.30e-03, grad_scale: 8.0 +2023-03-09 03:58:40,784 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43575.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:58:49,372 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43583.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:58:59,676 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9122, 3.2558, 4.5472, 4.0760, 2.9018, 4.7862, 4.1641, 3.1957], + device='cuda:1'), covar=tensor([0.0450, 0.1255, 0.0178, 0.0341, 0.1483, 0.0169, 0.0385, 0.0886], + device='cuda:1'), in_proj_covar=tensor([0.0193, 0.0223, 0.0161, 0.0146, 0.0215, 0.0188, 0.0210, 0.0193], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 03:59:46,564 INFO [train.py:898] (1/4) Epoch 13, batch 0, loss[loss=0.2112, simple_loss=0.2968, pruned_loss=0.06277, over 18298.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2968, pruned_loss=0.06277, over 18298.00 frames. ], batch size: 57, lr: 8.93e-03, grad_scale: 8.0 +2023-03-09 03:59:46,565 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 03:59:58,387 INFO [train.py:932] (1/4) Epoch 13, validation: loss=0.1568, simple_loss=0.2587, pruned_loss=0.02742, over 944034.00 frames. +2023-03-09 03:59:58,388 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 04:00:01,937 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3330, 5.1724, 5.5023, 5.4182, 5.3045, 6.0692, 5.6786, 5.4867], + device='cuda:1'), covar=tensor([0.0949, 0.0595, 0.0723, 0.0706, 0.1298, 0.0736, 0.0596, 0.1401], + device='cuda:1'), in_proj_covar=tensor([0.0315, 0.0243, 0.0258, 0.0257, 0.0302, 0.0365, 0.0245, 0.0355], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0003], + device='cuda:1') +2023-03-09 04:00:07,029 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6473, 5.5206, 5.1411, 5.3749, 4.8583, 5.3924, 5.6457, 5.4496], + device='cuda:1'), covar=tensor([0.2879, 0.1453, 0.0957, 0.1468, 0.2845, 0.1208, 0.1220, 0.1355], + device='cuda:1'), in_proj_covar=tensor([0.0532, 0.0444, 0.0337, 0.0475, 0.0645, 0.0477, 0.0626, 0.0469], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 04:00:49,453 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.977e+02 3.410e+02 4.106e+02 5.043e+02 1.786e+03, threshold=8.212e+02, percent-clipped=7.0 +2023-03-09 04:00:57,357 INFO [train.py:898] (1/4) Epoch 13, batch 50, loss[loss=0.1786, simple_loss=0.2643, pruned_loss=0.04647, over 18310.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2654, pruned_loss=0.04761, over 819469.87 frames. ], batch size: 54, lr: 8.92e-03, grad_scale: 8.0 +2023-03-09 04:01:53,903 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-03-09 04:01:56,201 INFO [train.py:898] (1/4) Epoch 13, batch 100, loss[loss=0.2116, simple_loss=0.2988, pruned_loss=0.06227, over 18095.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2665, pruned_loss=0.04745, over 1438841.48 frames. ], batch size: 62, lr: 8.92e-03, grad_scale: 8.0 +2023-03-09 04:02:46,912 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. 
limit=2.0 +2023-03-09 04:02:47,116 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.935e+02 2.941e+02 3.301e+02 3.834e+02 7.601e+02, threshold=6.602e+02, percent-clipped=0.0 +2023-03-09 04:02:55,217 INFO [train.py:898] (1/4) Epoch 13, batch 150, loss[loss=0.196, simple_loss=0.2763, pruned_loss=0.0578, over 18348.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2649, pruned_loss=0.04693, over 1930919.75 frames. ], batch size: 56, lr: 8.91e-03, grad_scale: 8.0 +2023-03-09 04:03:28,260 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43787.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:03:53,899 INFO [train.py:898] (1/4) Epoch 13, batch 200, loss[loss=0.1872, simple_loss=0.2771, pruned_loss=0.04867, over 18586.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2673, pruned_loss=0.0476, over 2309452.93 frames. ], batch size: 54, lr: 8.91e-03, grad_scale: 8.0 +2023-03-09 04:04:18,198 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6601, 3.5090, 2.1796, 4.3891, 3.2476, 4.4309, 2.3457, 3.8608], + device='cuda:1'), covar=tensor([0.0503, 0.0837, 0.1378, 0.0525, 0.0765, 0.0301, 0.1187, 0.0432], + device='cuda:1'), in_proj_covar=tensor([0.0198, 0.0214, 0.0181, 0.0249, 0.0182, 0.0245, 0.0191, 0.0188], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 04:04:40,684 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43848.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:04:44,846 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.130e+02 3.045e+02 3.524e+02 4.456e+02 7.991e+02, threshold=7.049e+02, percent-clipped=5.0 +2023-03-09 04:04:53,610 INFO [train.py:898] (1/4) Epoch 13, batch 250, loss[loss=0.1574, simple_loss=0.2472, pruned_loss=0.03378, over 18378.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2665, pruned_loss=0.04718, over 2595214.97 frames. ], batch size: 50, lr: 8.90e-03, grad_scale: 8.0 +2023-03-09 04:05:06,475 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43870.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:05:21,432 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7267, 3.2321, 3.9342, 2.8543, 3.6758, 2.6190, 2.7101, 2.2440], + device='cuda:1'), covar=tensor([0.0794, 0.0773, 0.0177, 0.0555, 0.0551, 0.1945, 0.2014, 0.1279], + device='cuda:1'), in_proj_covar=tensor([0.0199, 0.0217, 0.0124, 0.0172, 0.0230, 0.0248, 0.0287, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 04:05:52,795 INFO [train.py:898] (1/4) Epoch 13, batch 300, loss[loss=0.1794, simple_loss=0.2723, pruned_loss=0.04324, over 18627.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2662, pruned_loss=0.04697, over 2811696.91 frames. 
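
The learning-rate trail is worth decoding: lr creeps down within epoch 12 (9.59e-03 to 9.30e-03) and then drops more sharply to 8.93e-03 when epoch 13 begins. Both effects match an Eden-style schedule, lr = base_lr * ((batch^2 + lr_batches^2) / lr_batches^2)^-0.25 * ((epoch^2 + lr_epochs^2) / lr_epochs^2)^-0.25. Assuming base_lr=0.05, lr_batches=5000 and lr_epochs=3.5, with epoch counting completed epochs, the formula reproduces the logged values to three digits (all assumptions, corroborated only by this numerical agreement):

    def eden_lr(base_lr: float, batch: int, epoch: int,
                lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
        # Eden-style schedule; all hyperparameters here are assumptions.
        batch_factor = ((batch ** 2 + lr_batches ** 2)
                        / lr_batches ** 2) ** -0.25
        epoch_factor = ((epoch ** 2 + lr_epochs ** 2)
                        / lr_epochs ** 2) ** -0.25
        return base_lr * batch_factor * epoch_factor

    print(eden_lr(0.05, batch=41000, epoch=11))  # ~9.58e-03, cf. epoch 12
    print(eden_lr(0.05, batch=43600, epoch=12))  # ~8.93e-03, cf. epoch 13
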
], batch size: 52, lr: 8.90e-03, grad_scale: 8.0 +2023-03-09 04:05:53,015 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4704, 5.2036, 5.6619, 5.6646, 5.4557, 6.2381, 5.9760, 5.4400], + device='cuda:1'), covar=tensor([0.1114, 0.0587, 0.0651, 0.0611, 0.1329, 0.0704, 0.0470, 0.1574], + device='cuda:1'), in_proj_covar=tensor([0.0318, 0.0243, 0.0260, 0.0258, 0.0301, 0.0366, 0.0244, 0.0362], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0003], + device='cuda:1') +2023-03-09 04:05:53,211 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43909.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:06:12,189 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-03-09 04:06:43,697 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.829e+02 2.907e+02 3.635e+02 4.242e+02 7.161e+02, threshold=7.270e+02, percent-clipped=1.0 +2023-03-09 04:06:52,912 INFO [train.py:898] (1/4) Epoch 13, batch 350, loss[loss=0.1575, simple_loss=0.2301, pruned_loss=0.04246, over 18426.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2666, pruned_loss=0.04707, over 2994433.92 frames. ], batch size: 43, lr: 8.89e-03, grad_scale: 8.0 +2023-03-09 04:06:53,304 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43959.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:07:06,086 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43970.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:07:17,826 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.3332, 2.1025, 1.9003, 2.0684, 2.3911, 2.3807, 2.2300, 2.0146], + device='cuda:1'), covar=tensor([0.0215, 0.0183, 0.0446, 0.0357, 0.0193, 0.0189, 0.0308, 0.0314], + device='cuda:1'), in_proj_covar=tensor([0.0123, 0.0110, 0.0152, 0.0141, 0.0109, 0.0094, 0.0137, 0.0134], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 04:07:23,559 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6295, 2.5550, 2.5922, 2.4449, 2.6106, 2.1485, 2.3427, 2.6410], + device='cuda:1'), covar=tensor([0.0064, 0.0092, 0.0059, 0.0090, 0.0076, 0.0152, 0.0141, 0.0058], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0129, 0.0111, 0.0161, 0.0113, 0.0159, 0.0163, 0.0093], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 04:07:56,738 INFO [train.py:898] (1/4) Epoch 13, batch 400, loss[loss=0.1911, simple_loss=0.2772, pruned_loss=0.05253, over 18297.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2676, pruned_loss=0.04758, over 3115324.51 frames. ], batch size: 57, lr: 8.89e-03, grad_scale: 8.0 +2023-03-09 04:08:10,330 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44020.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:08:34,726 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-09 04:08:47,392 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.657e+02 2.874e+02 3.424e+02 4.227e+02 8.593e+02, threshold=6.849e+02, percent-clipped=4.0 +2023-03-09 04:08:55,715 INFO [train.py:898] (1/4) Epoch 13, batch 450, loss[loss=0.1777, simple_loss=0.2632, pruned_loss=0.04612, over 18277.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2674, pruned_loss=0.04756, over 3221818.85 frames. 
], batch size: 49, lr: 8.88e-03, grad_scale: 16.0 +2023-03-09 04:09:54,522 INFO [train.py:898] (1/4) Epoch 13, batch 500, loss[loss=0.1713, simple_loss=0.2514, pruned_loss=0.04556, over 18157.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2666, pruned_loss=0.04739, over 3301446.51 frames. ], batch size: 44, lr: 8.88e-03, grad_scale: 16.0 +2023-03-09 04:10:34,487 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44143.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:10:44,864 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.103e+02 3.159e+02 3.633e+02 4.577e+02 9.760e+02, threshold=7.265e+02, percent-clipped=1.0 +2023-03-09 04:10:53,407 INFO [train.py:898] (1/4) Epoch 13, batch 550, loss[loss=0.1786, simple_loss=0.2529, pruned_loss=0.05212, over 18233.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2677, pruned_loss=0.0477, over 3377542.38 frames. ], batch size: 45, lr: 8.87e-03, grad_scale: 16.0 +2023-03-09 04:11:07,525 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44170.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:11:40,869 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7744, 4.4685, 4.6852, 3.5310, 3.7284, 3.4194, 2.5742, 2.4875], + device='cuda:1'), covar=tensor([0.0213, 0.0144, 0.0057, 0.0249, 0.0299, 0.0222, 0.0717, 0.0841], + device='cuda:1'), in_proj_covar=tensor([0.0060, 0.0047, 0.0049, 0.0060, 0.0081, 0.0058, 0.0070, 0.0077], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0006, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-09 04:11:53,556 INFO [train.py:898] (1/4) Epoch 13, batch 600, loss[loss=0.1921, simple_loss=0.2801, pruned_loss=0.05203, over 18223.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2686, pruned_loss=0.0479, over 3420512.91 frames. ], batch size: 60, lr: 8.87e-03, grad_scale: 16.0 +2023-03-09 04:12:04,403 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44218.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:12:16,288 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3142, 5.8793, 5.3374, 5.6647, 5.3320, 5.2894, 5.8976, 5.8976], + device='cuda:1'), covar=tensor([0.1289, 0.0675, 0.0561, 0.0686, 0.1571, 0.0675, 0.0586, 0.0637], + device='cuda:1'), in_proj_covar=tensor([0.0529, 0.0442, 0.0337, 0.0478, 0.0653, 0.0478, 0.0625, 0.0469], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 04:12:42,049 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4839, 4.4553, 2.7914, 4.4100, 5.5399, 2.7183, 4.0222, 4.2087], + device='cuda:1'), covar=tensor([0.0066, 0.1072, 0.1298, 0.0471, 0.0032, 0.1192, 0.0621, 0.0649], + device='cuda:1'), in_proj_covar=tensor([0.0125, 0.0239, 0.0192, 0.0191, 0.0094, 0.0175, 0.0205, 0.0209], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 04:12:42,718 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.069e+02 3.135e+02 3.711e+02 4.524e+02 8.017e+02, threshold=7.422e+02, percent-clipped=1.0 +2023-03-09 04:12:51,317 INFO [train.py:898] (1/4) Epoch 13, batch 650, loss[loss=0.1826, simple_loss=0.2731, pruned_loss=0.04606, over 18368.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2678, pruned_loss=0.04763, over 3468470.15 frames. 
], batch size: 55, lr: 8.86e-03, grad_scale: 16.0 +2023-03-09 04:12:58,884 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44265.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:13:49,383 INFO [train.py:898] (1/4) Epoch 13, batch 700, loss[loss=0.2057, simple_loss=0.2952, pruned_loss=0.05811, over 18299.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2663, pruned_loss=0.04722, over 3513005.94 frames. ], batch size: 54, lr: 8.86e-03, grad_scale: 8.0 +2023-03-09 04:13:57,526 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44315.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:14:41,971 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.372e+02 3.099e+02 3.698e+02 4.790e+02 1.213e+03, threshold=7.395e+02, percent-clipped=5.0 +2023-03-09 04:14:48,764 INFO [train.py:898] (1/4) Epoch 13, batch 750, loss[loss=0.153, simple_loss=0.233, pruned_loss=0.03653, over 17689.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2671, pruned_loss=0.04733, over 3528071.99 frames. ], batch size: 39, lr: 8.85e-03, grad_scale: 8.0 +2023-03-09 04:15:48,579 INFO [train.py:898] (1/4) Epoch 13, batch 800, loss[loss=0.2119, simple_loss=0.3037, pruned_loss=0.06003, over 18355.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2684, pruned_loss=0.04782, over 3550718.14 frames. ], batch size: 56, lr: 8.85e-03, grad_scale: 8.0 +2023-03-09 04:16:29,465 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44443.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:16:40,511 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.201e+02 3.366e+02 4.098e+02 5.276e+02 1.273e+03, threshold=8.196e+02, percent-clipped=10.0 +2023-03-09 04:16:47,514 INFO [train.py:898] (1/4) Epoch 13, batch 850, loss[loss=0.1644, simple_loss=0.2509, pruned_loss=0.0389, over 18427.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2682, pruned_loss=0.04802, over 3564008.74 frames. ], batch size: 48, lr: 8.84e-03, grad_scale: 8.0 +2023-03-09 04:17:26,156 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44491.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:17:45,919 INFO [train.py:898] (1/4) Epoch 13, batch 900, loss[loss=0.1616, simple_loss=0.245, pruned_loss=0.03913, over 18240.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2683, pruned_loss=0.04786, over 3576223.95 frames. ], batch size: 45, lr: 8.84e-03, grad_scale: 8.0 +2023-03-09 04:18:07,288 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44527.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:18:34,301 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44550.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:18:37,463 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.936e+02 2.996e+02 3.521e+02 4.536e+02 7.966e+02, threshold=7.042e+02, percent-clipped=0.0 +2023-03-09 04:18:44,386 INFO [train.py:898] (1/4) Epoch 13, batch 950, loss[loss=0.17, simple_loss=0.248, pruned_loss=0.04604, over 18412.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2679, pruned_loss=0.04803, over 3573903.19 frames. ], batch size: 42, lr: 8.84e-03, grad_scale: 8.0 +2023-03-09 04:18:51,516 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44565.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:19:14,373 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. 
limit=2.0 +2023-03-09 04:19:18,493 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44588.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:19:43,379 INFO [train.py:898] (1/4) Epoch 13, batch 1000, loss[loss=0.1755, simple_loss=0.2574, pruned_loss=0.04676, over 18293.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2675, pruned_loss=0.04785, over 3585726.18 frames. ], batch size: 49, lr: 8.83e-03, grad_scale: 8.0 +2023-03-09 04:19:46,144 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44611.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:19:48,237 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44613.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:19:50,578 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44615.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:19:56,948 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-03-09 04:20:19,001 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7120, 5.2936, 5.2920, 5.2515, 4.8260, 5.1722, 4.5603, 5.1824], + device='cuda:1'), covar=tensor([0.0245, 0.0276, 0.0189, 0.0374, 0.0381, 0.0213, 0.1172, 0.0276], + device='cuda:1'), in_proj_covar=tensor([0.0187, 0.0234, 0.0223, 0.0267, 0.0236, 0.0233, 0.0294, 0.0226], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 04:20:36,781 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.889e+02 3.069e+02 3.543e+02 4.304e+02 1.212e+03, threshold=7.086e+02, percent-clipped=7.0 +2023-03-09 04:20:42,640 INFO [train.py:898] (1/4) Epoch 13, batch 1050, loss[loss=0.1992, simple_loss=0.2878, pruned_loss=0.05527, over 18300.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2683, pruned_loss=0.04812, over 3569838.28 frames. ], batch size: 54, lr: 8.83e-03, grad_scale: 4.0 +2023-03-09 04:20:47,349 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44663.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:21:16,048 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7731, 3.0063, 4.2350, 3.9869, 2.4281, 4.4471, 3.9785, 2.6960], + device='cuda:1'), covar=tensor([0.0366, 0.1281, 0.0207, 0.0263, 0.1677, 0.0197, 0.0429, 0.1163], + device='cuda:1'), in_proj_covar=tensor([0.0192, 0.0221, 0.0164, 0.0144, 0.0212, 0.0188, 0.0211, 0.0191], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 04:21:41,958 INFO [train.py:898] (1/4) Epoch 13, batch 1100, loss[loss=0.1971, simple_loss=0.2813, pruned_loss=0.05639, over 18343.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2684, pruned_loss=0.04785, over 3586745.23 frames. ], batch size: 55, lr: 8.82e-03, grad_scale: 4.0 +2023-03-09 04:22:35,539 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.712e+02 2.982e+02 3.567e+02 4.035e+02 7.842e+02, threshold=7.134e+02, percent-clipped=1.0 +2023-03-09 04:22:41,042 INFO [train.py:898] (1/4) Epoch 13, batch 1150, loss[loss=0.1689, simple_loss=0.2499, pruned_loss=0.04399, over 18245.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2669, pruned_loss=0.04737, over 3596774.11 frames. ], batch size: 45, lr: 8.82e-03, grad_scale: 4.0 +2023-03-09 04:23:40,918 INFO [train.py:898] (1/4) Epoch 13, batch 1200, loss[loss=0.165, simple_loss=0.2437, pruned_loss=0.04318, over 18100.00 frames. 
], tot_loss[loss=0.1812, simple_loss=0.2674, pruned_loss=0.04747, over 3593776.80 frames. ], batch size: 40, lr: 8.81e-03, grad_scale: 8.0 +2023-03-09 04:24:33,793 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.745e+02 2.967e+02 3.663e+02 4.375e+02 1.162e+03, threshold=7.327e+02, percent-clipped=1.0 +2023-03-09 04:24:40,012 INFO [train.py:898] (1/4) Epoch 13, batch 1250, loss[loss=0.1714, simple_loss=0.2533, pruned_loss=0.0448, over 18269.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2671, pruned_loss=0.04731, over 3598371.54 frames. ], batch size: 49, lr: 8.81e-03, grad_scale: 8.0 +2023-03-09 04:25:07,168 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44883.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:25:35,383 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44906.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:25:38,456 INFO [train.py:898] (1/4) Epoch 13, batch 1300, loss[loss=0.2007, simple_loss=0.2865, pruned_loss=0.05748, over 16336.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2674, pruned_loss=0.04746, over 3604606.91 frames. ], batch size: 94, lr: 8.80e-03, grad_scale: 8.0 +2023-03-09 04:26:22,959 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5731, 3.6703, 5.1858, 4.5445, 3.2808, 3.0653, 4.5518, 5.3409], + device='cuda:1'), covar=tensor([0.0813, 0.1571, 0.0095, 0.0313, 0.0859, 0.1038, 0.0317, 0.0180], + device='cuda:1'), in_proj_covar=tensor([0.0135, 0.0246, 0.0110, 0.0163, 0.0178, 0.0174, 0.0174, 0.0156], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 04:26:31,153 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.969e+02 2.987e+02 3.642e+02 4.868e+02 9.952e+02, threshold=7.283e+02, percent-clipped=3.0 +2023-03-09 04:26:36,905 INFO [train.py:898] (1/4) Epoch 13, batch 1350, loss[loss=0.1935, simple_loss=0.2763, pruned_loss=0.05531, over 17911.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2676, pruned_loss=0.04769, over 3591694.95 frames. ], batch size: 65, lr: 8.80e-03, grad_scale: 8.0 +2023-03-09 04:27:36,020 INFO [train.py:898] (1/4) Epoch 13, batch 1400, loss[loss=0.1565, simple_loss=0.2344, pruned_loss=0.0393, over 18388.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2674, pruned_loss=0.04774, over 3597653.81 frames. ], batch size: 42, lr: 8.79e-03, grad_scale: 8.0 +2023-03-09 04:28:28,831 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.199e+02 3.129e+02 3.917e+02 4.727e+02 1.364e+03, threshold=7.834e+02, percent-clipped=4.0 +2023-03-09 04:28:32,234 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45056.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:28:35,354 INFO [train.py:898] (1/4) Epoch 13, batch 1450, loss[loss=0.1827, simple_loss=0.2711, pruned_loss=0.0471, over 17728.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2673, pruned_loss=0.04777, over 3591949.68 frames. 
], batch size: 70, lr: 8.79e-03, grad_scale: 8.0 +2023-03-09 04:29:01,602 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45081.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:29:15,254 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.3597, 3.3613, 4.7047, 3.9253, 3.1223, 2.8337, 4.1035, 4.7924], + device='cuda:1'), covar=tensor([0.0820, 0.1379, 0.0114, 0.0396, 0.0844, 0.1063, 0.0372, 0.0156], + device='cuda:1'), in_proj_covar=tensor([0.0138, 0.0251, 0.0112, 0.0167, 0.0181, 0.0177, 0.0178, 0.0159], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 04:29:34,108 INFO [train.py:898] (1/4) Epoch 13, batch 1500, loss[loss=0.1782, simple_loss=0.2504, pruned_loss=0.05299, over 16765.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2679, pruned_loss=0.04803, over 3590104.55 frames. ], batch size: 37, lr: 8.78e-03, grad_scale: 8.0 +2023-03-09 04:29:44,376 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45117.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 04:30:13,250 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45142.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:30:15,582 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6157, 3.6143, 2.0418, 4.4785, 3.1284, 4.4732, 2.4971, 4.1536], + device='cuda:1'), covar=tensor([0.0565, 0.0776, 0.1677, 0.0522, 0.0890, 0.0321, 0.1242, 0.0349], + device='cuda:1'), in_proj_covar=tensor([0.0201, 0.0218, 0.0187, 0.0252, 0.0185, 0.0251, 0.0196, 0.0193], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 04:30:18,102 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-03-09 04:30:26,572 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.379e+02 3.129e+02 3.724e+02 4.200e+02 9.663e+02, threshold=7.448e+02, percent-clipped=2.0 +2023-03-09 04:30:33,393 INFO [train.py:898] (1/4) Epoch 13, batch 1550, loss[loss=0.1792, simple_loss=0.2749, pruned_loss=0.04179, over 18560.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2686, pruned_loss=0.04838, over 3573236.27 frames. ], batch size: 54, lr: 8.78e-03, grad_scale: 8.0 +2023-03-09 04:30:44,445 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. 
limit=2.0 +2023-03-09 04:31:01,969 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45183.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:31:05,571 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5395, 3.4461, 1.9918, 4.3873, 3.0986, 4.2364, 2.3656, 3.8023], + device='cuda:1'), covar=tensor([0.0521, 0.0751, 0.1473, 0.0411, 0.0770, 0.0317, 0.1141, 0.0381], + device='cuda:1'), in_proj_covar=tensor([0.0199, 0.0215, 0.0184, 0.0249, 0.0183, 0.0249, 0.0195, 0.0191], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 04:31:23,863 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3227, 4.3796, 2.7589, 4.4735, 5.3466, 2.5794, 3.9115, 4.1007], + device='cuda:1'), covar=tensor([0.0085, 0.1031, 0.1362, 0.0517, 0.0048, 0.1211, 0.0611, 0.0651], + device='cuda:1'), in_proj_covar=tensor([0.0127, 0.0241, 0.0193, 0.0191, 0.0094, 0.0174, 0.0204, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 04:31:27,116 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2910, 5.3132, 4.8621, 5.2012, 5.2152, 4.5676, 5.0970, 4.8988], + device='cuda:1'), covar=tensor([0.0435, 0.0381, 0.1321, 0.0773, 0.0514, 0.0403, 0.0441, 0.0950], + device='cuda:1'), in_proj_covar=tensor([0.0424, 0.0487, 0.0642, 0.0387, 0.0380, 0.0443, 0.0470, 0.0613], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 04:31:28,174 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45206.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:31:32,576 INFO [train.py:898] (1/4) Epoch 13, batch 1600, loss[loss=0.1967, simple_loss=0.2809, pruned_loss=0.05623, over 16294.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2689, pruned_loss=0.04841, over 3567257.37 frames. ], batch size: 94, lr: 8.77e-03, grad_scale: 8.0 +2023-03-09 04:31:58,490 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45231.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:32:24,703 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.718e+02 3.180e+02 3.814e+02 4.660e+02 1.002e+03, threshold=7.628e+02, percent-clipped=5.0 +2023-03-09 04:32:24,915 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45254.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:32:30,352 INFO [train.py:898] (1/4) Epoch 13, batch 1650, loss[loss=0.1759, simple_loss=0.267, pruned_loss=0.04242, over 18617.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2683, pruned_loss=0.04805, over 3564714.39 frames. ], batch size: 52, lr: 8.77e-03, grad_scale: 8.0 +2023-03-09 04:33:29,069 INFO [train.py:898] (1/4) Epoch 13, batch 1700, loss[loss=0.1839, simple_loss=0.2721, pruned_loss=0.04783, over 18211.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2678, pruned_loss=0.04778, over 3585734.46 frames. 
], batch size: 60, lr: 8.76e-03, grad_scale: 8.0 +2023-03-09 04:33:43,970 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1218, 4.1762, 2.5798, 4.3574, 5.1624, 2.4708, 3.6216, 3.9687], + device='cuda:1'), covar=tensor([0.0088, 0.0978, 0.1522, 0.0469, 0.0054, 0.1276, 0.0745, 0.0701], + device='cuda:1'), in_proj_covar=tensor([0.0129, 0.0243, 0.0196, 0.0192, 0.0095, 0.0176, 0.0207, 0.0210], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 04:34:03,598 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-09 04:34:15,799 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45348.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 04:34:22,328 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.903e+02 2.834e+02 3.670e+02 4.509e+02 1.027e+03, threshold=7.340e+02, percent-clipped=3.0 +2023-03-09 04:34:28,087 INFO [train.py:898] (1/4) Epoch 13, batch 1750, loss[loss=0.1958, simple_loss=0.2822, pruned_loss=0.0547, over 18100.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2674, pruned_loss=0.0475, over 3586529.38 frames. ], batch size: 62, lr: 8.76e-03, grad_scale: 8.0 +2023-03-09 04:34:45,100 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-09 04:35:12,782 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3304, 5.3251, 4.8728, 5.1984, 5.1963, 4.6231, 5.1744, 4.9037], + device='cuda:1'), covar=tensor([0.0458, 0.0509, 0.1532, 0.0868, 0.0631, 0.0479, 0.0409, 0.1069], + device='cuda:1'), in_proj_covar=tensor([0.0421, 0.0492, 0.0645, 0.0386, 0.0382, 0.0443, 0.0469, 0.0614], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 04:35:27,717 INFO [train.py:898] (1/4) Epoch 13, batch 1800, loss[loss=0.159, simple_loss=0.2366, pruned_loss=0.04068, over 18143.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2682, pruned_loss=0.04805, over 3577768.89 frames. ], batch size: 44, lr: 8.75e-03, grad_scale: 8.0 +2023-03-09 04:35:28,191 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45409.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 04:35:31,554 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45412.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 04:35:44,585 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4874, 2.7385, 4.1092, 3.6979, 2.4180, 4.3987, 3.8421, 2.7948], + device='cuda:1'), covar=tensor([0.0478, 0.1409, 0.0217, 0.0348, 0.1622, 0.0194, 0.0443, 0.0968], + device='cuda:1'), in_proj_covar=tensor([0.0196, 0.0224, 0.0164, 0.0147, 0.0214, 0.0193, 0.0213, 0.0192], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 04:36:01,077 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45437.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:36:21,057 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.197e+02 3.089e+02 3.615e+02 4.391e+02 8.521e+02, threshold=7.230e+02, percent-clipped=5.0 +2023-03-09 04:36:26,744 INFO [train.py:898] (1/4) Epoch 13, batch 1850, loss[loss=0.2003, simple_loss=0.2839, pruned_loss=0.05831, over 17685.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2676, pruned_loss=0.048, over 3575412.60 frames. 
], batch size: 70, lr: 8.75e-03, grad_scale: 8.0 +2023-03-09 04:36:33,974 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9404, 5.0048, 4.9940, 4.8033, 4.7330, 4.8487, 5.1840, 5.1850], + device='cuda:1'), covar=tensor([0.0063, 0.0057, 0.0058, 0.0095, 0.0066, 0.0125, 0.0056, 0.0073], + device='cuda:1'), in_proj_covar=tensor([0.0083, 0.0058, 0.0062, 0.0079, 0.0066, 0.0090, 0.0075, 0.0076], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 04:37:07,406 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-03-09 04:37:25,617 INFO [train.py:898] (1/4) Epoch 13, batch 1900, loss[loss=0.2487, simple_loss=0.3073, pruned_loss=0.09504, over 12440.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2682, pruned_loss=0.04836, over 3573483.07 frames. ], batch size: 134, lr: 8.74e-03, grad_scale: 8.0 +2023-03-09 04:37:31,499 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5787, 3.7917, 5.1652, 4.4317, 3.3731, 3.0183, 4.3800, 5.2895], + device='cuda:1'), covar=tensor([0.0826, 0.1362, 0.0115, 0.0338, 0.0884, 0.1104, 0.0355, 0.0207], + device='cuda:1'), in_proj_covar=tensor([0.0138, 0.0250, 0.0113, 0.0165, 0.0181, 0.0178, 0.0178, 0.0160], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 04:38:18,704 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.016e+02 2.946e+02 3.593e+02 4.543e+02 1.018e+03, threshold=7.187e+02, percent-clipped=4.0 +2023-03-09 04:38:24,493 INFO [train.py:898] (1/4) Epoch 13, batch 1950, loss[loss=0.1879, simple_loss=0.2811, pruned_loss=0.04738, over 18477.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2681, pruned_loss=0.04811, over 3579478.57 frames. ], batch size: 53, lr: 8.74e-03, grad_scale: 8.0 +2023-03-09 04:38:29,526 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8999, 5.4768, 5.5000, 5.4278, 5.0751, 5.3696, 4.7929, 5.3496], + device='cuda:1'), covar=tensor([0.0219, 0.0253, 0.0155, 0.0347, 0.0323, 0.0196, 0.1032, 0.0263], + device='cuda:1'), in_proj_covar=tensor([0.0185, 0.0233, 0.0221, 0.0266, 0.0233, 0.0232, 0.0289, 0.0222], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:1') +2023-03-09 04:38:37,599 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3677, 5.9338, 5.4307, 5.6795, 5.3655, 5.2915, 5.9030, 5.8711], + device='cuda:1'), covar=tensor([0.1218, 0.0698, 0.0449, 0.0715, 0.1516, 0.0785, 0.0615, 0.0740], + device='cuda:1'), in_proj_covar=tensor([0.0544, 0.0458, 0.0343, 0.0490, 0.0670, 0.0492, 0.0642, 0.0484], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 04:39:23,869 INFO [train.py:898] (1/4) Epoch 13, batch 2000, loss[loss=0.1851, simple_loss=0.2748, pruned_loss=0.04773, over 18628.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2678, pruned_loss=0.04789, over 3587507.86 frames. ], batch size: 52, lr: 8.73e-03, grad_scale: 8.0 +2023-03-09 04:39:53,526 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. 
limit=2.0 +2023-03-09 04:40:17,738 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.026e+02 2.791e+02 3.256e+02 3.955e+02 1.111e+03, threshold=6.512e+02, percent-clipped=4.0 +2023-03-09 04:40:23,327 INFO [train.py:898] (1/4) Epoch 13, batch 2050, loss[loss=0.2011, simple_loss=0.289, pruned_loss=0.05661, over 18409.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2672, pruned_loss=0.04729, over 3587380.48 frames. ], batch size: 52, lr: 8.73e-03, grad_scale: 8.0 +2023-03-09 04:41:14,570 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45702.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:41:17,274 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45704.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 04:41:22,677 INFO [train.py:898] (1/4) Epoch 13, batch 2100, loss[loss=0.1613, simple_loss=0.2463, pruned_loss=0.03815, over 18236.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2677, pruned_loss=0.04734, over 3582117.02 frames. ], batch size: 45, lr: 8.72e-03, grad_scale: 8.0 +2023-03-09 04:41:26,357 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45712.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 04:41:27,688 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9380, 4.7518, 4.8717, 3.7966, 3.9313, 3.7695, 2.8433, 2.5603], + device='cuda:1'), covar=tensor([0.0206, 0.0120, 0.0059, 0.0216, 0.0262, 0.0171, 0.0640, 0.0839], + device='cuda:1'), in_proj_covar=tensor([0.0063, 0.0049, 0.0052, 0.0061, 0.0083, 0.0060, 0.0072, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0004, 0.0006, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-09 04:41:55,685 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45737.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:42:14,746 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.079e+02 3.037e+02 3.571e+02 4.287e+02 1.143e+03, threshold=7.142e+02, percent-clipped=3.0 +2023-03-09 04:42:21,817 INFO [train.py:898] (1/4) Epoch 13, batch 2150, loss[loss=0.1832, simple_loss=0.276, pruned_loss=0.04516, over 15986.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2679, pruned_loss=0.0475, over 3582513.69 frames. ], batch size: 94, lr: 8.72e-03, grad_scale: 8.0 +2023-03-09 04:42:23,184 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45760.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:42:26,696 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45763.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:42:38,839 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45774.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:42:51,017 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45785.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:43:05,507 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.61 vs. limit=5.0 +2023-03-09 04:43:20,399 INFO [train.py:898] (1/4) Epoch 13, batch 2200, loss[loss=0.1708, simple_loss=0.2467, pruned_loss=0.04749, over 18418.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2679, pruned_loss=0.04751, over 3583835.06 frames. 
], batch size: 43, lr: 8.72e-03, grad_scale: 8.0 +2023-03-09 04:43:24,824 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9888, 4.6490, 4.8106, 3.5885, 3.8422, 3.7864, 2.6511, 2.6726], + device='cuda:1'), covar=tensor([0.0200, 0.0135, 0.0074, 0.0242, 0.0318, 0.0178, 0.0699, 0.0757], + device='cuda:1'), in_proj_covar=tensor([0.0061, 0.0049, 0.0051, 0.0060, 0.0082, 0.0059, 0.0071, 0.0078], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0006, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-09 04:43:50,783 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45835.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:44:12,841 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.979e+02 3.106e+02 3.775e+02 4.323e+02 7.775e+02, threshold=7.551e+02, percent-clipped=4.0 +2023-03-09 04:44:18,542 INFO [train.py:898] (1/4) Epoch 13, batch 2250, loss[loss=0.2205, simple_loss=0.2931, pruned_loss=0.07396, over 12398.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2678, pruned_loss=0.04763, over 3585496.70 frames. ], batch size: 129, lr: 8.71e-03, grad_scale: 8.0 +2023-03-09 04:44:18,920 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45859.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:44:18,958 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7273, 3.6696, 3.5214, 3.1994, 3.4165, 2.5511, 2.4297, 3.7910], + device='cuda:1'), covar=tensor([0.0040, 0.0076, 0.0077, 0.0106, 0.0073, 0.0195, 0.0258, 0.0050], + device='cuda:1'), in_proj_covar=tensor([0.0105, 0.0126, 0.0109, 0.0158, 0.0110, 0.0156, 0.0159, 0.0094], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 04:45:17,118 INFO [train.py:898] (1/4) Epoch 13, batch 2300, loss[loss=0.1861, simple_loss=0.2662, pruned_loss=0.05304, over 18493.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2682, pruned_loss=0.04784, over 3580863.05 frames. ], batch size: 47, lr: 8.71e-03, grad_scale: 8.0 +2023-03-09 04:45:31,281 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45920.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:45:50,421 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.4946, 3.8040, 2.4161, 3.6795, 4.5860, 2.4057, 3.4405, 3.7261], + device='cuda:1'), covar=tensor([0.0137, 0.1057, 0.1586, 0.0623, 0.0069, 0.1315, 0.0719, 0.0702], + device='cuda:1'), in_proj_covar=tensor([0.0129, 0.0241, 0.0195, 0.0191, 0.0095, 0.0175, 0.0203, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 04:46:07,503 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3354, 5.9025, 5.4962, 5.6934, 5.4523, 5.3604, 5.9316, 5.8816], + device='cuda:1'), covar=tensor([0.1241, 0.0762, 0.0462, 0.0685, 0.1600, 0.0727, 0.0610, 0.0758], + device='cuda:1'), in_proj_covar=tensor([0.0548, 0.0458, 0.0343, 0.0491, 0.0672, 0.0496, 0.0644, 0.0485], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 04:46:10,649 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.145e+02 3.100e+02 3.461e+02 4.036e+02 9.492e+02, threshold=6.921e+02, percent-clipped=1.0 +2023-03-09 04:46:16,238 INFO [train.py:898] (1/4) Epoch 13, batch 2350, loss[loss=0.1848, simple_loss=0.2734, pruned_loss=0.04814, over 18249.00 frames. 
], tot_loss[loss=0.1815, simple_loss=0.2678, pruned_loss=0.04761, over 3567290.20 frames. ], batch size: 60, lr: 8.70e-03, grad_scale: 8.0 +2023-03-09 04:46:29,798 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.4405, 5.0398, 5.0148, 4.9524, 4.5786, 4.8799, 4.3310, 4.8486], + device='cuda:1'), covar=tensor([0.0260, 0.0258, 0.0208, 0.0389, 0.0331, 0.0234, 0.1119, 0.0336], + device='cuda:1'), in_proj_covar=tensor([0.0188, 0.0236, 0.0225, 0.0271, 0.0238, 0.0235, 0.0293, 0.0227], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 04:47:14,901 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46004.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 04:47:20,483 INFO [train.py:898] (1/4) Epoch 13, batch 2400, loss[loss=0.1579, simple_loss=0.2506, pruned_loss=0.03263, over 18270.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2676, pruned_loss=0.04767, over 3556133.33 frames. ], batch size: 49, lr: 8.70e-03, grad_scale: 8.0 +2023-03-09 04:48:10,984 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46052.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 04:48:14,688 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.917e+02 3.063e+02 3.864e+02 4.591e+02 1.180e+03, threshold=7.727e+02, percent-clipped=5.0 +2023-03-09 04:48:18,392 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46058.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:48:19,206 INFO [train.py:898] (1/4) Epoch 13, batch 2450, loss[loss=0.1708, simple_loss=0.2495, pruned_loss=0.04605, over 18271.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2687, pruned_loss=0.04819, over 3553516.14 frames. ], batch size: 45, lr: 8.69e-03, grad_scale: 8.0 +2023-03-09 04:48:46,665 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3551, 2.6495, 3.8534, 3.6097, 2.5395, 4.1085, 3.6521, 2.5863], + device='cuda:1'), covar=tensor([0.0467, 0.1286, 0.0237, 0.0272, 0.1448, 0.0212, 0.0475, 0.0997], + device='cuda:1'), in_proj_covar=tensor([0.0196, 0.0224, 0.0166, 0.0147, 0.0212, 0.0192, 0.0215, 0.0191], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 04:49:18,294 INFO [train.py:898] (1/4) Epoch 13, batch 2500, loss[loss=0.1643, simple_loss=0.2372, pruned_loss=0.0457, over 18424.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2688, pruned_loss=0.04808, over 3564257.11 frames. ], batch size: 43, lr: 8.69e-03, grad_scale: 8.0 +2023-03-09 04:49:42,824 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46130.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:50:11,946 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.884e+02 3.026e+02 3.575e+02 4.443e+02 8.459e+02, threshold=7.149e+02, percent-clipped=1.0 +2023-03-09 04:50:16,986 INFO [train.py:898] (1/4) Epoch 13, batch 2550, loss[loss=0.1643, simple_loss=0.2454, pruned_loss=0.04161, over 18383.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2682, pruned_loss=0.04786, over 3563366.24 frames. ], batch size: 42, lr: 8.68e-03, grad_scale: 8.0 +2023-03-09 04:51:15,928 INFO [train.py:898] (1/4) Epoch 13, batch 2600, loss[loss=0.1787, simple_loss=0.2663, pruned_loss=0.04559, over 18580.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2683, pruned_loss=0.04792, over 3561881.47 frames. 
], batch size: 54, lr: 8.68e-03, grad_scale: 8.0 +2023-03-09 04:51:23,700 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46215.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:51:25,021 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6542, 3.5883, 5.0329, 4.4781, 3.3840, 3.0111, 4.5103, 5.2460], + device='cuda:1'), covar=tensor([0.0818, 0.1595, 0.0160, 0.0298, 0.0856, 0.1074, 0.0346, 0.0239], + device='cuda:1'), in_proj_covar=tensor([0.0139, 0.0253, 0.0114, 0.0164, 0.0181, 0.0178, 0.0177, 0.0161], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 04:52:08,291 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0636, 4.1855, 2.6562, 4.2614, 5.1569, 2.4364, 3.9010, 4.0593], + device='cuda:1'), covar=tensor([0.0121, 0.1114, 0.1476, 0.0532, 0.0067, 0.1321, 0.0609, 0.0681], + device='cuda:1'), in_proj_covar=tensor([0.0130, 0.0243, 0.0196, 0.0191, 0.0096, 0.0175, 0.0205, 0.0210], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 04:52:10,073 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.796e+02 3.019e+02 3.586e+02 4.587e+02 8.864e+02, threshold=7.172e+02, percent-clipped=4.0 +2023-03-09 04:52:14,654 INFO [train.py:898] (1/4) Epoch 13, batch 2650, loss[loss=0.1908, simple_loss=0.2761, pruned_loss=0.05272, over 18153.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2682, pruned_loss=0.04772, over 3575540.69 frames. ], batch size: 44, lr: 8.67e-03, grad_scale: 8.0 +2023-03-09 04:52:15,310 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.66 vs. limit=5.0 +2023-03-09 04:52:18,990 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7700, 4.6688, 4.8336, 4.5296, 4.5466, 4.6231, 4.9285, 4.9021], + device='cuda:1'), covar=tensor([0.0065, 0.0087, 0.0059, 0.0092, 0.0072, 0.0125, 0.0070, 0.0099], + device='cuda:1'), in_proj_covar=tensor([0.0084, 0.0060, 0.0062, 0.0079, 0.0066, 0.0090, 0.0076, 0.0076], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 04:52:35,476 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46276.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:52:37,795 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9377, 4.1109, 2.3597, 4.1007, 5.0583, 2.4012, 3.6636, 3.8250], + device='cuda:1'), covar=tensor([0.0124, 0.1090, 0.1652, 0.0570, 0.0065, 0.1320, 0.0684, 0.0768], + device='cuda:1'), in_proj_covar=tensor([0.0130, 0.0243, 0.0195, 0.0191, 0.0096, 0.0175, 0.0205, 0.0210], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 04:53:14,123 INFO [train.py:898] (1/4) Epoch 13, batch 2700, loss[loss=0.177, simple_loss=0.272, pruned_loss=0.04106, over 18581.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2676, pruned_loss=0.04732, over 3572791.38 frames. 
], batch size: 54, lr: 8.67e-03, grad_scale: 8.0 +2023-03-09 04:53:16,873 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9087, 3.1224, 4.5687, 4.0261, 2.7060, 4.8082, 4.0429, 3.2397], + device='cuda:1'), covar=tensor([0.0398, 0.1327, 0.0184, 0.0332, 0.1611, 0.0168, 0.0486, 0.0896], + device='cuda:1'), in_proj_covar=tensor([0.0198, 0.0227, 0.0166, 0.0149, 0.0215, 0.0195, 0.0217, 0.0192], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 04:53:47,852 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46337.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:54:08,569 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.122e+02 2.828e+02 3.269e+02 4.141e+02 7.295e+02, threshold=6.537e+02, percent-clipped=1.0 +2023-03-09 04:54:12,465 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46358.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:54:13,379 INFO [train.py:898] (1/4) Epoch 13, batch 2750, loss[loss=0.1788, simple_loss=0.2707, pruned_loss=0.0435, over 18583.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2678, pruned_loss=0.04747, over 3571600.45 frames. ], batch size: 54, lr: 8.66e-03, grad_scale: 8.0 +2023-03-09 04:54:17,155 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8553, 4.5918, 4.7419, 3.4627, 3.8382, 3.6782, 2.7051, 2.8445], + device='cuda:1'), covar=tensor([0.0193, 0.0131, 0.0071, 0.0282, 0.0298, 0.0189, 0.0661, 0.0766], + device='cuda:1'), in_proj_covar=tensor([0.0061, 0.0050, 0.0052, 0.0061, 0.0084, 0.0060, 0.0073, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0006, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-09 04:54:41,099 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1759, 5.4576, 2.8051, 5.3117, 5.1922, 5.4659, 5.3112, 2.7270], + device='cuda:1'), covar=tensor([0.0158, 0.0070, 0.0765, 0.0067, 0.0075, 0.0077, 0.0094, 0.0994], + device='cuda:1'), in_proj_covar=tensor([0.0079, 0.0071, 0.0089, 0.0087, 0.0079, 0.0067, 0.0078, 0.0093], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 04:55:09,401 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46406.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:55:12,692 INFO [train.py:898] (1/4) Epoch 13, batch 2800, loss[loss=0.2357, simple_loss=0.3027, pruned_loss=0.08431, over 12374.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2678, pruned_loss=0.04738, over 3575038.39 frames. ], batch size: 129, lr: 8.66e-03, grad_scale: 8.0 +2023-03-09 04:55:38,045 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46430.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:56:07,313 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.145e+02 3.038e+02 3.515e+02 4.163e+02 1.533e+03, threshold=7.029e+02, percent-clipped=2.0 +2023-03-09 04:56:11,758 INFO [train.py:898] (1/4) Epoch 13, batch 2850, loss[loss=0.1828, simple_loss=0.2739, pruned_loss=0.04584, over 17805.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2678, pruned_loss=0.04748, over 3580226.97 frames. 
], batch size: 70, lr: 8.65e-03, grad_scale: 8.0 +2023-03-09 04:56:34,786 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46478.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:57:10,821 INFO [train.py:898] (1/4) Epoch 13, batch 2900, loss[loss=0.1737, simple_loss=0.2543, pruned_loss=0.04655, over 18308.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2684, pruned_loss=0.04791, over 3572845.88 frames. ], batch size: 49, lr: 8.65e-03, grad_scale: 8.0 +2023-03-09 04:57:18,197 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46515.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:58:05,314 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.838e+02 2.971e+02 3.539e+02 4.206e+02 8.689e+02, threshold=7.079e+02, percent-clipped=3.0 +2023-03-09 04:58:09,926 INFO [train.py:898] (1/4) Epoch 13, batch 2950, loss[loss=0.1748, simple_loss=0.263, pruned_loss=0.0433, over 18632.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2675, pruned_loss=0.0471, over 3585356.34 frames. ], batch size: 52, lr: 8.65e-03, grad_scale: 8.0 +2023-03-09 04:58:14,677 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46563.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:58:52,136 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46595.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:59:01,167 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46602.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:59:09,198 INFO [train.py:898] (1/4) Epoch 13, batch 3000, loss[loss=0.1851, simple_loss=0.2745, pruned_loss=0.04788, over 18377.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2664, pruned_loss=0.04676, over 3586426.41 frames. ], batch size: 50, lr: 8.64e-03, grad_scale: 8.0 +2023-03-09 04:59:09,199 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 04:59:18,071 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0313, 4.9703, 4.5657, 4.8397, 4.8782, 4.3499, 4.8417, 4.5770], + device='cuda:1'), covar=tensor([0.0333, 0.0433, 0.1208, 0.0839, 0.0557, 0.0468, 0.0385, 0.0977], + device='cuda:1'), in_proj_covar=tensor([0.0421, 0.0491, 0.0648, 0.0388, 0.0374, 0.0445, 0.0474, 0.0612], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 04:59:21,019 INFO [train.py:932] (1/4) Epoch 13, validation: loss=0.1542, simple_loss=0.256, pruned_loss=0.02615, over 944034.00 frames. 
+2023-03-09 04:59:21,020 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 04:59:47,884 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46632.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:00:07,785 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5760, 2.1188, 2.5417, 2.6155, 3.3130, 5.0649, 4.6418, 3.7740], + device='cuda:1'), covar=tensor([0.1415, 0.2100, 0.2783, 0.1535, 0.1908, 0.0124, 0.0354, 0.0598], + device='cuda:1'), in_proj_covar=tensor([0.0259, 0.0314, 0.0337, 0.0254, 0.0367, 0.0200, 0.0271, 0.0220], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 05:00:15,393 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.844e+02 2.916e+02 3.524e+02 4.268e+02 9.781e+02, threshold=7.048e+02, percent-clipped=5.0 +2023-03-09 05:00:17,085 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46656.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 05:00:19,951 INFO [train.py:898] (1/4) Epoch 13, batch 3050, loss[loss=0.1832, simple_loss=0.2623, pruned_loss=0.05204, over 18490.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2666, pruned_loss=0.04683, over 3594492.17 frames. ], batch size: 47, lr: 8.64e-03, grad_scale: 8.0 +2023-03-09 05:00:24,760 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46663.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:01:18,395 INFO [train.py:898] (1/4) Epoch 13, batch 3100, loss[loss=0.198, simple_loss=0.2918, pruned_loss=0.05204, over 16168.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2669, pruned_loss=0.04679, over 3604766.58 frames. ], batch size: 94, lr: 8.63e-03, grad_scale: 8.0 +2023-03-09 05:01:58,254 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5333, 3.3931, 3.2765, 2.9185, 3.2465, 2.4147, 2.5559, 3.3879], + device='cuda:1'), covar=tensor([0.0059, 0.0093, 0.0097, 0.0143, 0.0111, 0.0245, 0.0228, 0.0077], + device='cuda:1'), in_proj_covar=tensor([0.0108, 0.0131, 0.0112, 0.0164, 0.0115, 0.0160, 0.0163, 0.0096], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 05:02:12,200 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.124e+02 3.204e+02 3.767e+02 4.418e+02 1.241e+03, threshold=7.535e+02, percent-clipped=5.0 +2023-03-09 05:02:16,820 INFO [train.py:898] (1/4) Epoch 13, batch 3150, loss[loss=0.1906, simple_loss=0.2826, pruned_loss=0.04934, over 18140.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2669, pruned_loss=0.04697, over 3593819.30 frames. ], batch size: 62, lr: 8.63e-03, grad_scale: 8.0 +2023-03-09 05:02:43,920 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4647, 3.2258, 1.9403, 4.2525, 2.9785, 3.9390, 2.0071, 3.5803], + device='cuda:1'), covar=tensor([0.0614, 0.0837, 0.1658, 0.0414, 0.0802, 0.0271, 0.1525, 0.0520], + device='cuda:1'), in_proj_covar=tensor([0.0199, 0.0215, 0.0183, 0.0252, 0.0181, 0.0248, 0.0193, 0.0191], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 05:03:16,830 INFO [train.py:898] (1/4) Epoch 13, batch 3200, loss[loss=0.2033, simple_loss=0.2833, pruned_loss=0.06161, over 18471.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.266, pruned_loss=0.0468, over 3586012.99 frames. 
], batch size: 59, lr: 8.62e-03, grad_scale: 8.0 +2023-03-09 05:03:18,456 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46810.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:03:38,410 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2905, 4.4661, 2.6585, 4.3069, 5.3774, 2.7249, 3.7339, 4.1233], + device='cuda:1'), covar=tensor([0.0138, 0.1038, 0.1550, 0.0623, 0.0067, 0.1310, 0.0762, 0.0634], + device='cuda:1'), in_proj_covar=tensor([0.0131, 0.0242, 0.0195, 0.0191, 0.0097, 0.0176, 0.0206, 0.0210], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 05:03:59,619 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6020, 2.1016, 2.6412, 2.6444, 3.1928, 4.8231, 4.3537, 3.7505], + device='cuda:1'), covar=tensor([0.1396, 0.2261, 0.2444, 0.1534, 0.1940, 0.0161, 0.0411, 0.0596], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0318, 0.0339, 0.0256, 0.0370, 0.0203, 0.0273, 0.0222], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 05:04:10,409 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.338e+02 3.187e+02 3.650e+02 4.694e+02 1.027e+03, threshold=7.300e+02, percent-clipped=4.0 +2023-03-09 05:04:15,569 INFO [train.py:898] (1/4) Epoch 13, batch 3250, loss[loss=0.2078, simple_loss=0.2915, pruned_loss=0.06208, over 16347.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2663, pruned_loss=0.04712, over 3582294.49 frames. ], batch size: 95, lr: 8.62e-03, grad_scale: 8.0 +2023-03-09 05:04:29,717 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46871.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:04:34,928 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6444, 2.0492, 2.6627, 2.6751, 3.2622, 4.7382, 4.3598, 3.9477], + device='cuda:1'), covar=tensor([0.1346, 0.2248, 0.2253, 0.1441, 0.1746, 0.0170, 0.0394, 0.0505], + device='cuda:1'), in_proj_covar=tensor([0.0260, 0.0317, 0.0338, 0.0255, 0.0369, 0.0202, 0.0273, 0.0221], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 05:05:14,070 INFO [train.py:898] (1/4) Epoch 13, batch 3300, loss[loss=0.1568, simple_loss=0.2455, pruned_loss=0.03403, over 18250.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2669, pruned_loss=0.04763, over 3556018.89 frames. ], batch size: 47, lr: 8.61e-03, grad_scale: 8.0 +2023-03-09 05:05:14,837 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-09 05:05:41,184 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46932.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:05:57,641 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9146, 3.8785, 4.0101, 3.8539, 3.8854, 3.8737, 4.0285, 4.0290], + device='cuda:1'), covar=tensor([0.0084, 0.0080, 0.0068, 0.0086, 0.0067, 0.0107, 0.0071, 0.0089], + device='cuda:1'), in_proj_covar=tensor([0.0084, 0.0060, 0.0063, 0.0080, 0.0067, 0.0091, 0.0077, 0.0076], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 05:05:58,004 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. 
limit=2.0 +2023-03-09 05:06:03,351 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46951.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 05:06:07,665 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.054e+02 2.913e+02 3.556e+02 4.553e+02 1.517e+03, threshold=7.113e+02, percent-clipped=7.0 +2023-03-09 05:06:11,485 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46958.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:06:12,401 INFO [train.py:898] (1/4) Epoch 13, batch 3350, loss[loss=0.164, simple_loss=0.2472, pruned_loss=0.0404, over 18382.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2669, pruned_loss=0.04766, over 3551190.44 frames. ], batch size: 42, lr: 8.61e-03, grad_scale: 8.0 +2023-03-09 05:06:38,289 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46980.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:07:12,264 INFO [train.py:898] (1/4) Epoch 13, batch 3400, loss[loss=0.1576, simple_loss=0.2337, pruned_loss=0.04074, over 18510.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2665, pruned_loss=0.04722, over 3562004.73 frames. ], batch size: 44, lr: 8.60e-03, grad_scale: 8.0 +2023-03-09 05:07:13,670 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2449, 5.1665, 5.4425, 5.3160, 5.0939, 5.9572, 5.5706, 5.3724], + device='cuda:1'), covar=tensor([0.0977, 0.0652, 0.0749, 0.0699, 0.1338, 0.0716, 0.0634, 0.1531], + device='cuda:1'), in_proj_covar=tensor([0.0318, 0.0250, 0.0268, 0.0266, 0.0301, 0.0375, 0.0245, 0.0368], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0003], + device='cuda:1') +2023-03-09 05:08:07,266 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.855e+02 2.739e+02 3.192e+02 3.909e+02 7.528e+02, threshold=6.383e+02, percent-clipped=0.0 +2023-03-09 05:08:11,954 INFO [train.py:898] (1/4) Epoch 13, batch 3450, loss[loss=0.1881, simple_loss=0.2781, pruned_loss=0.04903, over 18612.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2663, pruned_loss=0.04686, over 3572886.01 frames. ], batch size: 52, lr: 8.60e-03, grad_scale: 8.0 +2023-03-09 05:09:11,349 INFO [train.py:898] (1/4) Epoch 13, batch 3500, loss[loss=0.1862, simple_loss=0.2693, pruned_loss=0.05155, over 18289.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2654, pruned_loss=0.04654, over 3573498.10 frames. ], batch size: 49, lr: 8.60e-03, grad_scale: 8.0 +2023-03-09 05:10:03,841 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.269e+02 2.980e+02 3.583e+02 4.065e+02 6.381e+02, threshold=7.167e+02, percent-clipped=1.0 +2023-03-09 05:10:08,140 INFO [train.py:898] (1/4) Epoch 13, batch 3550, loss[loss=0.2324, simple_loss=0.305, pruned_loss=0.07994, over 12456.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2658, pruned_loss=0.04665, over 3569033.11 frames. 
], batch size: 130, lr: 8.59e-03, grad_scale: 8.0 +2023-03-09 05:10:15,863 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47166.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:10:30,715 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4139, 5.9903, 5.4650, 5.7461, 5.5108, 5.4596, 6.0256, 5.9503], + device='cuda:1'), covar=tensor([0.1141, 0.0667, 0.0434, 0.0693, 0.1492, 0.0641, 0.0538, 0.0719], + device='cuda:1'), in_proj_covar=tensor([0.0545, 0.0450, 0.0336, 0.0484, 0.0655, 0.0483, 0.0638, 0.0473], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 05:11:02,853 INFO [train.py:898] (1/4) Epoch 13, batch 3600, loss[loss=0.1898, simple_loss=0.2814, pruned_loss=0.04907, over 18226.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2669, pruned_loss=0.047, over 3563854.42 frames. ], batch size: 60, lr: 8.59e-03, grad_scale: 8.0 +2023-03-09 05:11:34,799 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.2797, 3.0760, 3.9560, 3.6156, 3.0203, 2.9600, 3.6653, 3.9666], + device='cuda:1'), covar=tensor([0.0827, 0.1186, 0.0161, 0.0369, 0.0772, 0.0901, 0.0367, 0.0409], + device='cuda:1'), in_proj_covar=tensor([0.0140, 0.0257, 0.0118, 0.0167, 0.0182, 0.0180, 0.0180, 0.0164], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 05:11:36,519 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47241.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:12:09,411 INFO [train.py:898] (1/4) Epoch 14, batch 0, loss[loss=0.1817, simple_loss=0.2729, pruned_loss=0.04528, over 18369.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2729, pruned_loss=0.04528, over 18369.00 frames. ], batch size: 56, lr: 8.27e-03, grad_scale: 8.0 +2023-03-09 05:12:09,412 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 05:12:21,330 INFO [train.py:932] (1/4) Epoch 14, validation: loss=0.155, simple_loss=0.2569, pruned_loss=0.0266, over 944034.00 frames. +2023-03-09 05:12:21,331 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 05:12:31,195 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47251.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:12:35,444 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.980e+02 3.395e+02 4.114e+02 5.070e+02 1.381e+03, threshold=8.228e+02, percent-clipped=11.0 +2023-03-09 05:12:39,266 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47258.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:12:48,128 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8249, 4.7631, 4.8094, 4.6711, 4.6943, 4.7320, 4.9758, 5.0321], + device='cuda:1'), covar=tensor([0.0070, 0.0088, 0.0070, 0.0103, 0.0072, 0.0117, 0.0109, 0.0114], + device='cuda:1'), in_proj_covar=tensor([0.0082, 0.0058, 0.0062, 0.0078, 0.0065, 0.0089, 0.0075, 0.0076], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 05:13:19,492 INFO [train.py:898] (1/4) Epoch 14, batch 50, loss[loss=0.1763, simple_loss=0.263, pruned_loss=0.04482, over 17031.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2634, pruned_loss=0.04476, over 819385.65 frames. 
], batch size: 78, lr: 8.27e-03, grad_scale: 8.0 +2023-03-09 05:13:26,881 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47299.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:13:30,391 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47302.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:13:35,439 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47306.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:14:18,548 INFO [train.py:898] (1/4) Epoch 14, batch 100, loss[loss=0.1925, simple_loss=0.2845, pruned_loss=0.05022, over 18297.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2653, pruned_loss=0.04637, over 1424962.31 frames. ], batch size: 57, lr: 8.26e-03, grad_scale: 8.0 +2023-03-09 05:14:29,901 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5549, 3.5438, 5.0427, 4.4531, 3.2321, 2.9345, 4.3661, 5.2277], + device='cuda:1'), covar=tensor([0.0886, 0.1453, 0.0136, 0.0341, 0.0957, 0.1194, 0.0388, 0.0137], + device='cuda:1'), in_proj_covar=tensor([0.0136, 0.0250, 0.0115, 0.0163, 0.0178, 0.0176, 0.0176, 0.0159], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 05:14:32,863 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.774e+02 2.933e+02 3.364e+02 4.152e+02 6.819e+02, threshold=6.727e+02, percent-clipped=0.0 +2023-03-09 05:15:02,543 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47379.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:15:16,208 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3467, 4.3293, 4.4708, 4.2637, 4.2354, 4.3252, 4.5573, 4.5058], + device='cuda:1'), covar=tensor([0.0065, 0.0073, 0.0060, 0.0090, 0.0069, 0.0109, 0.0081, 0.0092], + device='cuda:1'), in_proj_covar=tensor([0.0083, 0.0059, 0.0062, 0.0078, 0.0066, 0.0090, 0.0076, 0.0076], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 05:15:18,124 INFO [train.py:898] (1/4) Epoch 14, batch 150, loss[loss=0.1733, simple_loss=0.2629, pruned_loss=0.04184, over 18541.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2673, pruned_loss=0.04722, over 1891038.62 frames. ], batch size: 49, lr: 8.26e-03, grad_scale: 8.0 +2023-03-09 05:16:15,646 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47440.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:16:18,720 INFO [train.py:898] (1/4) Epoch 14, batch 200, loss[loss=0.1896, simple_loss=0.277, pruned_loss=0.05112, over 18277.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2649, pruned_loss=0.04602, over 2278445.56 frames. ], batch size: 57, lr: 8.25e-03, grad_scale: 8.0 +2023-03-09 05:16:25,414 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. 
limit=2.0 +2023-03-09 05:16:32,554 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.856e+02 2.798e+02 3.533e+02 4.067e+02 1.064e+03, threshold=7.066e+02, percent-clipped=5.0 +2023-03-09 05:16:36,986 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8786, 4.9006, 4.9300, 4.7260, 4.7070, 4.8311, 5.0992, 5.0401], + device='cuda:1'), covar=tensor([0.0067, 0.0069, 0.0064, 0.0095, 0.0066, 0.0108, 0.0074, 0.0098], + device='cuda:1'), in_proj_covar=tensor([0.0084, 0.0060, 0.0063, 0.0080, 0.0067, 0.0091, 0.0077, 0.0077], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 05:16:46,986 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47466.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:17:18,341 INFO [train.py:898] (1/4) Epoch 14, batch 250, loss[loss=0.1851, simple_loss=0.2745, pruned_loss=0.04787, over 18358.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2656, pruned_loss=0.04631, over 2558183.72 frames. ], batch size: 55, lr: 8.25e-03, grad_scale: 8.0 +2023-03-09 05:17:43,435 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47514.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:18:16,907 INFO [train.py:898] (1/4) Epoch 14, batch 300, loss[loss=0.2123, simple_loss=0.2888, pruned_loss=0.06788, over 12945.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2658, pruned_loss=0.04656, over 2781685.41 frames. ], batch size: 130, lr: 8.24e-03, grad_scale: 8.0 +2023-03-09 05:18:30,796 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.041e+02 3.040e+02 3.610e+02 4.364e+02 8.752e+02, threshold=7.221e+02, percent-clipped=2.0 +2023-03-09 05:19:16,486 INFO [train.py:898] (1/4) Epoch 14, batch 350, loss[loss=0.2142, simple_loss=0.3029, pruned_loss=0.0628, over 17685.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2662, pruned_loss=0.04649, over 2966298.69 frames. ], batch size: 70, lr: 8.24e-03, grad_scale: 8.0 +2023-03-09 05:19:21,109 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47597.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:20:15,233 INFO [train.py:898] (1/4) Epoch 14, batch 400, loss[loss=0.1875, simple_loss=0.2745, pruned_loss=0.05025, over 18288.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2661, pruned_loss=0.04658, over 3095565.63 frames. ], batch size: 57, lr: 8.24e-03, grad_scale: 8.0 +2023-03-09 05:20:28,443 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.702e+02 2.895e+02 3.399e+02 4.132e+02 9.040e+02, threshold=6.798e+02, percent-clipped=2.0 +2023-03-09 05:21:14,182 INFO [train.py:898] (1/4) Epoch 14, batch 450, loss[loss=0.1801, simple_loss=0.271, pruned_loss=0.04462, over 18496.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.267, pruned_loss=0.04701, over 3203243.66 frames. ], batch size: 53, lr: 8.23e-03, grad_scale: 8.0 +2023-03-09 05:21:20,825 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-09 05:21:25,412 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. 
limit=2.0 +2023-03-09 05:22:04,126 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47735.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:22:07,639 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47738.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:22:13,607 INFO [train.py:898] (1/4) Epoch 14, batch 500, loss[loss=0.1923, simple_loss=0.2834, pruned_loss=0.0506, over 18362.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2663, pruned_loss=0.04623, over 3281540.15 frames. ], batch size: 56, lr: 8.23e-03, grad_scale: 8.0 +2023-03-09 05:22:27,495 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.904e+02 2.746e+02 3.457e+02 4.087e+02 8.380e+02, threshold=6.915e+02, percent-clipped=1.0 +2023-03-09 05:22:40,846 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5323, 3.5371, 4.9638, 4.1240, 2.9633, 2.7159, 4.0848, 5.0008], + device='cuda:1'), covar=tensor([0.0862, 0.1645, 0.0154, 0.0445, 0.1097, 0.1409, 0.0527, 0.0300], + device='cuda:1'), in_proj_covar=tensor([0.0142, 0.0258, 0.0118, 0.0170, 0.0184, 0.0184, 0.0183, 0.0167], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 05:22:50,305 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.55 vs. limit=5.0 +2023-03-09 05:23:06,884 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47788.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:23:12,625 INFO [train.py:898] (1/4) Epoch 14, batch 550, loss[loss=0.2055, simple_loss=0.2874, pruned_loss=0.06182, over 18321.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2659, pruned_loss=0.04612, over 3354531.80 frames. ], batch size: 57, lr: 8.22e-03, grad_scale: 8.0 +2023-03-09 05:23:20,545 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47799.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:23:31,819 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47809.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 05:24:12,015 INFO [train.py:898] (1/4) Epoch 14, batch 600, loss[loss=0.1754, simple_loss=0.2687, pruned_loss=0.04109, over 18401.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2661, pruned_loss=0.04631, over 3389690.92 frames. ], batch size: 52, lr: 8.22e-03, grad_scale: 8.0 +2023-03-09 05:24:19,683 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47849.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:24:25,847 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.360e+02 2.943e+02 3.448e+02 4.485e+02 1.001e+03, threshold=6.896e+02, percent-clipped=3.0 +2023-03-09 05:24:43,093 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47870.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 05:25:07,440 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-09 05:25:10,122 INFO [train.py:898] (1/4) Epoch 14, batch 650, loss[loss=0.1957, simple_loss=0.2804, pruned_loss=0.05552, over 18330.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2656, pruned_loss=0.04612, over 3441805.07 frames. 
], batch size: 56, lr: 8.21e-03, grad_scale: 8.0 +2023-03-09 05:25:15,387 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47897.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:25:37,869 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4366, 5.3890, 5.0520, 5.3004, 5.3651, 4.7095, 5.2884, 5.0254], + device='cuda:1'), covar=tensor([0.0385, 0.0458, 0.1252, 0.0815, 0.0491, 0.0413, 0.0389, 0.1038], + device='cuda:1'), in_proj_covar=tensor([0.0435, 0.0503, 0.0664, 0.0396, 0.0385, 0.0457, 0.0487, 0.0627], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 05:25:43,489 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9497, 5.0065, 5.1251, 5.1216, 4.8981, 5.6302, 5.3555, 5.0751], + device='cuda:1'), covar=tensor([0.1144, 0.0831, 0.0690, 0.0848, 0.1611, 0.0873, 0.0635, 0.1626], + device='cuda:1'), in_proj_covar=tensor([0.0332, 0.0261, 0.0271, 0.0276, 0.0311, 0.0385, 0.0252, 0.0379], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:1') +2023-03-09 05:26:07,522 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.84 vs. limit=2.0 +2023-03-09 05:26:09,523 INFO [train.py:898] (1/4) Epoch 14, batch 700, loss[loss=0.1825, simple_loss=0.2692, pruned_loss=0.04787, over 18273.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2646, pruned_loss=0.04569, over 3478510.27 frames. ], batch size: 57, lr: 8.21e-03, grad_scale: 8.0 +2023-03-09 05:26:12,031 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47945.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:26:19,344 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-09 05:26:23,809 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.052e+02 2.964e+02 3.596e+02 4.648e+02 7.874e+02, threshold=7.192e+02, percent-clipped=3.0 +2023-03-09 05:26:59,061 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9460, 3.7484, 5.0548, 3.0289, 4.3122, 2.4944, 3.2266, 2.0034], + device='cuda:1'), covar=tensor([0.1008, 0.0832, 0.0124, 0.0739, 0.0621, 0.2487, 0.2541, 0.1757], + device='cuda:1'), in_proj_covar=tensor([0.0206, 0.0224, 0.0134, 0.0178, 0.0236, 0.0252, 0.0296, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 05:27:08,304 INFO [train.py:898] (1/4) Epoch 14, batch 750, loss[loss=0.1625, simple_loss=0.241, pruned_loss=0.04196, over 18395.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2645, pruned_loss=0.04561, over 3505622.08 frames. ], batch size: 42, lr: 8.21e-03, grad_scale: 16.0 +2023-03-09 05:28:03,358 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48035.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:28:12,896 INFO [train.py:898] (1/4) Epoch 14, batch 800, loss[loss=0.1946, simple_loss=0.2702, pruned_loss=0.05956, over 18508.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2639, pruned_loss=0.0453, over 3527232.68 frames. 
], batch size: 47, lr: 8.20e-03, grad_scale: 8.0 +2023-03-09 05:28:28,356 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.329e+02 3.109e+02 3.556e+02 4.248e+02 1.366e+03, threshold=7.111e+02, percent-clipped=4.0 +2023-03-09 05:28:47,995 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48072.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:29:00,406 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48083.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:29:11,643 INFO [train.py:898] (1/4) Epoch 14, batch 850, loss[loss=0.1547, simple_loss=0.2384, pruned_loss=0.0355, over 18571.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2637, pruned_loss=0.04533, over 3542975.17 frames. ], batch size: 45, lr: 8.20e-03, grad_scale: 8.0 +2023-03-09 05:29:13,078 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48094.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:29:59,995 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48133.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:30:10,895 INFO [train.py:898] (1/4) Epoch 14, batch 900, loss[loss=0.1877, simple_loss=0.2853, pruned_loss=0.04509, over 16073.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2639, pruned_loss=0.04565, over 3551773.21 frames. ], batch size: 94, lr: 8.19e-03, grad_scale: 8.0 +2023-03-09 05:30:12,179 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48144.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:30:26,934 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.713e+02 2.967e+02 3.519e+02 4.098e+02 9.060e+02, threshold=7.037e+02, percent-clipped=1.0 +2023-03-09 05:30:38,205 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48165.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 05:31:10,229 INFO [train.py:898] (1/4) Epoch 14, batch 950, loss[loss=0.2068, simple_loss=0.2939, pruned_loss=0.05979, over 18611.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2639, pruned_loss=0.04551, over 3564859.95 frames. ], batch size: 52, lr: 8.19e-03, grad_scale: 8.0 +2023-03-09 05:31:31,928 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-03-09 05:31:34,362 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48212.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 05:31:45,174 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3849, 4.3512, 4.4607, 4.2732, 4.2914, 4.3479, 4.5641, 4.5099], + device='cuda:1'), covar=tensor([0.0069, 0.0068, 0.0067, 0.0090, 0.0061, 0.0111, 0.0067, 0.0108], + device='cuda:1'), in_proj_covar=tensor([0.0083, 0.0059, 0.0063, 0.0079, 0.0066, 0.0090, 0.0075, 0.0076], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 05:32:09,657 INFO [train.py:898] (1/4) Epoch 14, batch 1000, loss[loss=0.1783, simple_loss=0.2798, pruned_loss=0.03841, over 18493.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2649, pruned_loss=0.04573, over 3570380.48 frames. 
], batch size: 51, lr: 8.19e-03, grad_scale: 8.0 +2023-03-09 05:32:26,021 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.854e+02 2.987e+02 3.542e+02 4.398e+02 9.134e+02, threshold=7.083e+02, percent-clipped=3.0 +2023-03-09 05:32:46,761 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48273.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 05:32:50,123 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4343, 6.0647, 5.5278, 5.8264, 5.6284, 5.4925, 6.1134, 6.0750], + device='cuda:1'), covar=tensor([0.1308, 0.0717, 0.0399, 0.0660, 0.1429, 0.0687, 0.0539, 0.0616], + device='cuda:1'), in_proj_covar=tensor([0.0550, 0.0466, 0.0342, 0.0492, 0.0668, 0.0492, 0.0649, 0.0481], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 05:32:55,851 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6442, 3.6505, 5.2164, 4.5671, 3.4028, 3.3006, 4.7014, 5.4078], + device='cuda:1'), covar=tensor([0.0814, 0.1810, 0.0123, 0.0345, 0.0872, 0.0990, 0.0306, 0.0187], + device='cuda:1'), in_proj_covar=tensor([0.0139, 0.0258, 0.0118, 0.0170, 0.0181, 0.0181, 0.0183, 0.0166], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 05:33:09,316 INFO [train.py:898] (1/4) Epoch 14, batch 1050, loss[loss=0.1624, simple_loss=0.2468, pruned_loss=0.03901, over 18406.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2653, pruned_loss=0.0459, over 3577225.88 frames. ], batch size: 42, lr: 8.18e-03, grad_scale: 8.0 +2023-03-09 05:33:46,610 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48324.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:34:08,880 INFO [train.py:898] (1/4) Epoch 14, batch 1100, loss[loss=0.175, simple_loss=0.2619, pruned_loss=0.04408, over 18345.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2656, pruned_loss=0.04604, over 3576759.82 frames. ], batch size: 56, lr: 8.18e-03, grad_scale: 8.0 +2023-03-09 05:34:23,497 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.341e+02 3.220e+02 3.737e+02 4.236e+02 6.762e+02, threshold=7.474e+02, percent-clipped=0.0 +2023-03-09 05:34:59,337 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48385.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:35:08,047 INFO [train.py:898] (1/4) Epoch 14, batch 1150, loss[loss=0.1855, simple_loss=0.2706, pruned_loss=0.05022, over 18377.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2657, pruned_loss=0.04599, over 3580113.72 frames. 
], batch size: 50, lr: 8.17e-03, grad_scale: 8.0 +2023-03-09 05:35:09,492 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48394.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:35:12,832 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48397.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:35:23,519 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7476, 3.6187, 5.0226, 2.9062, 4.3922, 2.4835, 3.0618, 1.8806], + device='cuda:1'), covar=tensor([0.1057, 0.0861, 0.0103, 0.0771, 0.0516, 0.2420, 0.2479, 0.1810], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0226, 0.0135, 0.0180, 0.0239, 0.0253, 0.0300, 0.0216], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 05:35:40,810 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5548, 1.9994, 2.3900, 2.5380, 2.9232, 4.7045, 4.3300, 3.5937], + device='cuda:1'), covar=tensor([0.1594, 0.2711, 0.3336, 0.1761, 0.2818, 0.0201, 0.0448, 0.0629], + device='cuda:1'), in_proj_covar=tensor([0.0261, 0.0317, 0.0337, 0.0257, 0.0368, 0.0202, 0.0273, 0.0222], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 05:35:44,647 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48423.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:35:49,931 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48428.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:36:06,281 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48442.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:36:07,210 INFO [train.py:898] (1/4) Epoch 14, batch 1200, loss[loss=0.1613, simple_loss=0.2448, pruned_loss=0.03886, over 18372.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2648, pruned_loss=0.04559, over 3584916.01 frames. ], batch size: 46, lr: 8.17e-03, grad_scale: 8.0 +2023-03-09 05:36:08,633 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48444.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:36:22,329 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.965e+02 3.035e+02 3.656e+02 4.283e+02 1.032e+03, threshold=7.311e+02, percent-clipped=2.0 +2023-03-09 05:36:23,915 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4007, 2.7270, 2.4255, 2.5933, 3.5565, 3.5010, 3.0019, 2.7909], + device='cuda:1'), covar=tensor([0.0183, 0.0248, 0.0536, 0.0395, 0.0149, 0.0145, 0.0366, 0.0371], + device='cuda:1'), in_proj_covar=tensor([0.0123, 0.0114, 0.0152, 0.0143, 0.0108, 0.0095, 0.0136, 0.0137], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 05:36:25,033 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48458.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:36:32,240 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.39 vs. 
limit=5.0 +2023-03-09 05:36:33,544 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48465.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 05:36:49,431 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0869, 5.1093, 5.1960, 4.8975, 4.8007, 4.9694, 5.3042, 5.2485], + device='cuda:1'), covar=tensor([0.0061, 0.0056, 0.0053, 0.0093, 0.0060, 0.0135, 0.0062, 0.0089], + device='cuda:1'), in_proj_covar=tensor([0.0084, 0.0060, 0.0064, 0.0081, 0.0067, 0.0091, 0.0076, 0.0077], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 05:36:56,419 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48484.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:37:05,996 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48492.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:37:06,946 INFO [train.py:898] (1/4) Epoch 14, batch 1250, loss[loss=0.1911, simple_loss=0.2803, pruned_loss=0.05093, over 18291.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2646, pruned_loss=0.04539, over 3575999.56 frames. ], batch size: 57, lr: 8.16e-03, grad_scale: 8.0 +2023-03-09 05:37:30,403 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48513.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 05:37:42,522 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6547, 3.6567, 3.5264, 3.0886, 3.3915, 2.6590, 2.6836, 3.7472], + device='cuda:1'), covar=tensor([0.0053, 0.0067, 0.0068, 0.0119, 0.0088, 0.0182, 0.0186, 0.0052], + device='cuda:1'), in_proj_covar=tensor([0.0111, 0.0136, 0.0117, 0.0167, 0.0118, 0.0163, 0.0165, 0.0097], + device='cuda:1'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 05:38:04,704 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2723, 5.1572, 5.4580, 5.4569, 5.1386, 6.0593, 5.5870, 5.3080], + device='cuda:1'), covar=tensor([0.1024, 0.0659, 0.0738, 0.0712, 0.1461, 0.0740, 0.0664, 0.1609], + device='cuda:1'), in_proj_covar=tensor([0.0325, 0.0256, 0.0268, 0.0272, 0.0307, 0.0379, 0.0249, 0.0373], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0004, 0.0002, 0.0003], + device='cuda:1') +2023-03-09 05:38:06,757 INFO [train.py:898] (1/4) Epoch 14, batch 1300, loss[loss=0.1599, simple_loss=0.2358, pruned_loss=0.04195, over 17711.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2648, pruned_loss=0.04527, over 3587684.33 frames. ], batch size: 39, lr: 8.16e-03, grad_scale: 8.0 +2023-03-09 05:38:21,327 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.028e+02 2.821e+02 3.494e+02 4.256e+02 8.799e+02, threshold=6.987e+02, percent-clipped=3.0 +2023-03-09 05:38:35,197 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48568.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 05:39:05,488 INFO [train.py:898] (1/4) Epoch 14, batch 1350, loss[loss=0.1628, simple_loss=0.2549, pruned_loss=0.03532, over 18357.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2646, pruned_loss=0.04517, over 3592604.31 frames. 
], batch size: 46, lr: 8.16e-03, grad_scale: 8.0 +2023-03-09 05:39:29,080 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5812, 3.6725, 4.9725, 4.3133, 3.2665, 3.0838, 4.2775, 5.1572], + device='cuda:1'), covar=tensor([0.0800, 0.1514, 0.0137, 0.0341, 0.0851, 0.1007, 0.0388, 0.0209], + device='cuda:1'), in_proj_covar=tensor([0.0137, 0.0253, 0.0117, 0.0167, 0.0178, 0.0178, 0.0181, 0.0164], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 05:40:05,008 INFO [train.py:898] (1/4) Epoch 14, batch 1400, loss[loss=0.1876, simple_loss=0.278, pruned_loss=0.04862, over 18317.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2649, pruned_loss=0.04524, over 3586233.66 frames. ], batch size: 56, lr: 8.15e-03, grad_scale: 8.0 +2023-03-09 05:40:19,853 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.683e+02 2.979e+02 3.618e+02 4.043e+02 7.873e+02, threshold=7.235e+02, percent-clipped=2.0 +2023-03-09 05:40:48,855 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48680.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:41:04,632 INFO [train.py:898] (1/4) Epoch 14, batch 1450, loss[loss=0.1794, simple_loss=0.2697, pruned_loss=0.04455, over 18303.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2641, pruned_loss=0.04497, over 3589659.84 frames. ], batch size: 54, lr: 8.15e-03, grad_scale: 8.0 +2023-03-09 05:41:28,003 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0496, 5.5475, 2.7577, 5.3769, 5.2277, 5.6237, 5.3551, 2.9988], + device='cuda:1'), covar=tensor([0.0187, 0.0050, 0.0791, 0.0063, 0.0072, 0.0047, 0.0077, 0.0856], + device='cuda:1'), in_proj_covar=tensor([0.0079, 0.0071, 0.0089, 0.0085, 0.0078, 0.0066, 0.0078, 0.0092], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 05:41:45,694 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48728.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:41:56,083 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0949, 5.0925, 5.1447, 4.8949, 4.8583, 4.9568, 5.2991, 5.2789], + device='cuda:1'), covar=tensor([0.0081, 0.0066, 0.0062, 0.0110, 0.0066, 0.0116, 0.0082, 0.0133], + device='cuda:1'), in_proj_covar=tensor([0.0086, 0.0061, 0.0064, 0.0083, 0.0068, 0.0093, 0.0077, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 05:42:03,618 INFO [train.py:898] (1/4) Epoch 14, batch 1500, loss[loss=0.2459, simple_loss=0.3132, pruned_loss=0.08927, over 12650.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2627, pruned_loss=0.04468, over 3590680.29 frames. 
], batch size: 129, lr: 8.14e-03, grad_scale: 8.0 +2023-03-09 05:42:15,646 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48753.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:42:18,856 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.036e+02 2.981e+02 3.460e+02 4.226e+02 1.044e+03, threshold=6.921e+02, percent-clipped=2.0 +2023-03-09 05:42:42,176 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48776.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:42:45,492 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48779.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:42:45,624 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9396, 5.4732, 5.4518, 5.4334, 4.9436, 5.3894, 4.7511, 5.3422], + device='cuda:1'), covar=tensor([0.0243, 0.0241, 0.0172, 0.0341, 0.0378, 0.0198, 0.0997, 0.0282], + device='cuda:1'), in_proj_covar=tensor([0.0192, 0.0237, 0.0226, 0.0273, 0.0240, 0.0236, 0.0293, 0.0230], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 05:42:49,663 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9671, 3.2936, 4.3775, 4.0959, 2.7585, 4.7222, 4.0960, 3.0024], + device='cuda:1'), covar=tensor([0.0344, 0.1144, 0.0239, 0.0264, 0.1385, 0.0177, 0.0464, 0.0882], + device='cuda:1'), in_proj_covar=tensor([0.0196, 0.0225, 0.0169, 0.0146, 0.0214, 0.0189, 0.0218, 0.0190], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 05:43:00,967 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.87 vs. limit=5.0 +2023-03-09 05:43:02,605 INFO [train.py:898] (1/4) Epoch 14, batch 1550, loss[loss=0.1448, simple_loss=0.2284, pruned_loss=0.03056, over 18424.00 frames. ], tot_loss[loss=0.176, simple_loss=0.263, pruned_loss=0.04454, over 3594327.22 frames. ], batch size: 43, lr: 8.14e-03, grad_scale: 8.0 +2023-03-09 05:43:35,867 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48821.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:43:55,872 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48838.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:44:02,362 INFO [train.py:898] (1/4) Epoch 14, batch 1600, loss[loss=0.1899, simple_loss=0.2709, pruned_loss=0.05444, over 17697.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2637, pruned_loss=0.04498, over 3597235.74 frames. 
], batch size: 70, lr: 8.14e-03, grad_scale: 8.0 +2023-03-09 05:44:02,655 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9552, 4.9420, 4.9389, 4.7356, 4.7915, 4.8921, 5.1835, 5.1396], + device='cuda:1'), covar=tensor([0.0059, 0.0055, 0.0063, 0.0095, 0.0053, 0.0104, 0.0059, 0.0085], + device='cuda:1'), in_proj_covar=tensor([0.0084, 0.0060, 0.0063, 0.0081, 0.0067, 0.0091, 0.0076, 0.0077], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 05:44:17,660 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.056e+02 2.816e+02 3.506e+02 4.400e+02 1.387e+03, threshold=7.012e+02, percent-clipped=5.0 +2023-03-09 05:44:31,418 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48868.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 05:44:47,272 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48882.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:44:59,861 INFO [train.py:898] (1/4) Epoch 14, batch 1650, loss[loss=0.1961, simple_loss=0.2874, pruned_loss=0.05241, over 18065.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2646, pruned_loss=0.04541, over 3596138.20 frames. ], batch size: 62, lr: 8.13e-03, grad_scale: 8.0 +2023-03-09 05:45:07,584 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48899.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:45:26,251 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9070, 4.4489, 4.5705, 3.2193, 3.6861, 3.4782, 2.7464, 2.4070], + device='cuda:1'), covar=tensor([0.0194, 0.0175, 0.0077, 0.0320, 0.0341, 0.0191, 0.0685, 0.0875], + device='cuda:1'), in_proj_covar=tensor([0.0061, 0.0051, 0.0052, 0.0062, 0.0082, 0.0058, 0.0073, 0.0080], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0006, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-09 05:45:27,192 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48916.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 05:45:58,717 INFO [train.py:898] (1/4) Epoch 14, batch 1700, loss[loss=0.1665, simple_loss=0.2595, pruned_loss=0.03676, over 16055.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2647, pruned_loss=0.04541, over 3601244.88 frames. ], batch size: 94, lr: 8.13e-03, grad_scale: 8.0 +2023-03-09 05:46:15,066 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.904e+02 3.125e+02 3.663e+02 4.529e+02 8.545e+02, threshold=7.326e+02, percent-clipped=5.0 +2023-03-09 05:46:42,429 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48980.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:46:42,478 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4730, 2.7778, 3.8970, 3.5738, 2.5611, 4.1936, 3.7111, 2.7482], + device='cuda:1'), covar=tensor([0.0447, 0.1338, 0.0255, 0.0319, 0.1446, 0.0176, 0.0549, 0.0945], + device='cuda:1'), in_proj_covar=tensor([0.0201, 0.0231, 0.0174, 0.0150, 0.0219, 0.0194, 0.0224, 0.0194], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 05:46:57,462 INFO [train.py:898] (1/4) Epoch 14, batch 1750, loss[loss=0.2989, simple_loss=0.3555, pruned_loss=0.1212, over 13097.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2652, pruned_loss=0.04572, over 3597200.39 frames. 
], batch size: 130, lr: 8.12e-03, grad_scale: 8.0 +2023-03-09 05:47:39,616 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49028.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:47:56,918 INFO [train.py:898] (1/4) Epoch 14, batch 1800, loss[loss=0.1868, simple_loss=0.279, pruned_loss=0.04734, over 18271.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2654, pruned_loss=0.04568, over 3588295.84 frames. ], batch size: 57, lr: 8.12e-03, grad_scale: 8.0 +2023-03-09 05:47:57,371 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4335, 2.8450, 2.3317, 2.7380, 3.5696, 3.5513, 3.0110, 2.9485], + device='cuda:1'), covar=tensor([0.0250, 0.0249, 0.0686, 0.0408, 0.0164, 0.0170, 0.0352, 0.0353], + device='cuda:1'), in_proj_covar=tensor([0.0125, 0.0118, 0.0156, 0.0148, 0.0112, 0.0098, 0.0141, 0.0140], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 05:48:09,362 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49053.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:48:13,403 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.931e+02 2.916e+02 3.364e+02 3.970e+02 9.603e+02, threshold=6.727e+02, percent-clipped=4.0 +2023-03-09 05:48:40,895 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49079.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:48:56,136 INFO [train.py:898] (1/4) Epoch 14, batch 1850, loss[loss=0.2297, simple_loss=0.3019, pruned_loss=0.07875, over 12600.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2656, pruned_loss=0.0457, over 3593925.62 frames. ], batch size: 129, lr: 8.12e-03, grad_scale: 8.0 +2023-03-09 05:49:06,065 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49101.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:49:10,275 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0292, 4.2409, 2.4579, 4.2598, 5.1557, 2.5218, 3.7829, 3.7534], + device='cuda:1'), covar=tensor([0.0123, 0.1063, 0.1734, 0.0553, 0.0068, 0.1374, 0.0714, 0.0876], + device='cuda:1'), in_proj_covar=tensor([0.0137, 0.0250, 0.0196, 0.0194, 0.0100, 0.0179, 0.0209, 0.0216], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 05:49:13,528 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 05:49:37,849 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49127.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:49:40,241 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8200, 4.7261, 2.6319, 4.5456, 4.4874, 4.7539, 4.5584, 2.5822], + device='cuda:1'), covar=tensor([0.0180, 0.0058, 0.0754, 0.0112, 0.0079, 0.0061, 0.0087, 0.0965], + device='cuda:1'), in_proj_covar=tensor([0.0079, 0.0071, 0.0090, 0.0085, 0.0079, 0.0067, 0.0078, 0.0092], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 05:49:55,835 INFO [train.py:898] (1/4) Epoch 14, batch 1900, loss[loss=0.1716, simple_loss=0.2596, pruned_loss=0.04175, over 18353.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2656, pruned_loss=0.04582, over 3583419.52 frames. 
], batch size: 56, lr: 8.11e-03, grad_scale: 8.0 +2023-03-09 05:50:11,576 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.722e+02 2.855e+02 3.283e+02 4.141e+02 6.742e+02, threshold=6.565e+02, percent-clipped=1.0 +2023-03-09 05:50:33,788 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6735, 3.8606, 5.1806, 4.5224, 3.2689, 3.2525, 4.6985, 5.4163], + device='cuda:1'), covar=tensor([0.0896, 0.1666, 0.0143, 0.0370, 0.0994, 0.1121, 0.0334, 0.0211], + device='cuda:1'), in_proj_covar=tensor([0.0139, 0.0256, 0.0119, 0.0168, 0.0181, 0.0181, 0.0183, 0.0167], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 05:50:37,030 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49177.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:50:50,666 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9018, 5.2959, 2.5851, 5.1350, 4.9511, 5.2976, 5.0578, 2.3286], + device='cuda:1'), covar=tensor([0.0218, 0.0066, 0.0921, 0.0087, 0.0094, 0.0074, 0.0110, 0.1271], + device='cuda:1'), in_proj_covar=tensor([0.0079, 0.0071, 0.0090, 0.0085, 0.0078, 0.0067, 0.0078, 0.0092], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 05:50:54,816 INFO [train.py:898] (1/4) Epoch 14, batch 1950, loss[loss=0.1519, simple_loss=0.2332, pruned_loss=0.03536, over 18439.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2655, pruned_loss=0.04594, over 3588612.83 frames. ], batch size: 43, lr: 8.11e-03, grad_scale: 8.0 +2023-03-09 05:50:56,142 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49194.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:50:56,861 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-09 05:51:25,792 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.50 vs. limit=2.0 +2023-03-09 05:51:42,432 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9994, 3.2126, 4.3857, 4.0531, 3.0449, 4.8290, 4.1301, 3.1514], + device='cuda:1'), covar=tensor([0.0391, 0.1289, 0.0291, 0.0336, 0.1219, 0.0157, 0.0461, 0.0869], + device='cuda:1'), in_proj_covar=tensor([0.0201, 0.0232, 0.0175, 0.0151, 0.0219, 0.0194, 0.0226, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 05:51:46,872 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49236.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:51:54,543 INFO [train.py:898] (1/4) Epoch 14, batch 2000, loss[loss=0.1716, simple_loss=0.2664, pruned_loss=0.03843, over 18558.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2654, pruned_loss=0.04577, over 3591094.93 frames. ], batch size: 54, lr: 8.10e-03, grad_scale: 8.0 +2023-03-09 05:52:09,966 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.669e+02 2.933e+02 3.417e+02 3.982e+02 2.319e+03, threshold=6.833e+02, percent-clipped=9.0 +2023-03-09 05:52:53,830 INFO [train.py:898] (1/4) Epoch 14, batch 2050, loss[loss=0.1951, simple_loss=0.2818, pruned_loss=0.05422, over 17099.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2645, pruned_loss=0.04536, over 3606911.77 frames. 
], batch size: 78, lr: 8.10e-03, grad_scale: 8.0 +2023-03-09 05:52:58,919 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49297.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:53:26,698 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-09 05:53:54,022 INFO [train.py:898] (1/4) Epoch 14, batch 2100, loss[loss=0.1789, simple_loss=0.2641, pruned_loss=0.04685, over 18409.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2642, pruned_loss=0.04529, over 3597157.25 frames. ], batch size: 48, lr: 8.09e-03, grad_scale: 8.0 +2023-03-09 05:54:04,831 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-09 05:54:08,752 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.284e+02 2.957e+02 3.376e+02 4.046e+02 6.341e+02, threshold=6.752e+02, percent-clipped=0.0 +2023-03-09 05:54:53,536 INFO [train.py:898] (1/4) Epoch 14, batch 2150, loss[loss=0.2196, simple_loss=0.2932, pruned_loss=0.07303, over 12235.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2645, pruned_loss=0.04537, over 3592617.06 frames. ], batch size: 129, lr: 8.09e-03, grad_scale: 8.0 +2023-03-09 05:55:27,870 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49422.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:55:39,943 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49432.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 05:55:52,816 INFO [train.py:898] (1/4) Epoch 14, batch 2200, loss[loss=0.1914, simple_loss=0.2814, pruned_loss=0.05066, over 18478.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2647, pruned_loss=0.04567, over 3592075.90 frames. ], batch size: 53, lr: 8.09e-03, grad_scale: 8.0 +2023-03-09 05:56:07,485 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.048e+02 2.848e+02 3.284e+02 4.322e+02 8.537e+02, threshold=6.567e+02, percent-clipped=2.0 +2023-03-09 05:56:33,091 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49477.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:56:37,162 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3909, 5.3470, 4.9808, 5.3041, 5.2750, 4.6325, 5.2089, 4.9597], + device='cuda:1'), covar=tensor([0.0421, 0.0419, 0.1323, 0.0833, 0.0552, 0.0459, 0.0398, 0.0920], + device='cuda:1'), in_proj_covar=tensor([0.0432, 0.0494, 0.0655, 0.0390, 0.0382, 0.0450, 0.0474, 0.0620], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 05:56:40,652 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49483.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:56:52,243 INFO [train.py:898] (1/4) Epoch 14, batch 2250, loss[loss=0.1873, simple_loss=0.2733, pruned_loss=0.05068, over 17177.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.265, pruned_loss=0.04585, over 3587848.95 frames. 
], batch size: 78, lr: 8.08e-03, grad_scale: 8.0 +2023-03-09 05:56:52,650 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49493.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 05:56:53,602 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49494.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:57:29,645 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49525.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:57:49,600 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49542.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:57:50,504 INFO [train.py:898] (1/4) Epoch 14, batch 2300, loss[loss=0.1895, simple_loss=0.2876, pruned_loss=0.04565, over 18267.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2662, pruned_loss=0.04615, over 3589891.05 frames. ], batch size: 60, lr: 8.08e-03, grad_scale: 8.0 +2023-03-09 05:58:05,068 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.043e+02 3.294e+02 3.736e+02 4.526e+02 1.029e+03, threshold=7.472e+02, percent-clipped=8.0 +2023-03-09 05:58:47,137 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49592.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:58:48,615 INFO [train.py:898] (1/4) Epoch 14, batch 2350, loss[loss=0.1706, simple_loss=0.2648, pruned_loss=0.03824, over 18500.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2668, pruned_loss=0.0467, over 3578597.40 frames. ], batch size: 53, lr: 8.07e-03, grad_scale: 8.0 +2023-03-09 05:58:52,731 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.2746, 3.8532, 5.1908, 3.0466, 4.6707, 2.7655, 3.1631, 1.9177], + device='cuda:1'), covar=tensor([0.0852, 0.0797, 0.0100, 0.0718, 0.0454, 0.2362, 0.2622, 0.1896], + device='cuda:1'), in_proj_covar=tensor([0.0206, 0.0224, 0.0138, 0.0178, 0.0238, 0.0253, 0.0300, 0.0219], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 05:59:48,059 INFO [train.py:898] (1/4) Epoch 14, batch 2400, loss[loss=0.1639, simple_loss=0.2574, pruned_loss=0.03518, over 18488.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2659, pruned_loss=0.04598, over 3576720.69 frames. ], batch size: 51, lr: 8.07e-03, grad_scale: 8.0 +2023-03-09 06:00:03,324 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.270e+02 3.059e+02 3.720e+02 4.555e+02 1.609e+03, threshold=7.441e+02, percent-clipped=3.0 +2023-03-09 06:00:46,955 INFO [train.py:898] (1/4) Epoch 14, batch 2450, loss[loss=0.1695, simple_loss=0.2575, pruned_loss=0.04075, over 18385.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2658, pruned_loss=0.04607, over 3577535.98 frames. ], batch size: 50, lr: 8.07e-03, grad_scale: 8.0 +2023-03-09 06:01:46,449 INFO [train.py:898] (1/4) Epoch 14, batch 2500, loss[loss=0.1972, simple_loss=0.2925, pruned_loss=0.05097, over 18261.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2657, pruned_loss=0.04632, over 3564373.90 frames. 
], batch size: 60, lr: 8.06e-03, grad_scale: 8.0 +2023-03-09 06:02:01,765 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.909e+02 2.668e+02 3.143e+02 3.903e+02 7.073e+02, threshold=6.286e+02, percent-clipped=0.0 +2023-03-09 06:02:13,362 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49766.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:02:27,083 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49778.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:02:39,145 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49788.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 06:02:44,643 INFO [train.py:898] (1/4) Epoch 14, batch 2550, loss[loss=0.1603, simple_loss=0.2429, pruned_loss=0.03883, over 18374.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2649, pruned_loss=0.046, over 3574324.14 frames. ], batch size: 46, lr: 8.06e-03, grad_scale: 8.0 +2023-03-09 06:03:12,288 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8252, 5.3101, 2.6614, 5.1354, 4.9942, 5.3324, 5.0541, 2.5929], + device='cuda:1'), covar=tensor([0.0215, 0.0063, 0.0851, 0.0080, 0.0074, 0.0077, 0.0104, 0.1089], + device='cuda:1'), in_proj_covar=tensor([0.0081, 0.0073, 0.0091, 0.0086, 0.0080, 0.0069, 0.0079, 0.0095], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 06:03:24,850 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49827.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:03:44,087 INFO [train.py:898] (1/4) Epoch 14, batch 2600, loss[loss=0.1611, simple_loss=0.2494, pruned_loss=0.03642, over 18393.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2639, pruned_loss=0.04556, over 3580972.91 frames. ], batch size: 48, lr: 8.05e-03, grad_scale: 8.0 +2023-03-09 06:03:51,963 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8722, 2.5251, 2.2136, 2.3791, 3.0735, 3.0102, 2.7290, 2.5777], + device='cuda:1'), covar=tensor([0.0243, 0.0252, 0.0558, 0.0447, 0.0190, 0.0145, 0.0359, 0.0283], + device='cuda:1'), in_proj_covar=tensor([0.0124, 0.0118, 0.0155, 0.0145, 0.0111, 0.0099, 0.0139, 0.0139], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 06:03:59,921 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.176e+02 2.916e+02 3.456e+02 4.257e+02 7.547e+02, threshold=6.912e+02, percent-clipped=3.0 +2023-03-09 06:04:42,299 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49892.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:04:43,088 INFO [train.py:898] (1/4) Epoch 14, batch 2650, loss[loss=0.1548, simple_loss=0.2403, pruned_loss=0.03465, over 18559.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2648, pruned_loss=0.04572, over 3581289.34 frames. ], batch size: 49, lr: 8.05e-03, grad_scale: 8.0 +2023-03-09 06:05:38,380 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49940.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:05:42,179 INFO [train.py:898] (1/4) Epoch 14, batch 2700, loss[loss=0.2245, simple_loss=0.2927, pruned_loss=0.07815, over 13262.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2652, pruned_loss=0.04584, over 3578064.59 frames. 
], batch size: 130, lr: 8.05e-03, grad_scale: 8.0 +2023-03-09 06:05:57,464 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.140e+02 2.972e+02 3.465e+02 4.147e+02 6.859e+02, threshold=6.931e+02, percent-clipped=0.0 +2023-03-09 06:06:07,389 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5028, 5.4652, 5.0478, 5.3962, 5.3249, 4.8548, 5.3271, 5.0570], + device='cuda:1'), covar=tensor([0.0333, 0.0370, 0.1285, 0.0653, 0.0582, 0.0346, 0.0352, 0.0990], + device='cuda:1'), in_proj_covar=tensor([0.0433, 0.0490, 0.0646, 0.0387, 0.0380, 0.0444, 0.0471, 0.0614], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 06:06:40,853 INFO [train.py:898] (1/4) Epoch 14, batch 2750, loss[loss=0.1759, simple_loss=0.2649, pruned_loss=0.04342, over 17988.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2644, pruned_loss=0.04573, over 3579276.14 frames. ], batch size: 65, lr: 8.04e-03, grad_scale: 8.0 +2023-03-09 06:07:32,690 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-09 06:07:43,636 INFO [train.py:898] (1/4) Epoch 14, batch 2800, loss[loss=0.1848, simple_loss=0.2737, pruned_loss=0.04796, over 18366.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2656, pruned_loss=0.04652, over 3575948.19 frames. ], batch size: 56, lr: 8.04e-03, grad_scale: 16.0 +2023-03-09 06:07:58,949 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.149e+02 3.000e+02 3.519e+02 4.137e+02 9.656e+02, threshold=7.037e+02, percent-clipped=4.0 +2023-03-09 06:08:25,683 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50078.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:08:36,988 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50088.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 06:08:42,942 INFO [train.py:898] (1/4) Epoch 14, batch 2850, loss[loss=0.1605, simple_loss=0.2393, pruned_loss=0.04088, over 18393.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2653, pruned_loss=0.04626, over 3585614.64 frames. ], batch size: 42, lr: 8.03e-03, grad_scale: 16.0 +2023-03-09 06:09:17,615 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50122.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:09:22,113 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50126.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:09:24,577 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50128.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:09:33,406 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50136.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 06:09:36,761 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3753, 5.2372, 5.6328, 5.5726, 5.2447, 6.1489, 5.8548, 5.4233], + device='cuda:1'), covar=tensor([0.1033, 0.0609, 0.0735, 0.0640, 0.1387, 0.0756, 0.0488, 0.1685], + device='cuda:1'), in_proj_covar=tensor([0.0331, 0.0261, 0.0277, 0.0279, 0.0314, 0.0386, 0.0255, 0.0376], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:1') +2023-03-09 06:09:40,916 INFO [train.py:898] (1/4) Epoch 14, batch 2900, loss[loss=0.192, simple_loss=0.28, pruned_loss=0.05205, over 16244.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2657, pruned_loss=0.04604, over 3590561.94 frames. 
], batch size: 94, lr: 8.03e-03, grad_scale: 16.0 +2023-03-09 06:09:43,991 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5893, 2.8618, 2.5700, 2.9777, 3.5906, 3.5624, 3.1258, 2.9385], + device='cuda:1'), covar=tensor([0.0193, 0.0303, 0.0557, 0.0367, 0.0175, 0.0147, 0.0366, 0.0369], + device='cuda:1'), in_proj_covar=tensor([0.0126, 0.0121, 0.0156, 0.0146, 0.0113, 0.0100, 0.0140, 0.0141], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 06:09:56,508 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.084e+02 3.147e+02 3.827e+02 4.513e+02 7.864e+02, threshold=7.653e+02, percent-clipped=1.0 +2023-03-09 06:10:03,336 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8182, 3.1090, 4.4112, 4.0524, 3.0009, 4.8666, 4.0997, 3.0343], + device='cuda:1'), covar=tensor([0.0484, 0.1391, 0.0260, 0.0353, 0.1332, 0.0162, 0.0506, 0.0987], + device='cuda:1'), in_proj_covar=tensor([0.0202, 0.0231, 0.0176, 0.0150, 0.0217, 0.0196, 0.0224, 0.0193], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 06:10:23,435 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3089, 2.8053, 2.3336, 2.8289, 3.4308, 3.3931, 2.9130, 2.7558], + device='cuda:1'), covar=tensor([0.0196, 0.0239, 0.0572, 0.0327, 0.0202, 0.0141, 0.0360, 0.0324], + device='cuda:1'), in_proj_covar=tensor([0.0127, 0.0122, 0.0157, 0.0147, 0.0114, 0.0101, 0.0141, 0.0142], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 06:10:34,447 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50189.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:10:38,565 INFO [train.py:898] (1/4) Epoch 14, batch 2950, loss[loss=0.2058, simple_loss=0.2873, pruned_loss=0.06218, over 12310.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2658, pruned_loss=0.0462, over 3588585.00 frames. ], batch size: 129, lr: 8.03e-03, grad_scale: 16.0 +2023-03-09 06:10:43,779 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50197.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 06:10:59,734 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.63 vs. limit=5.0 +2023-03-09 06:11:37,782 INFO [train.py:898] (1/4) Epoch 14, batch 3000, loss[loss=0.1874, simple_loss=0.2764, pruned_loss=0.04922, over 18304.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2645, pruned_loss=0.04544, over 3604682.47 frames. ], batch size: 49, lr: 8.02e-03, grad_scale: 16.0 +2023-03-09 06:11:37,783 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 06:11:49,728 INFO [train.py:932] (1/4) Epoch 14, validation: loss=0.1532, simple_loss=0.2546, pruned_loss=0.02587, over 944034.00 frames. +2023-03-09 06:11:49,728 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 06:12:04,123 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.273e+02 3.285e+02 3.966e+02 4.720e+02 1.017e+03, threshold=7.933e+02, percent-clipped=5.0 +2023-03-09 06:12:07,525 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50258.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 06:12:46,947 INFO [train.py:898] (1/4) Epoch 14, batch 3050, loss[loss=0.175, simple_loss=0.2591, pruned_loss=0.04547, over 18372.00 frames. 
], tot_loss[loss=0.1782, simple_loss=0.2652, pruned_loss=0.04557, over 3606555.77 frames. ], batch size: 50, lr: 8.02e-03, grad_scale: 16.0 +2023-03-09 06:13:45,677 INFO [train.py:898] (1/4) Epoch 14, batch 3100, loss[loss=0.167, simple_loss=0.2584, pruned_loss=0.03783, over 18404.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2642, pruned_loss=0.04518, over 3599898.51 frames. ], batch size: 52, lr: 8.01e-03, grad_scale: 16.0 +2023-03-09 06:14:00,590 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.982e+02 2.722e+02 3.409e+02 4.335e+02 9.164e+02, threshold=6.818e+02, percent-clipped=2.0 +2023-03-09 06:14:42,790 INFO [train.py:898] (1/4) Epoch 14, batch 3150, loss[loss=0.1798, simple_loss=0.2728, pruned_loss=0.04335, over 18074.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2649, pruned_loss=0.04522, over 3602166.77 frames. ], batch size: 65, lr: 8.01e-03, grad_scale: 16.0 +2023-03-09 06:15:18,784 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50422.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:15:25,700 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6243, 2.8827, 2.5497, 2.9208, 3.7170, 3.6696, 3.1542, 3.0428], + device='cuda:1'), covar=tensor([0.0199, 0.0295, 0.0510, 0.0382, 0.0153, 0.0145, 0.0378, 0.0358], + device='cuda:1'), in_proj_covar=tensor([0.0126, 0.0121, 0.0157, 0.0148, 0.0113, 0.0100, 0.0141, 0.0140], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 06:15:28,466 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.41 vs. limit=5.0 +2023-03-09 06:15:39,759 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.45 vs. limit=5.0 +2023-03-09 06:15:42,275 INFO [train.py:898] (1/4) Epoch 14, batch 3200, loss[loss=0.185, simple_loss=0.2723, pruned_loss=0.04887, over 16980.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2647, pruned_loss=0.04533, over 3592393.29 frames. ], batch size: 78, lr: 8.01e-03, grad_scale: 16.0 +2023-03-09 06:15:51,507 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.35 vs. limit=5.0 +2023-03-09 06:15:58,906 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.049e+02 3.101e+02 3.675e+02 4.644e+02 1.158e+03, threshold=7.350e+02, percent-clipped=4.0 +2023-03-09 06:16:14,119 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50470.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:16:30,689 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50484.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:16:34,253 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.31 vs. limit=5.0 +2023-03-09 06:16:40,193 INFO [train.py:898] (1/4) Epoch 14, batch 3250, loss[loss=0.1798, simple_loss=0.2735, pruned_loss=0.04311, over 17294.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2654, pruned_loss=0.04568, over 3588469.19 frames. 
], batch size: 78, lr: 8.00e-03, grad_scale: 8.0 +2023-03-09 06:17:00,739 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7150, 5.3285, 5.2245, 5.2602, 4.7767, 5.1598, 4.6234, 5.1639], + device='cuda:1'), covar=tensor([0.0259, 0.0268, 0.0221, 0.0365, 0.0378, 0.0218, 0.1068, 0.0280], + device='cuda:1'), in_proj_covar=tensor([0.0191, 0.0236, 0.0231, 0.0278, 0.0241, 0.0241, 0.0292, 0.0230], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:1') +2023-03-09 06:17:08,569 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4448, 5.4032, 5.0371, 5.3808, 5.3071, 4.7487, 5.2699, 5.0246], + device='cuda:1'), covar=tensor([0.0373, 0.0403, 0.1251, 0.0738, 0.0584, 0.0411, 0.0365, 0.0912], + device='cuda:1'), in_proj_covar=tensor([0.0442, 0.0501, 0.0663, 0.0392, 0.0390, 0.0454, 0.0477, 0.0623], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 06:17:22,708 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-09 06:17:39,139 INFO [train.py:898] (1/4) Epoch 14, batch 3300, loss[loss=0.1619, simple_loss=0.2524, pruned_loss=0.03572, over 18558.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2646, pruned_loss=0.04534, over 3586449.32 frames. ], batch size: 54, lr: 8.00e-03, grad_scale: 8.0 +2023-03-09 06:17:42,674 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50546.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:17:50,975 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50553.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 06:17:55,807 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.637e+02 3.010e+02 3.418e+02 4.071e+02 6.649e+02, threshold=6.837e+02, percent-clipped=0.0 +2023-03-09 06:18:13,058 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-03-09 06:18:33,827 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6952, 2.0737, 2.7288, 2.6128, 3.2163, 4.9233, 4.5086, 3.5211], + device='cuda:1'), covar=tensor([0.1448, 0.2318, 0.2544, 0.1648, 0.2168, 0.0148, 0.0407, 0.0708], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0321, 0.0343, 0.0260, 0.0372, 0.0211, 0.0276, 0.0227], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 06:18:37,895 INFO [train.py:898] (1/4) Epoch 14, batch 3350, loss[loss=0.1751, simple_loss=0.2611, pruned_loss=0.04455, over 18631.00 frames. ], tot_loss[loss=0.178, simple_loss=0.265, pruned_loss=0.04547, over 3582420.58 frames. 
], batch size: 52, lr: 8.00e-03, grad_scale: 8.0 +2023-03-09 06:18:55,308 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50607.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:19:05,323 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6474, 4.8980, 4.8932, 4.8479, 4.6354, 5.4039, 5.0431, 4.7532], + device='cuda:1'), covar=tensor([0.1117, 0.0726, 0.0750, 0.0792, 0.1372, 0.0777, 0.0652, 0.1577], + device='cuda:1'), in_proj_covar=tensor([0.0324, 0.0252, 0.0268, 0.0269, 0.0303, 0.0375, 0.0250, 0.0368], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0003], + device='cuda:1') +2023-03-09 06:19:36,721 INFO [train.py:898] (1/4) Epoch 14, batch 3400, loss[loss=0.1813, simple_loss=0.2647, pruned_loss=0.04891, over 18277.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2655, pruned_loss=0.04553, over 3579723.42 frames. ], batch size: 57, lr: 7.99e-03, grad_scale: 8.0 +2023-03-09 06:19:53,299 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.118e+02 2.885e+02 3.458e+02 4.304e+02 7.306e+02, threshold=6.916e+02, percent-clipped=1.0 +2023-03-09 06:20:13,946 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.72 vs. limit=2.0 +2023-03-09 06:20:18,012 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3638, 5.9900, 5.5940, 5.7721, 5.5251, 5.4608, 6.0681, 5.9737], + device='cuda:1'), covar=tensor([0.1277, 0.0743, 0.0376, 0.0709, 0.1311, 0.0717, 0.0521, 0.0732], + device='cuda:1'), in_proj_covar=tensor([0.0554, 0.0467, 0.0347, 0.0500, 0.0683, 0.0503, 0.0666, 0.0497], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 06:20:35,032 INFO [train.py:898] (1/4) Epoch 14, batch 3450, loss[loss=0.185, simple_loss=0.2689, pruned_loss=0.05054, over 18026.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.265, pruned_loss=0.04521, over 3580630.49 frames. ], batch size: 65, lr: 7.99e-03, grad_scale: 8.0 +2023-03-09 06:21:32,280 INFO [train.py:898] (1/4) Epoch 14, batch 3500, loss[loss=0.1815, simple_loss=0.2714, pruned_loss=0.04584, over 18494.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2644, pruned_loss=0.0452, over 3593059.45 frames. ], batch size: 51, lr: 7.98e-03, grad_scale: 8.0 +2023-03-09 06:21:47,966 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.084e+02 3.008e+02 3.421e+02 4.567e+02 7.789e+02, threshold=6.843e+02, percent-clipped=2.0 +2023-03-09 06:22:15,639 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50782.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:22:17,575 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50784.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:22:25,246 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-09 06:22:26,425 INFO [train.py:898] (1/4) Epoch 14, batch 3550, loss[loss=0.1835, simple_loss=0.2625, pruned_loss=0.05228, over 18580.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2645, pruned_loss=0.04553, over 3593596.39 frames. 
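
The scaling.py Whitening lines monitor how close a layer's activations are to having a white (isotropic) covariance within each channel group: a metric of 1.0 means all covariance eigenvalues are equal, and the module only intervenes once the metric drifts past the logged limit. One plausible definition with these properties, assuming the metric is the mean squared eigenvalue over the squared mean eigenvalue (a guess at the formula, not lifted from scaling.py):

    import torch

    def whitening_metric(x: torch.Tensor) -> torch.Tensor:
        # x: (num_frames, num_channels) activations for one group.
        # Result is >= 1.0, with equality iff the covariance is a
        # multiple of the identity, matching the 'metric vs. limit'
        # readings above.
        x = x - x.mean(dim=0)
        cov = (x.t() @ x) / x.shape[0]
        eigs = torch.linalg.eigvalsh(cov)
        return (eigs ** 2).mean() / eigs.mean() ** 2

    x = torch.randn(2000, 96)          # already-white activations
    print(whitening_metric(x))         # close to 1.0, under limit=2.0
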
], batch size: 45, lr: 7.98e-03, grad_scale: 8.0 +2023-03-09 06:22:41,004 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2619, 5.2423, 4.8782, 5.1857, 5.1605, 4.6086, 5.0950, 4.8814], + device='cuda:1'), covar=tensor([0.0399, 0.0443, 0.1332, 0.0711, 0.0555, 0.0402, 0.0370, 0.0958], + device='cuda:1'), in_proj_covar=tensor([0.0442, 0.0502, 0.0661, 0.0392, 0.0390, 0.0453, 0.0479, 0.0622], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 06:22:43,163 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50808.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 06:23:08,093 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50832.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:23:19,513 INFO [train.py:898] (1/4) Epoch 14, batch 3600, loss[loss=0.1639, simple_loss=0.2422, pruned_loss=0.04285, over 18424.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2651, pruned_loss=0.0459, over 3593547.16 frames. ], batch size: 43, lr: 7.98e-03, grad_scale: 8.0 +2023-03-09 06:23:19,773 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50843.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:23:30,341 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50853.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 06:23:34,503 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.994e+02 3.100e+02 3.552e+02 4.596e+02 8.414e+02, threshold=7.104e+02, percent-clipped=7.0 +2023-03-09 06:23:47,665 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50869.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 06:24:25,705 INFO [train.py:898] (1/4) Epoch 15, batch 0, loss[loss=0.1743, simple_loss=0.2621, pruned_loss=0.04323, over 18523.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2621, pruned_loss=0.04323, over 18523.00 frames. ], batch size: 49, lr: 7.70e-03, grad_scale: 8.0 +2023-03-09 06:24:25,705 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 06:24:32,894 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0084, 2.9841, 1.7903, 3.5185, 2.4731, 3.2661, 2.0389, 3.0956], + device='cuda:1'), covar=tensor([0.0692, 0.0912, 0.1565, 0.0500, 0.1000, 0.0298, 0.1319, 0.0454], + device='cuda:1'), in_proj_covar=tensor([0.0202, 0.0216, 0.0181, 0.0258, 0.0184, 0.0252, 0.0194, 0.0192], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 06:24:37,511 INFO [train.py:932] (1/4) Epoch 15, validation: loss=0.1543, simple_loss=0.2557, pruned_loss=0.02649, over 944034.00 frames. 
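
At the epoch boundary just above, the learning rate steps from 7.98e-03 down to 7.70e-03 even though the batch counter keeps running, which is consistent with an Eden-style schedule that decays in both the batch index and the epoch index. Assuming lr = base_lr * ((b^2 + B^2) / B^2)^-0.25 * ((e^2 + E^2) / E^2)^-0.25 with base_lr=0.05, B=5000 and E=3.5, and an epoch index offset by one from the printed epoch number, the sketch below reproduces the logged values to the displayed precision; the exact formula in this checkout may differ:

    def eden_lr(batch: int, epoch: int, base_lr: float = 0.05,
                lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
        # Hypothetical Eden-style schedule fitted to the lr values in
        # this log; both factors decay like x**-0.5 for large arguments.
        bf = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
        ef = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
        return base_lr * bf * ef

    print(eden_lr(batch=50_875, epoch=14))   # ~7.70e-03 (printed epoch 15)
    print(eden_lr(batch=50_200, epoch=13))   # ~8.03e-03 (printed epoch 14)
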
+2023-03-09 06:24:37,512 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 06:24:40,698 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8206, 3.7243, 5.0435, 4.2883, 3.2513, 2.8846, 4.4884, 5.2924], + device='cuda:1'), covar=tensor([0.0786, 0.1454, 0.0165, 0.0400, 0.1001, 0.1287, 0.0390, 0.0172], + device='cuda:1'), in_proj_covar=tensor([0.0139, 0.0256, 0.0122, 0.0170, 0.0182, 0.0182, 0.0183, 0.0167], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 06:25:06,585 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50901.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 06:25:07,717 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50902.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:25:11,448 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.65 vs. limit=2.0 +2023-03-09 06:25:34,588 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0121, 5.4429, 5.4730, 5.4196, 5.0048, 5.3227, 4.7919, 5.3721], + device='cuda:1'), covar=tensor([0.0232, 0.0243, 0.0188, 0.0399, 0.0389, 0.0239, 0.1096, 0.0267], + device='cuda:1'), in_proj_covar=tensor([0.0190, 0.0235, 0.0231, 0.0280, 0.0241, 0.0240, 0.0291, 0.0229], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:1') +2023-03-09 06:25:35,425 INFO [train.py:898] (1/4) Epoch 15, batch 50, loss[loss=0.1463, simple_loss=0.2317, pruned_loss=0.03047, over 18563.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2633, pruned_loss=0.04285, over 825799.14 frames. ], batch size: 45, lr: 7.70e-03, grad_scale: 8.0 +2023-03-09 06:25:58,269 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50946.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:26:11,145 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.078e+02 2.874e+02 3.277e+02 4.282e+02 1.387e+03, threshold=6.554e+02, percent-clipped=5.0 +2023-03-09 06:26:33,991 INFO [train.py:898] (1/4) Epoch 15, batch 100, loss[loss=0.2076, simple_loss=0.2877, pruned_loss=0.06372, over 18477.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2627, pruned_loss=0.04376, over 1441596.53 frames. ], batch size: 59, lr: 7.69e-03, grad_scale: 8.0 +2023-03-09 06:26:41,232 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.54 vs. limit=5.0 +2023-03-09 06:27:10,674 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51007.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:27:33,102 INFO [train.py:898] (1/4) Epoch 15, batch 150, loss[loss=0.1846, simple_loss=0.2727, pruned_loss=0.04824, over 16383.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2622, pruned_loss=0.0442, over 1912602.65 frames. 
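
The 'Maximum memory allocated so far is 19889MB' readings above track the CUDA caching allocator's peak rather than current usage, which is why the figure stays flat across successive validation passes: it only moves when a new high-water mark is hit. A one-liner of the kind that produces it; the exact call site in train.py is an assumption:

    import torch

    def max_mem_mb(device: torch.device) -> int:
        # Peak bytes ever allocated on this device since startup (or the
        # last counter reset), converted to the whole megabytes shown here.
        return torch.cuda.max_memory_allocated(device) // (1024 * 1024)
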
], batch size: 95, lr: 7.69e-03, grad_scale: 8.0 +2023-03-09 06:27:43,586 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6635, 2.2852, 2.7500, 2.8989, 3.2777, 5.1163, 4.7256, 3.7908], + device='cuda:1'), covar=tensor([0.1542, 0.2116, 0.2884, 0.1494, 0.2143, 0.0156, 0.0345, 0.0651], + device='cuda:1'), in_proj_covar=tensor([0.0265, 0.0321, 0.0346, 0.0259, 0.0371, 0.0209, 0.0277, 0.0226], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 06:28:09,469 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.782e+02 2.876e+02 3.303e+02 4.141e+02 9.283e+02, threshold=6.606e+02, percent-clipped=4.0 +2023-03-09 06:28:15,438 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51062.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:28:31,737 INFO [train.py:898] (1/4) Epoch 15, batch 200, loss[loss=0.1816, simple_loss=0.2668, pruned_loss=0.04818, over 18114.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2626, pruned_loss=0.04429, over 2270866.37 frames. ], batch size: 62, lr: 7.69e-03, grad_scale: 8.0 +2023-03-09 06:28:33,411 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8052, 4.3690, 4.4236, 3.2372, 3.7158, 3.4199, 2.5384, 2.2651], + device='cuda:1'), covar=tensor([0.0208, 0.0139, 0.0091, 0.0326, 0.0289, 0.0220, 0.0776, 0.0961], + device='cuda:1'), in_proj_covar=tensor([0.0064, 0.0053, 0.0055, 0.0064, 0.0084, 0.0060, 0.0074, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 06:29:07,730 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8506, 2.9227, 2.1117, 3.2303, 2.5038, 3.0324, 2.2526, 2.8573], + device='cuda:1'), covar=tensor([0.0536, 0.0636, 0.1100, 0.0628, 0.0719, 0.0329, 0.0916, 0.0420], + device='cuda:1'), in_proj_covar=tensor([0.0206, 0.0219, 0.0184, 0.0264, 0.0187, 0.0256, 0.0197, 0.0194], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 06:29:11,040 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9710, 5.4282, 5.4451, 5.4166, 4.9548, 5.3262, 4.8573, 5.3696], + device='cuda:1'), covar=tensor([0.0186, 0.0231, 0.0159, 0.0295, 0.0304, 0.0186, 0.0876, 0.0207], + device='cuda:1'), in_proj_covar=tensor([0.0190, 0.0235, 0.0231, 0.0281, 0.0242, 0.0239, 0.0291, 0.0228], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:1') +2023-03-09 06:29:25,592 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51123.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:29:29,639 INFO [train.py:898] (1/4) Epoch 15, batch 250, loss[loss=0.157, simple_loss=0.2427, pruned_loss=0.03567, over 18367.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2634, pruned_loss=0.04478, over 2562856.38 frames. 
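
The attn_weights_entropy tensors interleaved with the loss records are a diagnostic with one value per attention head (eight here, matching the tensors above): the entropy -sum(p * log p) of each head's softmaxed attention weights, averaged over query positions. Values near zero mean a head is locked onto a few positions; values near log(num_keys) mean nearly uniform attention. A self-contained sketch with hypothetical shapes and names:

    import torch

    def attn_weights_entropy(attn: torch.Tensor,
                             eps: float = 1e-20) -> torch.Tensor:
        # attn: (num_heads, num_queries, num_keys), each row a softmax
        # distribution over keys. Returns the mean per-head entropy,
        # the quantity printed in the tensors above.
        ent = -(attn * (attn + eps).log()).sum(dim=-1)
        return ent.mean(dim=-1)

    attn = torch.softmax(torch.randn(8, 50, 50), dim=-1)
    print(attn_weights_entropy(attn))   # 8 values, each below log(50) ~ 3.91
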
], batch size: 46, lr: 7.68e-03, grad_scale: 8.0 +2023-03-09 06:29:41,471 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51138.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:30:03,161 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.733e+02 3.055e+02 3.759e+02 4.768e+02 8.337e+02, threshold=7.517e+02, percent-clipped=9.0 +2023-03-09 06:30:12,425 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51164.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 06:30:21,669 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51172.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 06:30:27,057 INFO [train.py:898] (1/4) Epoch 15, batch 300, loss[loss=0.2019, simple_loss=0.2886, pruned_loss=0.05758, over 16041.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.263, pruned_loss=0.04486, over 2794341.34 frames. ], batch size: 94, lr: 7.68e-03, grad_scale: 8.0 +2023-03-09 06:30:31,975 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4786, 3.4202, 3.2777, 2.9822, 3.2476, 2.5919, 2.5830, 3.4338], + device='cuda:1'), covar=tensor([0.0051, 0.0084, 0.0071, 0.0111, 0.0073, 0.0169, 0.0167, 0.0065], + device='cuda:1'), in_proj_covar=tensor([0.0116, 0.0138, 0.0119, 0.0172, 0.0123, 0.0168, 0.0169, 0.0100], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 06:30:33,335 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-03-09 06:30:56,849 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51202.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:31:26,182 INFO [train.py:898] (1/4) Epoch 15, batch 350, loss[loss=0.1402, simple_loss=0.2224, pruned_loss=0.02895, over 18402.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2639, pruned_loss=0.04509, over 2949729.21 frames. 
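
The tot_loss frame counts in this young epoch climb steadily (2270866 -> 2562856 -> 2794341 -> 2949729 over batches 200-350), yet the epoch-14 records further up hover around 3.6e6 rather than growing without bound. That is the signature of an exponentially decayed, frame-weighted accumulator: with a decay of 1 - 1/200 per batch and roughly 18k frames per batch, the steady state is about 200 * 18k = 3.6e6 frames. A hypothetical sketch of that bookkeeping:

    class RunningLoss:
        # Exponentially decayed accumulator for (loss * frames, frames);
        # update() returns what gets printed as tot_loss[...]. The
        # 1 - 1/200 decay is inferred from the steady-state frame count,
        # not read from train.py.
        def __init__(self, decay: float = 1.0 - 1.0 / 200):
            self.decay = decay
            self.loss_sum = 0.0
            self.frames = 0.0

        def update(self, loss: float, frames: float) -> float:
            self.loss_sum = self.loss_sum * self.decay + loss * frames
            self.frames = self.frames * self.decay + frames
            return self.loss_sum / self.frames
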
], batch size: 43, lr: 7.67e-03, grad_scale: 8.0 +2023-03-09 06:31:33,457 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51233.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 06:31:41,154 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4812, 2.7465, 3.9422, 3.5601, 2.7013, 4.3134, 3.7259, 2.6389], + device='cuda:1'), covar=tensor([0.0483, 0.1368, 0.0298, 0.0389, 0.1344, 0.0176, 0.0511, 0.1021], + device='cuda:1'), in_proj_covar=tensor([0.0199, 0.0229, 0.0178, 0.0150, 0.0216, 0.0194, 0.0221, 0.0193], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 06:31:52,147 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3255, 4.4479, 2.6481, 4.3949, 5.4871, 3.0227, 4.1770, 4.1333], + device='cuda:1'), covar=tensor([0.0112, 0.0977, 0.1456, 0.0484, 0.0048, 0.1074, 0.0536, 0.0640], + device='cuda:1'), in_proj_covar=tensor([0.0135, 0.0246, 0.0193, 0.0190, 0.0100, 0.0177, 0.0206, 0.0213], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 06:31:53,050 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51250.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:32:01,196 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.843e+02 2.755e+02 3.363e+02 4.160e+02 1.249e+03, threshold=6.726e+02, percent-clipped=1.0 +2023-03-09 06:32:08,451 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9515, 4.6299, 4.7389, 3.5543, 4.0425, 3.8380, 2.8397, 2.4954], + device='cuda:1'), covar=tensor([0.0174, 0.0146, 0.0058, 0.0252, 0.0253, 0.0173, 0.0647, 0.0841], + device='cuda:1'), in_proj_covar=tensor([0.0063, 0.0052, 0.0054, 0.0064, 0.0084, 0.0059, 0.0074, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 06:32:25,068 INFO [train.py:898] (1/4) Epoch 15, batch 400, loss[loss=0.1615, simple_loss=0.2424, pruned_loss=0.04034, over 17680.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2628, pruned_loss=0.04453, over 3102940.66 frames. ], batch size: 39, lr: 7.67e-03, grad_scale: 8.0 +2023-03-09 06:32:53,542 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51302.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:33:14,440 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7731, 3.6385, 4.9678, 2.7997, 4.3573, 2.6551, 3.2039, 1.8866], + device='cuda:1'), covar=tensor([0.1131, 0.0846, 0.0134, 0.0866, 0.0565, 0.2423, 0.2531, 0.1991], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0228, 0.0144, 0.0183, 0.0242, 0.0261, 0.0304, 0.0221], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 06:33:24,242 INFO [train.py:898] (1/4) Epoch 15, batch 450, loss[loss=0.1763, simple_loss=0.2657, pruned_loss=0.0435, over 18512.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.263, pruned_loss=0.04442, over 3211685.66 frames. ], batch size: 53, lr: 7.67e-03, grad_scale: 8.0 +2023-03-09 06:33:59,192 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.915e+02 2.968e+02 3.356e+02 4.062e+02 8.839e+02, threshold=6.712e+02, percent-clipped=1.0 +2023-03-09 06:34:23,280 INFO [train.py:898] (1/4) Epoch 15, batch 500, loss[loss=0.1872, simple_loss=0.2755, pruned_loss=0.04951, over 18625.00 frames. 
], tot_loss[loss=0.1763, simple_loss=0.2633, pruned_loss=0.04463, over 3290756.42 frames. ], batch size: 52, lr: 7.66e-03, grad_scale: 8.0 +2023-03-09 06:34:29,097 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51382.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:34:34,793 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6322, 3.4466, 4.7534, 4.2507, 2.9459, 2.8685, 4.2011, 4.8416], + device='cuda:1'), covar=tensor([0.0832, 0.1715, 0.0165, 0.0358, 0.1059, 0.1181, 0.0418, 0.0321], + device='cuda:1'), in_proj_covar=tensor([0.0141, 0.0257, 0.0123, 0.0172, 0.0183, 0.0183, 0.0184, 0.0169], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 06:34:40,430 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51392.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:34:51,290 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. limit=2.0 +2023-03-09 06:35:10,714 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51418.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:35:20,791 INFO [train.py:898] (1/4) Epoch 15, batch 550, loss[loss=0.1742, simple_loss=0.2734, pruned_loss=0.03752, over 18572.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2634, pruned_loss=0.04459, over 3367340.30 frames. ], batch size: 54, lr: 7.66e-03, grad_scale: 8.0 +2023-03-09 06:35:33,756 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51438.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:35:39,462 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51443.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:35:40,733 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6150, 2.1960, 2.7115, 2.4615, 3.3583, 4.9663, 4.5483, 3.5373], + device='cuda:1'), covar=tensor([0.1443, 0.2039, 0.2623, 0.1667, 0.1895, 0.0140, 0.0384, 0.0694], + device='cuda:1'), in_proj_covar=tensor([0.0266, 0.0322, 0.0348, 0.0261, 0.0375, 0.0211, 0.0279, 0.0227], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 06:35:50,547 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51453.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:35:54,566 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.873e+02 3.112e+02 3.809e+02 4.675e+02 7.627e+02, threshold=7.618e+02, percent-clipped=1.0 +2023-03-09 06:36:03,430 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51464.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 06:36:18,193 INFO [train.py:898] (1/4) Epoch 15, batch 600, loss[loss=0.2084, simple_loss=0.2895, pruned_loss=0.06363, over 18386.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2636, pruned_loss=0.04494, over 3408802.94 frames. 
], batch size: 56, lr: 7.66e-03, grad_scale: 8.0 +2023-03-09 06:36:29,453 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51486.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:36:39,768 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51495.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:36:50,058 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2108, 5.4177, 3.1747, 5.2718, 5.1906, 5.4642, 5.2766, 2.9967], + device='cuda:1'), covar=tensor([0.0157, 0.0067, 0.0604, 0.0071, 0.0063, 0.0060, 0.0083, 0.0835], + device='cuda:1'), in_proj_covar=tensor([0.0081, 0.0072, 0.0090, 0.0087, 0.0080, 0.0068, 0.0079, 0.0093], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 06:36:58,933 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51512.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 06:37:16,751 INFO [train.py:898] (1/4) Epoch 15, batch 650, loss[loss=0.1789, simple_loss=0.2746, pruned_loss=0.04165, over 18596.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2638, pruned_loss=0.04487, over 3453993.86 frames. ], batch size: 54, lr: 7.65e-03, grad_scale: 8.0 +2023-03-09 06:37:18,065 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51528.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 06:37:21,606 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3179, 5.5460, 3.0550, 5.3816, 5.2948, 5.5993, 5.3981, 2.9354], + device='cuda:1'), covar=tensor([0.0147, 0.0062, 0.0648, 0.0069, 0.0061, 0.0057, 0.0073, 0.0868], + device='cuda:1'), in_proj_covar=tensor([0.0080, 0.0072, 0.0090, 0.0086, 0.0079, 0.0067, 0.0079, 0.0092], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 06:37:49,924 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2203, 5.2226, 5.2417, 5.0386, 4.9722, 5.0725, 5.4251, 5.4192], + device='cuda:1'), covar=tensor([0.0050, 0.0044, 0.0055, 0.0086, 0.0057, 0.0108, 0.0049, 0.0072], + device='cuda:1'), in_proj_covar=tensor([0.0086, 0.0062, 0.0065, 0.0084, 0.0068, 0.0094, 0.0079, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003], + device='cuda:1') +2023-03-09 06:37:51,240 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51556.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:37:51,903 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.157e+02 2.845e+02 3.389e+02 4.059e+02 1.145e+03, threshold=6.778e+02, percent-clipped=5.0 +2023-03-09 06:38:15,814 INFO [train.py:898] (1/4) Epoch 15, batch 700, loss[loss=0.1633, simple_loss=0.2566, pruned_loss=0.03498, over 18397.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2635, pruned_loss=0.04471, over 3488657.09 frames. ], batch size: 52, lr: 7.65e-03, grad_scale: 8.0 +2023-03-09 06:38:46,188 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51602.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:39:14,428 INFO [train.py:898] (1/4) Epoch 15, batch 750, loss[loss=0.2177, simple_loss=0.3073, pruned_loss=0.06403, over 15953.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2639, pruned_loss=0.04479, over 3515041.47 frames. 
], batch size: 94, lr: 7.65e-03, grad_scale: 8.0 +2023-03-09 06:39:22,032 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9366, 3.9338, 3.6787, 3.2879, 3.7708, 3.1236, 3.0425, 3.8750], + device='cuda:1'), covar=tensor([0.0047, 0.0062, 0.0076, 0.0123, 0.0061, 0.0138, 0.0157, 0.0061], + device='cuda:1'), in_proj_covar=tensor([0.0119, 0.0139, 0.0121, 0.0174, 0.0124, 0.0167, 0.0171, 0.0101], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 06:39:25,452 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0213, 4.1752, 2.4400, 4.0036, 5.1414, 2.5083, 3.8097, 3.9105], + device='cuda:1'), covar=tensor([0.0114, 0.1147, 0.1554, 0.0591, 0.0060, 0.1200, 0.0603, 0.0692], + device='cuda:1'), in_proj_covar=tensor([0.0137, 0.0249, 0.0196, 0.0191, 0.0101, 0.0178, 0.0208, 0.0216], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 06:39:42,128 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51650.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:39:49,770 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.857e+02 2.830e+02 3.241e+02 3.850e+02 9.423e+02, threshold=6.481e+02, percent-clipped=1.0 +2023-03-09 06:40:12,997 INFO [train.py:898] (1/4) Epoch 15, batch 800, loss[loss=0.1589, simple_loss=0.2483, pruned_loss=0.03473, over 18476.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2646, pruned_loss=0.04507, over 3522829.86 frames. ], batch size: 44, lr: 7.64e-03, grad_scale: 8.0 +2023-03-09 06:41:01,427 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51718.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:41:04,139 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. limit=2.0 +2023-03-09 06:41:11,138 INFO [train.py:898] (1/4) Epoch 15, batch 850, loss[loss=0.1698, simple_loss=0.2624, pruned_loss=0.03863, over 18311.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2636, pruned_loss=0.04457, over 3543745.54 frames. ], batch size: 54, lr: 7.64e-03, grad_scale: 8.0 +2023-03-09 06:41:24,443 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51738.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:41:35,542 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51747.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:41:36,622 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51748.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:41:47,046 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.547e+02 2.965e+02 3.491e+02 4.119e+02 9.035e+02, threshold=6.982e+02, percent-clipped=4.0 +2023-03-09 06:41:57,168 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51766.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:42:09,444 INFO [train.py:898] (1/4) Epoch 15, batch 900, loss[loss=0.2002, simple_loss=0.2876, pruned_loss=0.05643, over 16175.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2648, pruned_loss=0.04513, over 3546714.35 frames. ], batch size: 94, lr: 7.63e-03, grad_scale: 8.0 +2023-03-09 06:42:22,185 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.29 vs. 
limit=5.0 +2023-03-09 06:42:47,070 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51808.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:43:06,598 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-03-09 06:43:08,028 INFO [train.py:898] (1/4) Epoch 15, batch 950, loss[loss=0.1506, simple_loss=0.2317, pruned_loss=0.03481, over 18429.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.265, pruned_loss=0.04521, over 3557542.37 frames. ], batch size: 43, lr: 7.63e-03, grad_scale: 8.0 +2023-03-09 06:43:09,554 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51828.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 06:43:26,461 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9944, 5.0172, 5.1242, 4.8052, 4.8656, 4.8701, 5.2166, 5.2371], + device='cuda:1'), covar=tensor([0.0063, 0.0053, 0.0064, 0.0094, 0.0056, 0.0107, 0.0064, 0.0079], + device='cuda:1'), in_proj_covar=tensor([0.0086, 0.0062, 0.0065, 0.0084, 0.0069, 0.0094, 0.0080, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003], + device='cuda:1') +2023-03-09 06:43:37,266 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51851.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:43:43,814 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.174e+02 2.846e+02 3.316e+02 4.013e+02 9.556e+02, threshold=6.632e+02, percent-clipped=3.0 +2023-03-09 06:44:06,164 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51876.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 06:44:06,918 INFO [train.py:898] (1/4) Epoch 15, batch 1000, loss[loss=0.2159, simple_loss=0.3024, pruned_loss=0.06466, over 18240.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2651, pruned_loss=0.04511, over 3573992.32 frames. ], batch size: 60, lr: 7.63e-03, grad_scale: 8.0 +2023-03-09 06:44:16,984 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.30 vs. limit=5.0 +2023-03-09 06:44:56,997 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9250, 5.4335, 5.4644, 5.3899, 5.0701, 5.3189, 4.8044, 5.3337], + device='cuda:1'), covar=tensor([0.0216, 0.0251, 0.0155, 0.0352, 0.0278, 0.0218, 0.0994, 0.0271], + device='cuda:1'), in_proj_covar=tensor([0.0193, 0.0240, 0.0232, 0.0284, 0.0244, 0.0242, 0.0292, 0.0233], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:1') +2023-03-09 06:44:58,142 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8287, 4.8171, 4.8776, 4.7083, 4.7136, 4.6870, 5.0614, 5.0800], + device='cuda:1'), covar=tensor([0.0062, 0.0069, 0.0069, 0.0093, 0.0061, 0.0124, 0.0068, 0.0082], + device='cuda:1'), in_proj_covar=tensor([0.0087, 0.0062, 0.0066, 0.0084, 0.0069, 0.0095, 0.0081, 0.0080], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003], + device='cuda:1') +2023-03-09 06:45:05,538 INFO [train.py:898] (1/4) Epoch 15, batch 1050, loss[loss=0.1662, simple_loss=0.256, pruned_loss=0.03815, over 18538.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.265, pruned_loss=0.04483, over 3578911.54 frames. 
], batch size: 49, lr: 7.62e-03, grad_scale: 8.0 +2023-03-09 06:45:39,865 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.929e+02 2.891e+02 3.197e+02 3.857e+02 9.406e+02, threshold=6.393e+02, percent-clipped=2.0 +2023-03-09 06:46:03,762 INFO [train.py:898] (1/4) Epoch 15, batch 1100, loss[loss=0.1959, simple_loss=0.2802, pruned_loss=0.05581, over 12049.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2654, pruned_loss=0.0451, over 3565908.55 frames. ], batch size: 129, lr: 7.62e-03, grad_scale: 8.0 +2023-03-09 06:46:50,972 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6497, 3.5542, 2.4662, 4.6176, 3.1437, 4.5899, 2.5181, 4.3986], + device='cuda:1'), covar=tensor([0.0605, 0.0772, 0.1252, 0.0376, 0.0793, 0.0279, 0.1138, 0.0268], + device='cuda:1'), in_proj_covar=tensor([0.0205, 0.0219, 0.0183, 0.0264, 0.0186, 0.0255, 0.0197, 0.0195], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 06:46:56,418 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-03-09 06:47:02,759 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5371, 3.8880, 2.3868, 3.6512, 4.7365, 2.4729, 3.3186, 3.7714], + device='cuda:1'), covar=tensor([0.0166, 0.1081, 0.1658, 0.0677, 0.0090, 0.1339, 0.0893, 0.0631], + device='cuda:1'), in_proj_covar=tensor([0.0137, 0.0249, 0.0194, 0.0189, 0.0100, 0.0178, 0.0207, 0.0212], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 06:47:06,930 INFO [train.py:898] (1/4) Epoch 15, batch 1150, loss[loss=0.1691, simple_loss=0.2485, pruned_loss=0.04482, over 18257.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2643, pruned_loss=0.04465, over 3580633.39 frames. ], batch size: 45, lr: 7.62e-03, grad_scale: 8.0 +2023-03-09 06:47:17,968 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. 
limit=2.0 +2023-03-09 06:47:20,004 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52038.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:47:22,368 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52040.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:47:22,439 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.3640, 3.0120, 3.8831, 3.6795, 2.9393, 2.8680, 3.6463, 3.9677], + device='cuda:1'), covar=tensor([0.0824, 0.1188, 0.0198, 0.0339, 0.0823, 0.0954, 0.0365, 0.0346], + device='cuda:1'), in_proj_covar=tensor([0.0141, 0.0257, 0.0122, 0.0172, 0.0183, 0.0182, 0.0183, 0.0170], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 06:47:27,448 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6397, 3.6060, 3.3918, 3.0338, 3.4128, 2.6344, 2.6913, 3.6988], + device='cuda:1'), covar=tensor([0.0051, 0.0070, 0.0082, 0.0121, 0.0081, 0.0186, 0.0190, 0.0047], + device='cuda:1'), in_proj_covar=tensor([0.0120, 0.0142, 0.0122, 0.0176, 0.0126, 0.0170, 0.0175, 0.0102], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 06:47:31,818 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52048.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:47:42,029 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.912e+02 2.945e+02 3.374e+02 4.146e+02 9.750e+02, threshold=6.749e+02, percent-clipped=3.0 +2023-03-09 06:48:05,577 INFO [train.py:898] (1/4) Epoch 15, batch 1200, loss[loss=0.166, simple_loss=0.2575, pruned_loss=0.03722, over 18479.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2638, pruned_loss=0.04435, over 3585447.21 frames. ], batch size: 53, lr: 7.61e-03, grad_scale: 8.0 +2023-03-09 06:48:15,930 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52086.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:48:27,258 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52096.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:48:33,694 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52101.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:48:35,665 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52103.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:49:03,953 INFO [train.py:898] (1/4) Epoch 15, batch 1250, loss[loss=0.1708, simple_loss=0.261, pruned_loss=0.04032, over 18417.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2645, pruned_loss=0.04435, over 3591308.85 frames. 
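
The zipformer.py lines carrying warmup_begin/warmup_end/layers_to_drop record stochastic whole-layer skipping. Each encoder stack has its own warm-up window (the begin/end pairs above differ per stack), and this deep into training (batch_count around 52k, far past every warmup_end) the skip probability has decayed to a small floor, which is why most entries show num_to_drop=0 with an occasional single dropped layer. A toy sketch of such a schedule; the ramp endpoints 0.5 and 0.075 and the sampling rule are illustrative guesses, not the Zipformer defaults:

    import random

    def layers_to_drop(num_layers: int, batch_count: float,
                       warmup_begin: float, warmup_end: float,
                       initial_p: float = 0.5, final_p: float = 0.075) -> set:
        # Linearly ramp the per-layer skip probability from initial_p to
        # final_p across [warmup_begin, warmup_end], then hold the floor.
        span = warmup_end - warmup_begin
        t = min(max((batch_count - warmup_begin) / span, 0.0), 1.0)
        p = initial_p + t * (final_p - initial_p)
        return {i for i in range(num_layers) if random.random() < p}

    # Past warmup, as here: usually set(), occasionally one layer index.
    print(layers_to_drop(4, batch_count=52048.0,
                         warmup_begin=2000.0, warmup_end=2666.7))
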
], batch size: 48, lr: 7.61e-03, grad_scale: 8.0 +2023-03-09 06:49:23,606 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4772, 2.7087, 2.4141, 2.8115, 3.5866, 3.5645, 3.0107, 2.8988], + device='cuda:1'), covar=tensor([0.0184, 0.0260, 0.0506, 0.0362, 0.0148, 0.0127, 0.0331, 0.0340], + device='cuda:1'), in_proj_covar=tensor([0.0126, 0.0122, 0.0155, 0.0148, 0.0113, 0.0101, 0.0141, 0.0142], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 06:49:31,476 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52151.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:49:38,915 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.755e+02 2.743e+02 3.530e+02 4.492e+02 1.221e+03, threshold=7.059e+02, percent-clipped=4.0 +2023-03-09 06:49:41,457 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0638, 3.2426, 4.5254, 4.1106, 2.9459, 4.8621, 4.1950, 3.0773], + device='cuda:1'), covar=tensor([0.0381, 0.1147, 0.0195, 0.0277, 0.1224, 0.0155, 0.0371, 0.0904], + device='cuda:1'), in_proj_covar=tensor([0.0199, 0.0228, 0.0177, 0.0149, 0.0215, 0.0192, 0.0222, 0.0191], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 06:50:02,589 INFO [train.py:898] (1/4) Epoch 15, batch 1300, loss[loss=0.1587, simple_loss=0.2433, pruned_loss=0.03707, over 18252.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2634, pruned_loss=0.04401, over 3596273.73 frames. ], batch size: 47, lr: 7.61e-03, grad_scale: 8.0 +2023-03-09 06:50:21,020 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52193.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:50:27,686 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52199.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:51:00,963 INFO [train.py:898] (1/4) Epoch 15, batch 1350, loss[loss=0.1595, simple_loss=0.2386, pruned_loss=0.04024, over 18422.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2637, pruned_loss=0.04438, over 3583083.88 frames. ], batch size: 43, lr: 7.60e-03, grad_scale: 8.0 +2023-03-09 06:51:32,120 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9458, 4.9291, 4.5162, 4.8903, 4.8308, 4.3242, 4.8210, 4.5475], + device='cuda:1'), covar=tensor([0.0449, 0.0482, 0.1461, 0.0654, 0.0576, 0.0449, 0.0422, 0.1096], + device='cuda:1'), in_proj_covar=tensor([0.0449, 0.0507, 0.0665, 0.0396, 0.0390, 0.0461, 0.0489, 0.0629], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 06:51:32,196 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52254.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:51:35,201 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.856e+02 2.890e+02 3.384e+02 4.121e+02 8.952e+02, threshold=6.768e+02, percent-clipped=2.0 +2023-03-09 06:51:59,004 INFO [train.py:898] (1/4) Epoch 15, batch 1400, loss[loss=0.194, simple_loss=0.2794, pruned_loss=0.0543, over 18208.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2641, pruned_loss=0.04437, over 3576439.87 frames. 
], batch size: 60, lr: 7.60e-03, grad_scale: 8.0 +2023-03-09 06:52:17,975 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3826, 3.1120, 1.8464, 4.2569, 2.7108, 3.9359, 2.1226, 3.4272], + device='cuda:1'), covar=tensor([0.0592, 0.0947, 0.1754, 0.0456, 0.1020, 0.0343, 0.1418, 0.0598], + device='cuda:1'), in_proj_covar=tensor([0.0205, 0.0218, 0.0183, 0.0263, 0.0187, 0.0255, 0.0197, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 06:52:22,443 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4615, 6.0655, 5.5720, 5.8220, 5.5943, 5.5222, 6.0747, 6.0352], + device='cuda:1'), covar=tensor([0.1091, 0.0588, 0.0377, 0.0549, 0.1345, 0.0634, 0.0509, 0.0597], + device='cuda:1'), in_proj_covar=tensor([0.0567, 0.0475, 0.0354, 0.0504, 0.0691, 0.0500, 0.0674, 0.0504], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 06:52:40,495 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52312.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:52:45,061 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3348, 3.1676, 2.1868, 4.1659, 2.7376, 4.0468, 2.2376, 3.5995], + device='cuda:1'), covar=tensor([0.0607, 0.0840, 0.1359, 0.0411, 0.0906, 0.0264, 0.1242, 0.0430], + device='cuda:1'), in_proj_covar=tensor([0.0205, 0.0218, 0.0184, 0.0264, 0.0188, 0.0257, 0.0198, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 06:52:57,734 INFO [train.py:898] (1/4) Epoch 15, batch 1450, loss[loss=0.1671, simple_loss=0.261, pruned_loss=0.03657, over 17015.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2631, pruned_loss=0.04408, over 3575970.68 frames. ], batch size: 78, lr: 7.59e-03, grad_scale: 8.0 +2023-03-09 06:53:09,828 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9938, 3.4630, 2.5114, 3.4749, 4.0903, 2.6108, 3.3135, 3.4084], + device='cuda:1'), covar=tensor([0.0199, 0.1191, 0.1340, 0.0513, 0.0108, 0.1053, 0.0697, 0.0745], + device='cuda:1'), in_proj_covar=tensor([0.0139, 0.0252, 0.0196, 0.0190, 0.0101, 0.0179, 0.0210, 0.0214], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 06:53:31,867 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.869e+02 2.900e+02 3.474e+02 4.248e+02 1.297e+03, threshold=6.947e+02, percent-clipped=2.0 +2023-03-09 06:53:51,204 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52373.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:53:55,302 INFO [train.py:898] (1/4) Epoch 15, batch 1500, loss[loss=0.2136, simple_loss=0.3033, pruned_loss=0.0619, over 18471.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2631, pruned_loss=0.04407, over 3578275.71 frames. ], batch size: 59, lr: 7.59e-03, grad_scale: 8.0 +2023-03-09 06:54:18,222 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52396.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:54:26,400 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52403.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:54:54,575 INFO [train.py:898] (1/4) Epoch 15, batch 1550, loss[loss=0.1792, simple_loss=0.2721, pruned_loss=0.04317, over 18312.00 frames. 
], tot_loss[loss=0.1754, simple_loss=0.2628, pruned_loss=0.04396, over 3584876.25 frames. ], batch size: 57, lr: 7.59e-03, grad_scale: 8.0 +2023-03-09 06:55:23,295 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52451.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:55:29,983 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.173e+02 2.872e+02 3.351e+02 3.820e+02 1.204e+03, threshold=6.703e+02, percent-clipped=1.0 +2023-03-09 06:55:31,500 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9273, 5.4718, 5.1725, 5.1763, 5.0183, 4.9973, 5.5209, 5.4339], + device='cuda:1'), covar=tensor([0.1329, 0.0889, 0.0639, 0.0812, 0.1740, 0.0775, 0.0706, 0.0869], + device='cuda:1'), in_proj_covar=tensor([0.0575, 0.0482, 0.0355, 0.0512, 0.0698, 0.0508, 0.0684, 0.0510], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 06:55:54,079 INFO [train.py:898] (1/4) Epoch 15, batch 1600, loss[loss=0.2067, simple_loss=0.2928, pruned_loss=0.0603, over 17309.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2633, pruned_loss=0.0441, over 3576475.74 frames. ], batch size: 78, lr: 7.58e-03, grad_scale: 16.0 +2023-03-09 06:56:15,010 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-03-09 06:56:39,679 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9574, 5.0206, 5.2113, 5.1929, 4.9461, 5.7742, 5.4001, 5.0627], + device='cuda:1'), covar=tensor([0.1043, 0.0645, 0.0656, 0.0665, 0.1323, 0.0685, 0.0686, 0.1670], + device='cuda:1'), in_proj_covar=tensor([0.0333, 0.0259, 0.0275, 0.0278, 0.0313, 0.0386, 0.0254, 0.0379], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0004, 0.0002, 0.0003], + device='cuda:1') +2023-03-09 06:56:52,754 INFO [train.py:898] (1/4) Epoch 15, batch 1650, loss[loss=0.162, simple_loss=0.2595, pruned_loss=0.03222, over 18499.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2627, pruned_loss=0.04382, over 3588349.94 frames. ], batch size: 53, lr: 7.58e-03, grad_scale: 16.0 +2023-03-09 06:57:18,397 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52549.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:57:25,746 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52555.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 06:57:27,651 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.792e+02 2.929e+02 3.401e+02 4.256e+02 6.511e+02, threshold=6.802e+02, percent-clipped=0.0 +2023-03-09 06:57:33,916 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0577, 3.0639, 4.6648, 4.1222, 2.7373, 4.8078, 4.0791, 3.0867], + device='cuda:1'), covar=tensor([0.0345, 0.1337, 0.0187, 0.0279, 0.1503, 0.0161, 0.0532, 0.0934], + device='cuda:1'), in_proj_covar=tensor([0.0200, 0.0231, 0.0179, 0.0150, 0.0217, 0.0194, 0.0224, 0.0192], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 06:57:50,965 INFO [train.py:898] (1/4) Epoch 15, batch 1700, loss[loss=0.1829, simple_loss=0.2729, pruned_loss=0.04639, over 18330.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2624, pruned_loss=0.04379, over 3577039.79 frames. 
], batch size: 55, lr: 7.58e-03, grad_scale: 16.0 +2023-03-09 06:57:55,136 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9394, 5.4542, 5.4180, 5.4512, 4.9534, 5.3608, 4.7499, 5.3085], + device='cuda:1'), covar=tensor([0.0251, 0.0262, 0.0193, 0.0339, 0.0358, 0.0191, 0.1028, 0.0287], + device='cuda:1'), in_proj_covar=tensor([0.0194, 0.0244, 0.0236, 0.0285, 0.0247, 0.0243, 0.0296, 0.0237], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 06:58:06,960 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3189, 5.3026, 4.8154, 5.2350, 5.2003, 4.6357, 5.1404, 4.8960], + device='cuda:1'), covar=tensor([0.0458, 0.0473, 0.1596, 0.0746, 0.0596, 0.0421, 0.0414, 0.1048], + device='cuda:1'), in_proj_covar=tensor([0.0449, 0.0502, 0.0662, 0.0395, 0.0391, 0.0457, 0.0490, 0.0629], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 06:58:37,341 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52616.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 06:58:49,972 INFO [train.py:898] (1/4) Epoch 15, batch 1750, loss[loss=0.1685, simple_loss=0.2503, pruned_loss=0.04336, over 18356.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2626, pruned_loss=0.04382, over 3584612.78 frames. ], batch size: 46, lr: 7.57e-03, grad_scale: 8.0 +2023-03-09 06:59:25,798 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.4113, 2.0617, 2.0482, 2.1437, 2.5093, 2.5363, 2.3961, 2.1448], + device='cuda:1'), covar=tensor([0.0210, 0.0230, 0.0458, 0.0378, 0.0215, 0.0160, 0.0330, 0.0287], + device='cuda:1'), in_proj_covar=tensor([0.0125, 0.0123, 0.0157, 0.0148, 0.0112, 0.0101, 0.0139, 0.0144], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 06:59:26,518 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.883e+02 2.725e+02 3.268e+02 4.051e+02 8.662e+02, threshold=6.535e+02, percent-clipped=2.0 +2023-03-09 06:59:37,957 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52668.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:59:48,454 INFO [train.py:898] (1/4) Epoch 15, batch 1800, loss[loss=0.1966, simple_loss=0.2794, pruned_loss=0.05688, over 17053.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2638, pruned_loss=0.04452, over 3563435.05 frames. ], batch size: 78, lr: 7.57e-03, grad_scale: 8.0 +2023-03-09 07:00:11,348 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52696.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:00:45,351 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-09 07:00:46,786 INFO [train.py:898] (1/4) Epoch 15, batch 1850, loss[loss=0.1911, simple_loss=0.2781, pruned_loss=0.05204, over 17078.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2637, pruned_loss=0.04458, over 3563264.13 frames. 
], batch size: 78, lr: 7.57e-03, grad_scale: 8.0 +2023-03-09 07:00:47,788 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8949, 4.6081, 4.6422, 3.4478, 3.7637, 3.7397, 2.6890, 2.6227], + device='cuda:1'), covar=tensor([0.0203, 0.0147, 0.0088, 0.0320, 0.0330, 0.0207, 0.0752, 0.0828], + device='cuda:1'), in_proj_covar=tensor([0.0064, 0.0052, 0.0054, 0.0063, 0.0083, 0.0060, 0.0073, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0004, 0.0006, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-09 07:01:07,604 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52744.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:01:17,404 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52752.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:01:23,878 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.980e+02 2.718e+02 3.382e+02 4.103e+02 9.791e+02, threshold=6.764e+02, percent-clipped=3.0 +2023-03-09 07:01:46,238 INFO [train.py:898] (1/4) Epoch 15, batch 1900, loss[loss=0.1635, simple_loss=0.2441, pruned_loss=0.04147, over 18403.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.263, pruned_loss=0.04423, over 3569816.85 frames. ], batch size: 42, lr: 7.56e-03, grad_scale: 8.0 +2023-03-09 07:02:12,199 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6050, 2.8691, 2.7172, 2.9798, 3.7034, 3.6821, 3.1398, 2.8842], + device='cuda:1'), covar=tensor([0.0204, 0.0304, 0.0502, 0.0349, 0.0154, 0.0107, 0.0304, 0.0383], + device='cuda:1'), in_proj_covar=tensor([0.0126, 0.0123, 0.0157, 0.0149, 0.0113, 0.0101, 0.0140, 0.0146], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 07:02:29,941 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52813.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:02:45,705 INFO [train.py:898] (1/4) Epoch 15, batch 1950, loss[loss=0.1955, simple_loss=0.2775, pruned_loss=0.0568, over 17953.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2627, pruned_loss=0.04421, over 3564928.11 frames. ], batch size: 65, lr: 7.56e-03, grad_scale: 8.0 +2023-03-09 07:03:12,914 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52849.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:03:22,534 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.851e+02 2.886e+02 3.501e+02 4.238e+02 6.243e+02, threshold=7.003e+02, percent-clipped=0.0 +2023-03-09 07:03:22,975 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7526, 4.4435, 4.5478, 3.2753, 3.6529, 3.6342, 2.5561, 2.4678], + device='cuda:1'), covar=tensor([0.0225, 0.0145, 0.0076, 0.0322, 0.0298, 0.0198, 0.0761, 0.0821], + device='cuda:1'), in_proj_covar=tensor([0.0064, 0.0052, 0.0054, 0.0063, 0.0084, 0.0060, 0.0074, 0.0080], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0004, 0.0006, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-09 07:03:44,569 INFO [train.py:898] (1/4) Epoch 15, batch 2000, loss[loss=0.1663, simple_loss=0.2623, pruned_loss=0.03514, over 16050.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2627, pruned_loss=0.04378, over 3575947.42 frames. ], batch size: 94, lr: 7.56e-03, grad_scale: 8.0 +2023-03-09 07:04:07,201 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. 
limit=2.0 +2023-03-09 07:04:08,642 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52897.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:04:21,921 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6410, 5.1538, 5.1221, 5.1844, 4.6318, 4.9824, 4.4600, 4.9749], + device='cuda:1'), covar=tensor([0.0221, 0.0299, 0.0202, 0.0334, 0.0364, 0.0253, 0.1132, 0.0309], + device='cuda:1'), in_proj_covar=tensor([0.0193, 0.0242, 0.0235, 0.0286, 0.0247, 0.0244, 0.0295, 0.0236], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 07:04:25,170 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52911.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 07:04:43,179 INFO [train.py:898] (1/4) Epoch 15, batch 2050, loss[loss=0.1587, simple_loss=0.2406, pruned_loss=0.03838, over 18397.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2632, pruned_loss=0.04425, over 3583871.66 frames. ], batch size: 42, lr: 7.55e-03, grad_scale: 8.0 +2023-03-09 07:04:43,650 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7790, 3.5467, 5.1783, 2.9802, 4.5895, 2.5316, 3.0432, 1.8200], + device='cuda:1'), covar=tensor([0.1052, 0.0872, 0.0091, 0.0675, 0.0427, 0.2495, 0.2382, 0.1924], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0229, 0.0146, 0.0186, 0.0245, 0.0261, 0.0305, 0.0223], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 07:04:59,878 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.38 vs. limit=5.0 +2023-03-09 07:05:19,675 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.048e+02 3.109e+02 3.661e+02 4.518e+02 9.006e+02, threshold=7.322e+02, percent-clipped=1.0 +2023-03-09 07:05:30,931 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52968.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:05:41,826 INFO [train.py:898] (1/4) Epoch 15, batch 2100, loss[loss=0.1791, simple_loss=0.271, pruned_loss=0.04362, over 17170.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2633, pruned_loss=0.0444, over 3590125.08 frames. ], batch size: 78, lr: 7.55e-03, grad_scale: 8.0 +2023-03-09 07:06:28,619 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53016.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:06:41,350 INFO [train.py:898] (1/4) Epoch 15, batch 2150, loss[loss=0.1892, simple_loss=0.2779, pruned_loss=0.05023, over 17945.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2636, pruned_loss=0.04446, over 3579188.21 frames. ], batch size: 65, lr: 7.54e-03, grad_scale: 8.0 +2023-03-09 07:07:15,741 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53056.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:07:17,594 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.753e+02 2.734e+02 3.607e+02 4.426e+02 8.494e+02, threshold=7.214e+02, percent-clipped=2.0 +2023-03-09 07:07:40,261 INFO [train.py:898] (1/4) Epoch 15, batch 2200, loss[loss=0.1724, simple_loss=0.2625, pruned_loss=0.04119, over 17742.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2638, pruned_loss=0.04446, over 3579736.13 frames. 
], batch size: 70, lr: 7.54e-03, grad_scale: 8.0 +2023-03-09 07:08:11,004 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8251, 5.3036, 5.2879, 5.3160, 4.8581, 5.2187, 4.6265, 5.1722], + device='cuda:1'), covar=tensor([0.0234, 0.0280, 0.0209, 0.0323, 0.0341, 0.0204, 0.1165, 0.0301], + device='cuda:1'), in_proj_covar=tensor([0.0197, 0.0245, 0.0237, 0.0289, 0.0249, 0.0246, 0.0298, 0.0238], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 07:08:16,873 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53108.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:08:27,973 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53117.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:08:38,355 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6559, 3.8437, 5.1202, 4.3486, 3.4980, 2.9297, 4.6188, 5.2324], + device='cuda:1'), covar=tensor([0.0764, 0.1343, 0.0156, 0.0400, 0.0818, 0.1172, 0.0319, 0.0235], + device='cuda:1'), in_proj_covar=tensor([0.0141, 0.0258, 0.0125, 0.0172, 0.0182, 0.0183, 0.0183, 0.0171], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 07:08:39,055 INFO [train.py:898] (1/4) Epoch 15, batch 2250, loss[loss=0.1726, simple_loss=0.2619, pruned_loss=0.04166, over 18544.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2629, pruned_loss=0.04408, over 3581618.54 frames. ], batch size: 49, lr: 7.54e-03, grad_scale: 8.0 +2023-03-09 07:09:15,602 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.829e+02 2.883e+02 3.334e+02 3.992e+02 8.132e+02, threshold=6.668e+02, percent-clipped=1.0 +2023-03-09 07:09:37,841 INFO [train.py:898] (1/4) Epoch 15, batch 2300, loss[loss=0.188, simple_loss=0.2762, pruned_loss=0.0499, over 18020.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2622, pruned_loss=0.04352, over 3588523.71 frames. ], batch size: 65, lr: 7.53e-03, grad_scale: 8.0 +2023-03-09 07:09:49,251 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8866, 4.0919, 2.4372, 4.0786, 5.0677, 2.3498, 3.6483, 3.9541], + device='cuda:1'), covar=tensor([0.0141, 0.1003, 0.1558, 0.0504, 0.0062, 0.1321, 0.0691, 0.0656], + device='cuda:1'), in_proj_covar=tensor([0.0141, 0.0251, 0.0196, 0.0188, 0.0101, 0.0179, 0.0209, 0.0215], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 07:09:49,427 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-03-09 07:10:18,766 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53211.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 07:10:37,448 INFO [train.py:898] (1/4) Epoch 15, batch 2350, loss[loss=0.1584, simple_loss=0.2455, pruned_loss=0.0356, over 18267.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2621, pruned_loss=0.04334, over 3584149.58 frames. 
], batch size: 47, lr: 7.53e-03, grad_scale: 8.0 +2023-03-09 07:10:42,756 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7626, 5.1033, 5.0731, 5.1157, 4.6851, 5.0256, 4.4962, 4.9996], + device='cuda:1'), covar=tensor([0.0198, 0.0296, 0.0207, 0.0375, 0.0340, 0.0205, 0.0987, 0.0289], + device='cuda:1'), in_proj_covar=tensor([0.0193, 0.0239, 0.0233, 0.0285, 0.0245, 0.0241, 0.0291, 0.0235], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:1') +2023-03-09 07:10:54,296 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53241.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:11:14,250 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.169e+02 3.108e+02 3.704e+02 4.390e+02 1.359e+03, threshold=7.409e+02, percent-clipped=1.0 +2023-03-09 07:11:15,588 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53259.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 07:11:30,138 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-09 07:11:33,075 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3316, 4.7399, 4.2946, 4.5957, 4.4402, 4.4321, 4.8389, 4.7215], + device='cuda:1'), covar=tensor([0.1139, 0.0794, 0.1917, 0.0758, 0.1406, 0.0676, 0.0681, 0.0768], + device='cuda:1'), in_proj_covar=tensor([0.0576, 0.0483, 0.0355, 0.0512, 0.0700, 0.0508, 0.0687, 0.0512], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 07:11:36,253 INFO [train.py:898] (1/4) Epoch 15, batch 2400, loss[loss=0.1548, simple_loss=0.2355, pruned_loss=0.03709, over 18272.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2623, pruned_loss=0.04353, over 3574312.77 frames. ], batch size: 45, lr: 7.53e-03, grad_scale: 8.0 +2023-03-09 07:11:50,711 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4922, 2.7579, 2.6100, 2.9250, 3.5509, 3.5651, 3.0614, 2.9254], + device='cuda:1'), covar=tensor([0.0195, 0.0341, 0.0560, 0.0381, 0.0202, 0.0141, 0.0389, 0.0402], + device='cuda:1'), in_proj_covar=tensor([0.0129, 0.0124, 0.0158, 0.0151, 0.0115, 0.0102, 0.0142, 0.0145], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 07:12:04,917 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53302.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:12:07,741 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53304.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:12:20,485 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53315.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:12:34,324 INFO [train.py:898] (1/4) Epoch 15, batch 2450, loss[loss=0.1777, simple_loss=0.2671, pruned_loss=0.04413, over 18563.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2627, pruned_loss=0.04369, over 3574618.74 frames. 
], batch size: 49, lr: 7.52e-03, grad_scale: 8.0 +2023-03-09 07:13:10,224 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.975e+02 3.035e+02 3.526e+02 4.195e+02 8.583e+02, threshold=7.052e+02, percent-clipped=2.0 +2023-03-09 07:13:19,460 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53365.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:13:21,695 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53367.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:13:32,569 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53376.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:13:33,362 INFO [train.py:898] (1/4) Epoch 15, batch 2500, loss[loss=0.1736, simple_loss=0.2565, pruned_loss=0.04538, over 18267.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2629, pruned_loss=0.04359, over 3572049.61 frames. ], batch size: 47, lr: 7.52e-03, grad_scale: 8.0 +2023-03-09 07:13:37,061 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5562, 3.4682, 3.2910, 2.9629, 3.2503, 2.5715, 2.5368, 3.4439], + device='cuda:1'), covar=tensor([0.0061, 0.0095, 0.0095, 0.0136, 0.0104, 0.0211, 0.0217, 0.0077], + device='cuda:1'), in_proj_covar=tensor([0.0117, 0.0138, 0.0119, 0.0170, 0.0123, 0.0165, 0.0169, 0.0100], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 07:14:09,350 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53408.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:14:14,220 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53412.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:14:32,212 INFO [train.py:898] (1/4) Epoch 15, batch 2550, loss[loss=0.1788, simple_loss=0.2732, pruned_loss=0.04219, over 18626.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2638, pruned_loss=0.04409, over 3580499.49 frames. ], batch size: 52, lr: 7.52e-03, grad_scale: 8.0 +2023-03-09 07:14:33,598 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53428.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 07:14:49,228 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0711, 5.0925, 5.2034, 5.1677, 5.0525, 5.7497, 5.3669, 5.1286], + device='cuda:1'), covar=tensor([0.1194, 0.0793, 0.0796, 0.0809, 0.1508, 0.0818, 0.0746, 0.1757], + device='cuda:1'), in_proj_covar=tensor([0.0340, 0.0266, 0.0284, 0.0283, 0.0316, 0.0395, 0.0259, 0.0388], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:1') +2023-03-09 07:15:05,467 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53456.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:15:07,572 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.050e+02 2.986e+02 3.579e+02 4.675e+02 1.603e+03, threshold=7.159e+02, percent-clipped=6.0 +2023-03-09 07:15:30,231 INFO [train.py:898] (1/4) Epoch 15, batch 2600, loss[loss=0.1667, simple_loss=0.2471, pruned_loss=0.04312, over 18356.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2623, pruned_loss=0.04367, over 3582711.50 frames. ], batch size: 46, lr: 7.51e-03, grad_scale: 8.0 +2023-03-09 07:16:29,545 INFO [train.py:898] (1/4) Epoch 15, batch 2650, loss[loss=0.1725, simple_loss=0.2702, pruned_loss=0.03739, over 18102.00 frames. 
], tot_loss[loss=0.1751, simple_loss=0.2627, pruned_loss=0.04379, over 3574777.24 frames. ], batch size: 62, lr: 7.51e-03, grad_scale: 8.0 +2023-03-09 07:17:05,495 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.951e+02 2.884e+02 3.324e+02 4.046e+02 9.778e+02, threshold=6.647e+02, percent-clipped=2.0 +2023-03-09 07:17:27,748 INFO [train.py:898] (1/4) Epoch 15, batch 2700, loss[loss=0.1625, simple_loss=0.2496, pruned_loss=0.03766, over 18491.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2629, pruned_loss=0.04396, over 3570134.17 frames. ], batch size: 44, lr: 7.51e-03, grad_scale: 8.0 +2023-03-09 07:17:51,379 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53597.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:18:26,762 INFO [train.py:898] (1/4) Epoch 15, batch 2750, loss[loss=0.1435, simple_loss=0.2254, pruned_loss=0.03082, over 18452.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2622, pruned_loss=0.04378, over 3563200.07 frames. ], batch size: 43, lr: 7.50e-03, grad_scale: 8.0 +2023-03-09 07:18:42,874 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 07:19:03,250 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.144e+02 2.856e+02 3.450e+02 4.147e+02 9.079e+02, threshold=6.900e+02, percent-clipped=4.0 +2023-03-09 07:19:05,844 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53660.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:19:18,639 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53671.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:19:24,960 INFO [train.py:898] (1/4) Epoch 15, batch 2800, loss[loss=0.1563, simple_loss=0.2441, pruned_loss=0.03424, over 18166.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.262, pruned_loss=0.04367, over 3572157.37 frames. ], batch size: 44, lr: 7.50e-03, grad_scale: 8.0 +2023-03-09 07:20:06,931 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53712.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:20:20,066 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53723.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 07:20:24,341 INFO [train.py:898] (1/4) Epoch 15, batch 2850, loss[loss=0.1687, simple_loss=0.2565, pruned_loss=0.04045, over 18424.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2617, pruned_loss=0.04331, over 3576824.54 frames. ], batch size: 48, lr: 7.50e-03, grad_scale: 8.0 +2023-03-09 07:20:32,403 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.52 vs. limit=5.0 +2023-03-09 07:20:35,985 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. 
limit=2.0 +2023-03-09 07:21:00,854 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.810e+02 2.667e+02 3.515e+02 4.128e+02 7.427e+02, threshold=7.030e+02, percent-clipped=1.0 +2023-03-09 07:21:02,320 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1155, 5.5492, 3.0384, 5.3349, 5.2410, 5.5747, 5.3619, 2.7617], + device='cuda:1'), covar=tensor([0.0183, 0.0047, 0.0627, 0.0064, 0.0060, 0.0048, 0.0070, 0.0951], + device='cuda:1'), in_proj_covar=tensor([0.0083, 0.0075, 0.0092, 0.0089, 0.0081, 0.0072, 0.0081, 0.0095], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 07:21:03,257 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53760.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:21:22,592 INFO [train.py:898] (1/4) Epoch 15, batch 2900, loss[loss=0.1819, simple_loss=0.2687, pruned_loss=0.04757, over 18106.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.262, pruned_loss=0.04346, over 3577129.17 frames. ], batch size: 62, lr: 7.49e-03, grad_scale: 8.0 +2023-03-09 07:21:59,015 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-09 07:22:21,622 INFO [train.py:898] (1/4) Epoch 15, batch 2950, loss[loss=0.1708, simple_loss=0.2634, pruned_loss=0.0391, over 18291.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2616, pruned_loss=0.04321, over 3585829.50 frames. ], batch size: 54, lr: 7.49e-03, grad_scale: 8.0 +2023-03-09 07:22:42,381 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2502, 4.2199, 2.6682, 4.2277, 5.4292, 2.7896, 4.1202, 4.1987], + device='cuda:1'), covar=tensor([0.0114, 0.1292, 0.1474, 0.0529, 0.0061, 0.1128, 0.0532, 0.0645], + device='cuda:1'), in_proj_covar=tensor([0.0140, 0.0248, 0.0195, 0.0187, 0.0101, 0.0177, 0.0205, 0.0213], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 07:22:58,162 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.012e+02 2.809e+02 3.403e+02 4.048e+02 1.283e+03, threshold=6.805e+02, percent-clipped=2.0 +2023-03-09 07:23:20,580 INFO [train.py:898] (1/4) Epoch 15, batch 3000, loss[loss=0.166, simple_loss=0.2527, pruned_loss=0.03959, over 18631.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2609, pruned_loss=0.0428, over 3595546.55 frames. ], batch size: 52, lr: 7.49e-03, grad_scale: 8.0 +2023-03-09 07:23:20,580 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 07:23:32,494 INFO [train.py:932] (1/4) Epoch 15, validation: loss=0.1532, simple_loss=0.254, pruned_loss=0.02619, over 944034.00 frames. 
+2023-03-09 07:23:32,494 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 07:23:56,137 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53897.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:24:05,152 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4662, 6.0243, 5.4966, 5.7744, 5.5140, 5.4609, 6.0740, 6.0042], + device='cuda:1'), covar=tensor([0.1115, 0.0632, 0.0478, 0.0708, 0.1446, 0.0761, 0.0546, 0.0636], + device='cuda:1'), in_proj_covar=tensor([0.0574, 0.0477, 0.0356, 0.0509, 0.0696, 0.0509, 0.0681, 0.0511], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 07:24:30,760 INFO [train.py:898] (1/4) Epoch 15, batch 3050, loss[loss=0.1978, simple_loss=0.2823, pruned_loss=0.05665, over 18489.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2618, pruned_loss=0.04323, over 3581331.61 frames. ], batch size: 53, lr: 7.48e-03, grad_scale: 4.0 +2023-03-09 07:24:52,730 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53945.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:25:08,737 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.036e+02 2.885e+02 3.297e+02 3.882e+02 9.425e+02, threshold=6.594e+02, percent-clipped=2.0 +2023-03-09 07:25:10,119 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53960.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:25:22,574 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53971.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:25:29,421 INFO [train.py:898] (1/4) Epoch 15, batch 3100, loss[loss=0.1656, simple_loss=0.2479, pruned_loss=0.04166, over 18410.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2621, pruned_loss=0.04374, over 3565860.15 frames. ], batch size: 42, lr: 7.48e-03, grad_scale: 4.0 +2023-03-09 07:26:10,179 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=54008.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:26:20,227 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-03-09 07:26:23,249 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=54019.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:26:27,848 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=54023.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 07:26:31,792 INFO [train.py:898] (1/4) Epoch 15, batch 3150, loss[loss=0.153, simple_loss=0.2329, pruned_loss=0.03652, over 18267.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2613, pruned_loss=0.04353, over 3572194.48 frames. 
], batch size: 45, lr: 7.48e-03, grad_scale: 4.0 +2023-03-09 07:27:09,867 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.040e+02 2.859e+02 3.550e+02 4.150e+02 8.480e+02, threshold=7.099e+02, percent-clipped=1.0 +2023-03-09 07:27:23,150 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1067, 4.0632, 5.3645, 2.9803, 4.6850, 2.8776, 3.1035, 1.9544], + device='cuda:1'), covar=tensor([0.0925, 0.0780, 0.0092, 0.0821, 0.0512, 0.2245, 0.2847, 0.1997], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0227, 0.0146, 0.0184, 0.0243, 0.0256, 0.0301, 0.0221], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 07:27:24,062 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=54071.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:27:30,816 INFO [train.py:898] (1/4) Epoch 15, batch 3200, loss[loss=0.1893, simple_loss=0.2802, pruned_loss=0.0492, over 18336.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2614, pruned_loss=0.04362, over 3578251.60 frames. ], batch size: 55, lr: 7.47e-03, grad_scale: 8.0 +2023-03-09 07:28:28,996 INFO [train.py:898] (1/4) Epoch 15, batch 3250, loss[loss=0.2079, simple_loss=0.2915, pruned_loss=0.06209, over 12880.00 frames. ], tot_loss[loss=0.174, simple_loss=0.261, pruned_loss=0.04351, over 3577886.86 frames. ], batch size: 131, lr: 7.47e-03, grad_scale: 8.0 +2023-03-09 07:29:07,308 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.872e+02 2.643e+02 3.129e+02 4.018e+02 8.489e+02, threshold=6.258e+02, percent-clipped=1.0 +2023-03-09 07:29:28,034 INFO [train.py:898] (1/4) Epoch 15, batch 3300, loss[loss=0.1841, simple_loss=0.2733, pruned_loss=0.04746, over 15936.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2612, pruned_loss=0.04341, over 3580122.86 frames. ], batch size: 94, lr: 7.46e-03, grad_scale: 8.0 +2023-03-09 07:30:24,851 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9580, 3.9271, 5.0762, 3.0144, 4.3304, 2.6537, 3.0533, 1.9407], + device='cuda:1'), covar=tensor([0.1036, 0.0772, 0.0175, 0.0804, 0.0604, 0.2406, 0.2771, 0.1957], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0229, 0.0147, 0.0184, 0.0246, 0.0259, 0.0305, 0.0222], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 07:30:27,887 INFO [train.py:898] (1/4) Epoch 15, batch 3350, loss[loss=0.1709, simple_loss=0.2609, pruned_loss=0.04046, over 18373.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2612, pruned_loss=0.04295, over 3585743.56 frames. 
], batch size: 52, lr: 7.46e-03, grad_scale: 8.0 +2023-03-09 07:30:39,552 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1978, 5.1236, 5.3476, 5.3483, 5.1036, 5.8641, 5.5464, 5.1396], + device='cuda:1'), covar=tensor([0.1019, 0.0672, 0.0673, 0.0761, 0.1352, 0.0705, 0.0653, 0.1685], + device='cuda:1'), in_proj_covar=tensor([0.0337, 0.0262, 0.0283, 0.0282, 0.0316, 0.0394, 0.0258, 0.0387], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:1') +2023-03-09 07:31:05,787 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.725e+02 2.934e+02 3.343e+02 4.171e+02 1.510e+03, threshold=6.685e+02, percent-clipped=4.0 +2023-03-09 07:31:16,414 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.3158, 2.0630, 2.0414, 2.1711, 2.4098, 2.4520, 2.3598, 2.1599], + device='cuda:1'), covar=tensor([0.0217, 0.0241, 0.0411, 0.0363, 0.0220, 0.0204, 0.0351, 0.0323], + device='cuda:1'), in_proj_covar=tensor([0.0130, 0.0124, 0.0158, 0.0149, 0.0115, 0.0105, 0.0142, 0.0145], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 07:31:26,864 INFO [train.py:898] (1/4) Epoch 15, batch 3400, loss[loss=0.1973, simple_loss=0.2836, pruned_loss=0.05552, over 17105.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2612, pruned_loss=0.04288, over 3590077.25 frames. ], batch size: 78, lr: 7.46e-03, grad_scale: 8.0 +2023-03-09 07:32:24,926 INFO [train.py:898] (1/4) Epoch 15, batch 3450, loss[loss=0.1734, simple_loss=0.2551, pruned_loss=0.04586, over 18325.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2608, pruned_loss=0.04292, over 3591063.00 frames. ], batch size: 46, lr: 7.45e-03, grad_scale: 8.0 +2023-03-09 07:32:32,592 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8470, 4.5067, 4.6099, 3.3202, 3.7812, 3.4539, 2.8429, 2.4548], + device='cuda:1'), covar=tensor([0.0241, 0.0175, 0.0082, 0.0324, 0.0341, 0.0231, 0.0723, 0.0927], + device='cuda:1'), in_proj_covar=tensor([0.0066, 0.0054, 0.0056, 0.0065, 0.0086, 0.0063, 0.0076, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 07:33:02,929 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.097e+02 3.124e+02 3.686e+02 4.996e+02 1.186e+03, threshold=7.372e+02, percent-clipped=7.0 +2023-03-09 07:33:23,267 INFO [train.py:898] (1/4) Epoch 15, batch 3500, loss[loss=0.2223, simple_loss=0.3049, pruned_loss=0.0698, over 12764.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2609, pruned_loss=0.04311, over 3569375.48 frames. ], batch size: 130, lr: 7.45e-03, grad_scale: 8.0 +2023-03-09 07:33:43,038 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.67 vs. limit=5.0 +2023-03-09 07:34:20,583 INFO [train.py:898] (1/4) Epoch 15, batch 3550, loss[loss=0.1492, simple_loss=0.2354, pruned_loss=0.03148, over 18282.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.261, pruned_loss=0.04313, over 3574413.66 frames. 
], batch size: 49, lr: 7.45e-03, grad_scale: 8.0 +2023-03-09 07:34:38,460 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5948, 2.1159, 2.6194, 2.5486, 3.1313, 4.8248, 4.4526, 3.4961], + device='cuda:1'), covar=tensor([0.1508, 0.2256, 0.2629, 0.1714, 0.2230, 0.0155, 0.0396, 0.0726], + device='cuda:1'), in_proj_covar=tensor([0.0268, 0.0325, 0.0351, 0.0261, 0.0374, 0.0214, 0.0277, 0.0231], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 07:34:54,890 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.932e+02 3.002e+02 3.469e+02 4.157e+02 6.561e+02, threshold=6.938e+02, percent-clipped=0.0 +2023-03-09 07:35:13,951 INFO [train.py:898] (1/4) Epoch 15, batch 3600, loss[loss=0.1744, simple_loss=0.2695, pruned_loss=0.03965, over 18463.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2609, pruned_loss=0.04291, over 3588298.70 frames. ], batch size: 53, lr: 7.44e-03, grad_scale: 8.0 +2023-03-09 07:36:16,271 INFO [train.py:898] (1/4) Epoch 16, batch 0, loss[loss=0.2021, simple_loss=0.2822, pruned_loss=0.06096, over 12459.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2822, pruned_loss=0.06096, over 12459.00 frames. ], batch size: 129, lr: 7.20e-03, grad_scale: 8.0 +2023-03-09 07:36:16,272 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 07:36:28,070 INFO [train.py:932] (1/4) Epoch 16, validation: loss=0.1541, simple_loss=0.2552, pruned_loss=0.02651, over 944034.00 frames. +2023-03-09 07:36:28,071 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 07:37:06,286 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6255, 3.5592, 3.4786, 3.1452, 3.3147, 2.6720, 2.6766, 3.6034], + device='cuda:1'), covar=tensor([0.0057, 0.0079, 0.0070, 0.0104, 0.0096, 0.0198, 0.0194, 0.0058], + device='cuda:1'), in_proj_covar=tensor([0.0118, 0.0138, 0.0118, 0.0170, 0.0122, 0.0165, 0.0168, 0.0100], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 07:37:24,163 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.170e+02 3.007e+02 3.556e+02 4.370e+02 7.449e+02, threshold=7.113e+02, percent-clipped=5.0 +2023-03-09 07:37:26,544 INFO [train.py:898] (1/4) Epoch 16, batch 50, loss[loss=0.1824, simple_loss=0.2708, pruned_loss=0.04701, over 18505.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2609, pruned_loss=0.04397, over 813607.93 frames. ], batch size: 51, lr: 7.20e-03, grad_scale: 8.0 +2023-03-09 07:38:25,630 INFO [train.py:898] (1/4) Epoch 16, batch 100, loss[loss=0.1813, simple_loss=0.2703, pruned_loss=0.04617, over 18473.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.261, pruned_loss=0.04338, over 1434270.93 frames. ], batch size: 59, lr: 7.20e-03, grad_scale: 8.0 +2023-03-09 07:39:21,644 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.042e+02 2.818e+02 3.254e+02 3.972e+02 9.345e+02, threshold=6.508e+02, percent-clipped=3.0 +2023-03-09 07:39:23,852 INFO [train.py:898] (1/4) Epoch 16, batch 150, loss[loss=0.1552, simple_loss=0.2324, pruned_loss=0.039, over 18243.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2598, pruned_loss=0.04313, over 1922453.85 frames. 
], batch size: 45, lr: 7.19e-03, grad_scale: 8.0 +2023-03-09 07:39:27,642 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54664.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 07:39:47,481 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7458, 3.4537, 2.1883, 4.3808, 3.1490, 4.3050, 2.3948, 4.0903], + device='cuda:1'), covar=tensor([0.0528, 0.0857, 0.1439, 0.0511, 0.0820, 0.0294, 0.1302, 0.0372], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0220, 0.0185, 0.0268, 0.0187, 0.0254, 0.0196, 0.0193], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 07:40:22,365 INFO [train.py:898] (1/4) Epoch 16, batch 200, loss[loss=0.1921, simple_loss=0.2791, pruned_loss=0.05259, over 12573.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2598, pruned_loss=0.04241, over 2301361.86 frames. ], batch size: 130, lr: 7.19e-03, grad_scale: 8.0 +2023-03-09 07:40:38,426 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54725.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 07:40:39,507 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4580, 2.7210, 2.4396, 2.8446, 3.5678, 3.4856, 3.0707, 2.9643], + device='cuda:1'), covar=tensor([0.0192, 0.0297, 0.0569, 0.0368, 0.0142, 0.0136, 0.0318, 0.0350], + device='cuda:1'), in_proj_covar=tensor([0.0130, 0.0127, 0.0161, 0.0150, 0.0114, 0.0105, 0.0145, 0.0146], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 07:41:17,925 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.955e+02 3.015e+02 3.636e+02 4.590e+02 9.600e+02, threshold=7.273e+02, percent-clipped=5.0 +2023-03-09 07:41:18,633 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. limit=2.0 +2023-03-09 07:41:20,199 INFO [train.py:898] (1/4) Epoch 16, batch 250, loss[loss=0.1782, simple_loss=0.2733, pruned_loss=0.04156, over 18567.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2613, pruned_loss=0.04285, over 2592845.90 frames. ], batch size: 54, lr: 7.19e-03, grad_scale: 8.0 +2023-03-09 07:41:49,782 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7855, 3.6644, 3.6343, 3.2287, 3.5302, 2.9201, 2.9962, 3.7841], + device='cuda:1'), covar=tensor([0.0044, 0.0074, 0.0066, 0.0113, 0.0078, 0.0152, 0.0157, 0.0045], + device='cuda:1'), in_proj_covar=tensor([0.0117, 0.0138, 0.0117, 0.0168, 0.0123, 0.0164, 0.0166, 0.0100], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 07:42:17,965 INFO [train.py:898] (1/4) Epoch 16, batch 300, loss[loss=0.1701, simple_loss=0.2653, pruned_loss=0.03745, over 18495.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.262, pruned_loss=0.04355, over 2810696.28 frames. ], batch size: 51, lr: 7.18e-03, grad_scale: 8.0 +2023-03-09 07:42:40,200 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.18 vs. limit=5.0 +2023-03-09 07:43:14,226 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.101e+02 2.934e+02 3.524e+02 4.515e+02 1.434e+03, threshold=7.048e+02, percent-clipped=4.0 +2023-03-09 07:43:16,409 INFO [train.py:898] (1/4) Epoch 16, batch 350, loss[loss=0.1928, simple_loss=0.28, pruned_loss=0.05284, over 17631.00 frames. ], tot_loss[loss=0.175, simple_loss=0.262, pruned_loss=0.04401, over 2977386.62 frames. 
], batch size: 70, lr: 7.18e-03, grad_scale: 8.0 +2023-03-09 07:43:17,968 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54862.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:43:33,417 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9875, 4.9893, 5.0843, 4.8021, 4.8868, 4.8875, 5.2011, 5.2223], + device='cuda:1'), covar=tensor([0.0076, 0.0072, 0.0058, 0.0110, 0.0061, 0.0132, 0.0092, 0.0108], + device='cuda:1'), in_proj_covar=tensor([0.0089, 0.0065, 0.0069, 0.0088, 0.0071, 0.0098, 0.0083, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 07:44:15,044 INFO [train.py:898] (1/4) Epoch 16, batch 400, loss[loss=0.1624, simple_loss=0.2463, pruned_loss=0.03926, over 18369.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2613, pruned_loss=0.0433, over 3123320.94 frames. ], batch size: 46, lr: 7.18e-03, grad_scale: 8.0 +2023-03-09 07:44:26,675 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8543, 3.7327, 3.6391, 3.3043, 3.6227, 2.8996, 2.8704, 3.7999], + device='cuda:1'), covar=tensor([0.0046, 0.0090, 0.0071, 0.0115, 0.0073, 0.0168, 0.0183, 0.0057], + device='cuda:1'), in_proj_covar=tensor([0.0119, 0.0139, 0.0119, 0.0171, 0.0124, 0.0166, 0.0168, 0.0101], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 07:44:28,943 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54923.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 07:44:46,400 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-09 07:44:55,754 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.52 vs. limit=5.0 +2023-03-09 07:45:00,716 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9152, 4.4575, 4.6566, 3.4025, 3.7449, 3.5336, 2.6112, 2.5124], + device='cuda:1'), covar=tensor([0.0185, 0.0153, 0.0063, 0.0299, 0.0334, 0.0223, 0.0744, 0.0862], + device='cuda:1'), in_proj_covar=tensor([0.0066, 0.0055, 0.0056, 0.0065, 0.0087, 0.0063, 0.0077, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 07:45:00,755 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6700, 3.4165, 2.1036, 4.4053, 3.0056, 4.3415, 2.2621, 3.8687], + device='cuda:1'), covar=tensor([0.0561, 0.0788, 0.1465, 0.0411, 0.0850, 0.0266, 0.1241, 0.0433], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0221, 0.0186, 0.0268, 0.0188, 0.0256, 0.0196, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 07:45:11,975 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.963e+02 2.717e+02 3.272e+02 3.926e+02 7.148e+02, threshold=6.544e+02, percent-clipped=1.0 +2023-03-09 07:45:13,184 INFO [train.py:898] (1/4) Epoch 16, batch 450, loss[loss=0.1687, simple_loss=0.2515, pruned_loss=0.04301, over 18275.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2607, pruned_loss=0.04286, over 3242137.64 frames. 
], batch size: 49, lr: 7.17e-03, grad_scale: 8.0 +2023-03-09 07:45:36,100 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9217, 3.6772, 5.0536, 4.3601, 3.1274, 2.9940, 4.3097, 5.1519], + device='cuda:1'), covar=tensor([0.0760, 0.1490, 0.0148, 0.0365, 0.1021, 0.1186, 0.0445, 0.0281], + device='cuda:1'), in_proj_covar=tensor([0.0144, 0.0266, 0.0130, 0.0174, 0.0184, 0.0185, 0.0186, 0.0175], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 07:46:12,498 INFO [train.py:898] (1/4) Epoch 16, batch 500, loss[loss=0.1505, simple_loss=0.2392, pruned_loss=0.03087, over 18517.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2603, pruned_loss=0.04246, over 3326036.94 frames. ], batch size: 47, lr: 7.17e-03, grad_scale: 8.0 +2023-03-09 07:46:23,066 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55020.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 07:46:37,213 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9168, 3.8017, 5.1277, 3.2037, 4.4511, 2.8244, 3.1507, 2.0203], + device='cuda:1'), covar=tensor([0.1062, 0.0826, 0.0120, 0.0742, 0.0579, 0.2171, 0.2542, 0.1876], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0228, 0.0147, 0.0183, 0.0245, 0.0257, 0.0304, 0.0221], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 07:47:02,762 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3396, 2.6704, 2.3622, 2.7444, 3.4447, 3.4982, 3.0640, 2.8365], + device='cuda:1'), covar=tensor([0.0184, 0.0335, 0.0636, 0.0402, 0.0202, 0.0128, 0.0358, 0.0425], + device='cuda:1'), in_proj_covar=tensor([0.0130, 0.0126, 0.0160, 0.0149, 0.0115, 0.0105, 0.0145, 0.0145], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 07:47:09,471 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.142e+02 2.826e+02 3.526e+02 4.225e+02 1.034e+03, threshold=7.052e+02, percent-clipped=4.0 +2023-03-09 07:47:10,627 INFO [train.py:898] (1/4) Epoch 16, batch 550, loss[loss=0.1343, simple_loss=0.2195, pruned_loss=0.02449, over 18153.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2605, pruned_loss=0.04256, over 3383074.97 frames. ], batch size: 44, lr: 7.17e-03, grad_scale: 8.0 +2023-03-09 07:47:21,533 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8142, 2.2696, 2.7490, 2.7168, 3.3828, 5.1083, 4.6813, 3.5985], + device='cuda:1'), covar=tensor([0.1483, 0.2324, 0.2725, 0.1696, 0.2024, 0.0143, 0.0426, 0.0741], + device='cuda:1'), in_proj_covar=tensor([0.0273, 0.0331, 0.0356, 0.0264, 0.0379, 0.0216, 0.0281, 0.0233], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 07:48:09,168 INFO [train.py:898] (1/4) Epoch 16, batch 600, loss[loss=0.1959, simple_loss=0.2901, pruned_loss=0.05088, over 18087.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2604, pruned_loss=0.04279, over 3414222.64 frames. 
], batch size: 62, lr: 7.16e-03, grad_scale: 8.0 +2023-03-09 07:48:14,084 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6354, 3.3433, 2.2139, 4.3121, 3.0155, 4.2668, 2.3057, 3.8538], + device='cuda:1'), covar=tensor([0.0511, 0.0840, 0.1373, 0.0481, 0.0823, 0.0247, 0.1203, 0.0425], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0221, 0.0185, 0.0267, 0.0188, 0.0256, 0.0196, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 07:48:51,627 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55148.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:49:05,768 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.722e+02 2.752e+02 3.250e+02 4.071e+02 8.362e+02, threshold=6.500e+02, percent-clipped=2.0 +2023-03-09 07:49:06,111 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9856, 5.0710, 5.1426, 4.8479, 4.8004, 4.8975, 5.2268, 5.2813], + device='cuda:1'), covar=tensor([0.0061, 0.0060, 0.0047, 0.0094, 0.0063, 0.0125, 0.0079, 0.0085], + device='cuda:1'), in_proj_covar=tensor([0.0089, 0.0065, 0.0069, 0.0088, 0.0071, 0.0098, 0.0083, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 07:49:06,974 INFO [train.py:898] (1/4) Epoch 16, batch 650, loss[loss=0.1579, simple_loss=0.243, pruned_loss=0.03646, over 18333.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2609, pruned_loss=0.04295, over 3452646.01 frames. ], batch size: 46, lr: 7.16e-03, grad_scale: 8.0 +2023-03-09 07:50:03,485 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55209.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:50:05,816 INFO [train.py:898] (1/4) Epoch 16, batch 700, loss[loss=0.1857, simple_loss=0.2726, pruned_loss=0.04935, over 18263.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2613, pruned_loss=0.04313, over 3487455.52 frames. ], batch size: 60, lr: 7.16e-03, grad_scale: 4.0 +2023-03-09 07:50:06,119 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55211.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:50:14,570 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55218.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 07:50:51,211 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6641, 4.6956, 4.7542, 4.4589, 4.5624, 4.5673, 4.8418, 4.8226], + device='cuda:1'), covar=tensor([0.0071, 0.0066, 0.0068, 0.0102, 0.0063, 0.0136, 0.0073, 0.0119], + device='cuda:1'), in_proj_covar=tensor([0.0088, 0.0064, 0.0068, 0.0087, 0.0070, 0.0097, 0.0082, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 07:51:02,972 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.749e+02 2.606e+02 3.222e+02 3.898e+02 7.462e+02, threshold=6.443e+02, percent-clipped=3.0 +2023-03-09 07:51:02,998 INFO [train.py:898] (1/4) Epoch 16, batch 750, loss[loss=0.1779, simple_loss=0.272, pruned_loss=0.04195, over 18222.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2622, pruned_loss=0.04358, over 3517126.76 frames. 
], batch size: 60, lr: 7.15e-03, grad_scale: 4.0 +2023-03-09 07:51:16,852 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55272.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:51:21,138 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-09 07:51:51,846 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3797, 4.4108, 4.4152, 4.2161, 4.2650, 4.2498, 4.5343, 4.5232], + device='cuda:1'), covar=tensor([0.0076, 0.0073, 0.0073, 0.0099, 0.0072, 0.0140, 0.0073, 0.0110], + device='cuda:1'), in_proj_covar=tensor([0.0088, 0.0064, 0.0068, 0.0087, 0.0070, 0.0097, 0.0082, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 07:52:01,614 INFO [train.py:898] (1/4) Epoch 16, batch 800, loss[loss=0.2254, simple_loss=0.3003, pruned_loss=0.0752, over 13063.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2611, pruned_loss=0.04309, over 3532418.30 frames. ], batch size: 130, lr: 7.15e-03, grad_scale: 8.0 +2023-03-09 07:52:13,172 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55320.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 07:53:00,378 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.160e+02 2.840e+02 3.265e+02 3.800e+02 8.424e+02, threshold=6.530e+02, percent-clipped=5.0 +2023-03-09 07:53:00,409 INFO [train.py:898] (1/4) Epoch 16, batch 850, loss[loss=0.1722, simple_loss=0.2654, pruned_loss=0.0395, over 18564.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2601, pruned_loss=0.04272, over 3549951.87 frames. ], batch size: 54, lr: 7.15e-03, grad_scale: 8.0 +2023-03-09 07:53:08,443 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55368.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 07:53:36,466 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-03-09 07:53:59,236 INFO [train.py:898] (1/4) Epoch 16, batch 900, loss[loss=0.1496, simple_loss=0.2304, pruned_loss=0.03438, over 18462.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2609, pruned_loss=0.04277, over 3558346.23 frames. ], batch size: 43, lr: 7.15e-03, grad_scale: 8.0 +2023-03-09 07:54:57,057 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.979e+02 2.881e+02 3.338e+02 4.155e+02 1.042e+03, threshold=6.676e+02, percent-clipped=4.0 +2023-03-09 07:54:57,083 INFO [train.py:898] (1/4) Epoch 16, batch 950, loss[loss=0.1459, simple_loss=0.2328, pruned_loss=0.02949, over 18491.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2618, pruned_loss=0.04299, over 3573503.43 frames. ], batch size: 47, lr: 7.14e-03, grad_scale: 8.0 +2023-03-09 07:55:46,654 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55504.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:55:54,396 INFO [train.py:898] (1/4) Epoch 16, batch 1000, loss[loss=0.1418, simple_loss=0.2243, pruned_loss=0.0297, over 18453.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.261, pruned_loss=0.04259, over 3578369.39 frames. 
], batch size: 43, lr: 7.14e-03, grad_scale: 8.0 +2023-03-09 07:56:01,053 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4639, 2.0249, 3.9376, 3.6901, 2.0750, 4.3117, 3.6408, 2.4840], + device='cuda:1'), covar=tensor([0.0475, 0.2434, 0.0359, 0.0338, 0.2503, 0.0248, 0.0689, 0.1460], + device='cuda:1'), in_proj_covar=tensor([0.0201, 0.0230, 0.0185, 0.0153, 0.0221, 0.0199, 0.0233, 0.0193], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 07:56:03,342 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55518.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 07:56:46,089 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8871, 4.9712, 4.9332, 4.6690, 4.7814, 4.7729, 5.0560, 5.1146], + device='cuda:1'), covar=tensor([0.0073, 0.0055, 0.0065, 0.0096, 0.0069, 0.0147, 0.0068, 0.0080], + device='cuda:1'), in_proj_covar=tensor([0.0088, 0.0064, 0.0068, 0.0087, 0.0070, 0.0096, 0.0081, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 07:56:53,730 INFO [train.py:898] (1/4) Epoch 16, batch 1050, loss[loss=0.1568, simple_loss=0.2387, pruned_loss=0.03745, over 18488.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.262, pruned_loss=0.04329, over 3556362.62 frames. ], batch size: 44, lr: 7.14e-03, grad_scale: 4.0 +2023-03-09 07:56:54,801 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.207e+02 2.979e+02 3.472e+02 4.235e+02 7.011e+02, threshold=6.944e+02, percent-clipped=2.0 +2023-03-09 07:57:00,132 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55566.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:57:01,317 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55567.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:57:52,428 INFO [train.py:898] (1/4) Epoch 16, batch 1100, loss[loss=0.1575, simple_loss=0.2502, pruned_loss=0.03238, over 18410.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.262, pruned_loss=0.04337, over 3572579.89 frames. ], batch size: 48, lr: 7.13e-03, grad_scale: 4.0 +2023-03-09 07:58:39,655 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4287, 3.3226, 1.8866, 4.1876, 2.8942, 4.1306, 2.1416, 3.6515], + device='cuda:1'), covar=tensor([0.0633, 0.0900, 0.1510, 0.0502, 0.0888, 0.0250, 0.1292, 0.0449], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0222, 0.0187, 0.0269, 0.0189, 0.0259, 0.0198, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 07:58:52,115 INFO [train.py:898] (1/4) Epoch 16, batch 1150, loss[loss=0.1949, simple_loss=0.2807, pruned_loss=0.0545, over 16163.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2619, pruned_loss=0.04357, over 3552449.83 frames. ], batch size: 94, lr: 7.13e-03, grad_scale: 4.0 +2023-03-09 07:58:53,246 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.682e+02 3.124e+02 3.807e+02 4.865e+02 2.142e+03, threshold=7.614e+02, percent-clipped=11.0 +2023-03-09 07:59:50,927 INFO [train.py:898] (1/4) Epoch 16, batch 1200, loss[loss=0.1878, simple_loss=0.2859, pruned_loss=0.04482, over 18576.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2606, pruned_loss=0.04298, over 3560136.58 frames. 
], batch size: 54, lr: 7.13e-03, grad_scale: 8.0 +2023-03-09 07:59:55,927 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.4128, 3.2227, 4.3886, 3.9048, 3.0463, 2.9515, 3.9775, 4.5238], + device='cuda:1'), covar=tensor([0.0848, 0.1397, 0.0219, 0.0391, 0.0875, 0.1039, 0.0388, 0.0286], + device='cuda:1'), in_proj_covar=tensor([0.0141, 0.0261, 0.0127, 0.0172, 0.0181, 0.0182, 0.0183, 0.0172], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 08:00:10,943 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7061, 3.6855, 3.5038, 3.2228, 3.4388, 2.8653, 2.8331, 3.7749], + device='cuda:1'), covar=tensor([0.0053, 0.0072, 0.0084, 0.0125, 0.0091, 0.0187, 0.0185, 0.0043], + device='cuda:1'), in_proj_covar=tensor([0.0119, 0.0141, 0.0121, 0.0175, 0.0127, 0.0169, 0.0171, 0.0103], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 08:00:34,452 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55748.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:00:48,846 INFO [train.py:898] (1/4) Epoch 16, batch 1250, loss[loss=0.1683, simple_loss=0.26, pruned_loss=0.03828, over 17036.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2611, pruned_loss=0.04319, over 3569398.31 frames. ], batch size: 78, lr: 7.12e-03, grad_scale: 8.0 +2023-03-09 08:00:49,973 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.937e+02 2.771e+02 3.226e+02 3.849e+02 7.150e+02, threshold=6.452e+02, percent-clipped=0.0 +2023-03-09 08:01:39,814 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55804.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:01:45,565 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55809.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:01:47,531 INFO [train.py:898] (1/4) Epoch 16, batch 1300, loss[loss=0.1918, simple_loss=0.2806, pruned_loss=0.05151, over 18459.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2612, pruned_loss=0.04332, over 3567431.88 frames. ], batch size: 59, lr: 7.12e-03, grad_scale: 8.0 +2023-03-09 08:02:30,205 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55848.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:02:34,748 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55852.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:02:35,387 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.88 vs. limit=5.0 +2023-03-09 08:02:45,455 INFO [train.py:898] (1/4) Epoch 16, batch 1350, loss[loss=0.1822, simple_loss=0.2762, pruned_loss=0.04414, over 18498.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2618, pruned_loss=0.04345, over 3578537.10 frames. 
], batch size: 59, lr: 7.12e-03, grad_scale: 8.0 +2023-03-09 08:02:46,544 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.956e+02 2.894e+02 3.394e+02 4.145e+02 8.688e+02, threshold=6.789e+02, percent-clipped=2.0 +2023-03-09 08:02:52,191 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55867.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:03:42,133 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55909.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:03:43,925 INFO [train.py:898] (1/4) Epoch 16, batch 1400, loss[loss=0.1784, simple_loss=0.2628, pruned_loss=0.04705, over 18290.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2618, pruned_loss=0.04354, over 3582266.14 frames. ], batch size: 49, lr: 7.11e-03, grad_scale: 8.0 +2023-03-09 08:03:48,580 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55915.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:04:09,343 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7178, 3.5638, 3.4737, 3.1157, 3.4019, 2.7134, 2.6956, 3.7462], + device='cuda:1'), covar=tensor([0.0048, 0.0086, 0.0074, 0.0121, 0.0087, 0.0185, 0.0196, 0.0043], + device='cuda:1'), in_proj_covar=tensor([0.0119, 0.0140, 0.0121, 0.0174, 0.0127, 0.0168, 0.0171, 0.0103], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 08:04:42,859 INFO [train.py:898] (1/4) Epoch 16, batch 1450, loss[loss=0.1342, simple_loss=0.2179, pruned_loss=0.02527, over 18242.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2612, pruned_loss=0.0429, over 3588448.60 frames. ], batch size: 45, lr: 7.11e-03, grad_scale: 8.0 +2023-03-09 08:04:43,988 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.940e+02 2.882e+02 3.410e+02 3.952e+02 8.442e+02, threshold=6.821e+02, percent-clipped=3.0 +2023-03-09 08:05:46,795 INFO [train.py:898] (1/4) Epoch 16, batch 1500, loss[loss=0.1968, simple_loss=0.2802, pruned_loss=0.05674, over 18452.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.261, pruned_loss=0.04271, over 3580824.83 frames. ], batch size: 59, lr: 7.11e-03, grad_scale: 8.0 +2023-03-09 08:05:55,069 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2131, 5.2695, 5.3856, 5.4907, 5.1511, 6.0114, 5.6095, 5.4164], + device='cuda:1'), covar=tensor([0.1214, 0.0641, 0.0832, 0.0642, 0.1644, 0.0795, 0.0704, 0.1608], + device='cuda:1'), in_proj_covar=tensor([0.0338, 0.0265, 0.0287, 0.0285, 0.0316, 0.0395, 0.0262, 0.0386], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:1') +2023-03-09 08:06:43,873 INFO [train.py:898] (1/4) Epoch 16, batch 1550, loss[loss=0.1638, simple_loss=0.249, pruned_loss=0.03928, over 18406.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2601, pruned_loss=0.04245, over 3592502.58 frames. 
], batch size: 48, lr: 7.10e-03, grad_scale: 8.0 +2023-03-09 08:06:44,923 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.796e+02 2.823e+02 3.373e+02 3.914e+02 6.631e+02, threshold=6.746e+02, percent-clipped=0.0 +2023-03-09 08:07:33,280 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56104.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:07:38,097 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5263, 3.4884, 3.3312, 2.9791, 3.2565, 2.5678, 2.5426, 3.6040], + device='cuda:1'), covar=tensor([0.0056, 0.0075, 0.0083, 0.0136, 0.0086, 0.0186, 0.0204, 0.0044], + device='cuda:1'), in_proj_covar=tensor([0.0119, 0.0140, 0.0121, 0.0174, 0.0127, 0.0167, 0.0170, 0.0103], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 08:07:41,141 INFO [train.py:898] (1/4) Epoch 16, batch 1600, loss[loss=0.1535, simple_loss=0.2364, pruned_loss=0.03525, over 18267.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2604, pruned_loss=0.04227, over 3596110.28 frames. ], batch size: 47, lr: 7.10e-03, grad_scale: 8.0 +2023-03-09 08:08:21,192 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56145.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:08:39,570 INFO [train.py:898] (1/4) Epoch 16, batch 1650, loss[loss=0.181, simple_loss=0.2677, pruned_loss=0.04714, over 18492.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2614, pruned_loss=0.04266, over 3584193.66 frames. ], batch size: 53, lr: 7.10e-03, grad_scale: 8.0 +2023-03-09 08:08:40,625 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.176e+02 2.932e+02 3.714e+02 4.558e+02 1.092e+03, threshold=7.428e+02, percent-clipped=5.0 +2023-03-09 08:09:20,846 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7094, 3.7996, 5.1963, 4.6049, 3.4306, 3.2198, 4.5438, 5.3987], + device='cuda:1'), covar=tensor([0.0826, 0.1545, 0.0149, 0.0303, 0.0868, 0.1021, 0.0338, 0.0199], + device='cuda:1'), in_proj_covar=tensor([0.0143, 0.0264, 0.0130, 0.0175, 0.0184, 0.0184, 0.0186, 0.0176], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 08:09:30,610 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56204.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:09:33,099 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56206.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:09:38,406 INFO [train.py:898] (1/4) Epoch 16, batch 1700, loss[loss=0.1764, simple_loss=0.263, pruned_loss=0.04493, over 18412.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2611, pruned_loss=0.04259, over 3584259.49 frames. ], batch size: 48, lr: 7.09e-03, grad_scale: 8.0 +2023-03-09 08:10:31,383 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8629, 3.8946, 5.2933, 4.6683, 3.4192, 3.3253, 4.6484, 5.4195], + device='cuda:1'), covar=tensor([0.0742, 0.1546, 0.0153, 0.0297, 0.0838, 0.0993, 0.0350, 0.0207], + device='cuda:1'), in_proj_covar=tensor([0.0143, 0.0264, 0.0130, 0.0174, 0.0184, 0.0184, 0.0186, 0.0175], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 08:10:36,656 INFO [train.py:898] (1/4) Epoch 16, batch 1750, loss[loss=0.1753, simple_loss=0.2681, pruned_loss=0.04127, over 17983.00 frames. 
], tot_loss[loss=0.1734, simple_loss=0.2614, pruned_loss=0.04273, over 3585086.78 frames. ], batch size: 65, lr: 7.09e-03, grad_scale: 8.0 +2023-03-09 08:10:37,713 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.941e+02 2.914e+02 3.484e+02 4.158e+02 1.053e+03, threshold=6.969e+02, percent-clipped=1.0 +2023-03-09 08:10:59,170 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0075, 4.9582, 4.6112, 4.8934, 4.9128, 4.3194, 4.8430, 4.6262], + device='cuda:1'), covar=tensor([0.0397, 0.0442, 0.1279, 0.0693, 0.0542, 0.0447, 0.0412, 0.1145], + device='cuda:1'), in_proj_covar=tensor([0.0448, 0.0511, 0.0658, 0.0403, 0.0398, 0.0468, 0.0500, 0.0632], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 08:11:19,979 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7254, 3.7279, 5.0226, 4.4975, 3.3525, 3.1516, 4.6349, 5.2707], + device='cuda:1'), covar=tensor([0.0794, 0.1698, 0.0178, 0.0352, 0.0827, 0.1057, 0.0310, 0.0164], + device='cuda:1'), in_proj_covar=tensor([0.0144, 0.0265, 0.0131, 0.0175, 0.0185, 0.0186, 0.0187, 0.0177], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 08:11:29,064 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 08:11:36,031 INFO [train.py:898] (1/4) Epoch 16, batch 1800, loss[loss=0.1761, simple_loss=0.2517, pruned_loss=0.05024, over 17635.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2621, pruned_loss=0.04313, over 3561113.21 frames. ], batch size: 39, lr: 7.09e-03, grad_scale: 8.0 +2023-03-09 08:12:34,548 INFO [train.py:898] (1/4) Epoch 16, batch 1850, loss[loss=0.1593, simple_loss=0.2584, pruned_loss=0.03008, over 18304.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2617, pruned_loss=0.04297, over 3574014.88 frames. ], batch size: 54, lr: 7.09e-03, grad_scale: 8.0 +2023-03-09 08:12:35,493 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.821e+02 3.124e+02 3.834e+02 4.699e+02 1.584e+03, threshold=7.668e+02, percent-clipped=5.0 +2023-03-09 08:13:24,923 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56404.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:13:32,350 INFO [train.py:898] (1/4) Epoch 16, batch 1900, loss[loss=0.2063, simple_loss=0.281, pruned_loss=0.06579, over 12678.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2622, pruned_loss=0.04324, over 3573279.49 frames. ], batch size: 130, lr: 7.08e-03, grad_scale: 8.0 +2023-03-09 08:14:19,990 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56452.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:14:30,009 INFO [train.py:898] (1/4) Epoch 16, batch 1950, loss[loss=0.1644, simple_loss=0.255, pruned_loss=0.03695, over 18291.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2626, pruned_loss=0.04351, over 3582028.87 frames. ], batch size: 49, lr: 7.08e-03, grad_scale: 8.0 +2023-03-09 08:14:31,033 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.810e+02 3.032e+02 3.372e+02 4.243e+02 1.557e+03, threshold=6.744e+02, percent-clipped=4.0 +2023-03-09 08:14:35,844 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. 
limit=2.0 +2023-03-09 08:15:16,855 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56501.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:15:20,430 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56504.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:15:27,937 INFO [train.py:898] (1/4) Epoch 16, batch 2000, loss[loss=0.1954, simple_loss=0.2815, pruned_loss=0.05461, over 18416.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2624, pruned_loss=0.04329, over 3585027.06 frames. ], batch size: 52, lr: 7.08e-03, grad_scale: 8.0 +2023-03-09 08:15:42,593 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56523.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 08:15:42,746 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5397, 2.1509, 2.6426, 2.4841, 3.2121, 4.7974, 4.4282, 3.8754], + device='cuda:1'), covar=tensor([0.1521, 0.2324, 0.2613, 0.1785, 0.2077, 0.0166, 0.0413, 0.0599], + device='cuda:1'), in_proj_covar=tensor([0.0275, 0.0330, 0.0355, 0.0265, 0.0379, 0.0218, 0.0283, 0.0234], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 08:16:16,966 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56552.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:16:26,973 INFO [train.py:898] (1/4) Epoch 16, batch 2050, loss[loss=0.1875, simple_loss=0.272, pruned_loss=0.05145, over 18061.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2625, pruned_loss=0.04327, over 3585329.32 frames. ], batch size: 65, lr: 7.07e-03, grad_scale: 8.0 +2023-03-09 08:16:28,115 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.109e+02 2.908e+02 3.297e+02 4.101e+02 7.290e+02, threshold=6.593e+02, percent-clipped=1.0 +2023-03-09 08:16:54,721 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56584.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 08:17:26,195 INFO [train.py:898] (1/4) Epoch 16, batch 2100, loss[loss=0.1559, simple_loss=0.251, pruned_loss=0.03045, over 18304.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2618, pruned_loss=0.04285, over 3601603.13 frames. ], batch size: 54, lr: 7.07e-03, grad_scale: 4.0 +2023-03-09 08:18:11,055 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-09 08:18:14,508 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-09 08:18:25,143 INFO [train.py:898] (1/4) Epoch 16, batch 2150, loss[loss=0.1853, simple_loss=0.2769, pruned_loss=0.04685, over 18343.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2619, pruned_loss=0.04288, over 3591942.95 frames. ], batch size: 56, lr: 7.07e-03, grad_scale: 4.0 +2023-03-09 08:18:27,229 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.074e+02 2.907e+02 3.397e+02 4.221e+02 7.037e+02, threshold=6.794e+02, percent-clipped=2.0 +2023-03-09 08:19:23,218 INFO [train.py:898] (1/4) Epoch 16, batch 2200, loss[loss=0.1859, simple_loss=0.2716, pruned_loss=0.05011, over 18468.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2616, pruned_loss=0.04291, over 3582318.16 frames. 
], batch size: 59, lr: 7.06e-03, grad_scale: 4.0 +2023-03-09 08:19:57,331 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.4640, 3.5853, 4.9052, 4.2790, 3.1624, 2.8332, 4.1912, 5.1297], + device='cuda:1'), covar=tensor([0.0909, 0.1516, 0.0167, 0.0391, 0.0950, 0.1181, 0.0400, 0.0243], + device='cuda:1'), in_proj_covar=tensor([0.0142, 0.0261, 0.0129, 0.0173, 0.0183, 0.0184, 0.0185, 0.0175], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 08:20:12,996 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5615, 2.8944, 2.5415, 2.9192, 3.6097, 3.6067, 3.0904, 3.0891], + device='cuda:1'), covar=tensor([0.0174, 0.0260, 0.0586, 0.0379, 0.0163, 0.0151, 0.0380, 0.0354], + device='cuda:1'), in_proj_covar=tensor([0.0129, 0.0125, 0.0158, 0.0148, 0.0117, 0.0105, 0.0147, 0.0143], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 08:20:21,607 INFO [train.py:898] (1/4) Epoch 16, batch 2250, loss[loss=0.1872, simple_loss=0.2767, pruned_loss=0.04885, over 18546.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.261, pruned_loss=0.04291, over 3588938.57 frames. ], batch size: 49, lr: 7.06e-03, grad_scale: 4.0 +2023-03-09 08:20:23,721 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.871e+02 2.822e+02 3.228e+02 3.719e+02 7.082e+02, threshold=6.456e+02, percent-clipped=1.0 +2023-03-09 08:21:09,235 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56801.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:21:11,631 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56803.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:21:12,630 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4770, 6.0150, 5.5625, 5.7531, 5.5224, 5.4223, 6.0357, 5.9909], + device='cuda:1'), covar=tensor([0.1179, 0.0642, 0.0375, 0.0700, 0.1417, 0.0701, 0.0555, 0.0664], + device='cuda:1'), in_proj_covar=tensor([0.0576, 0.0489, 0.0360, 0.0516, 0.0705, 0.0516, 0.0693, 0.0522], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 08:21:20,392 INFO [train.py:898] (1/4) Epoch 16, batch 2300, loss[loss=0.1918, simple_loss=0.2832, pruned_loss=0.05024, over 18287.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2606, pruned_loss=0.04264, over 3601363.30 frames. ], batch size: 57, lr: 7.06e-03, grad_scale: 4.0 +2023-03-09 08:21:23,073 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8421, 4.4862, 4.5918, 3.4935, 3.8840, 3.6086, 3.0147, 2.5915], + device='cuda:1'), covar=tensor([0.0187, 0.0149, 0.0071, 0.0258, 0.0276, 0.0206, 0.0556, 0.0831], + device='cuda:1'), in_proj_covar=tensor([0.0065, 0.0053, 0.0056, 0.0064, 0.0084, 0.0062, 0.0074, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 08:21:38,801 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.13 vs. limit=5.0 +2023-03-09 08:22:04,698 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56849.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:22:18,546 INFO [train.py:898] (1/4) Epoch 16, batch 2350, loss[loss=0.1755, simple_loss=0.2636, pruned_loss=0.04374, over 18572.00 frames. 
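
The `zipformer.py:625` entries track stochastic whole-layer dropout per encoder stack: each stack has a warmup interval `[warmup_begin, warmup_end)` measured in batches, and the occasional `num_to_drop=1` tens of thousands of batches past `warmup_end` (e.g. `batch_count=56523.0, layers_to_drop={1}` above) points to a small residual drop probability that never fully switches off. A sketch of that selection, assuming two-rate behaviour; both probabilities are illustrative, not read from `zipformer.py`:

```python
import random

def pick_layers_to_drop(num_layers: int, batch_count: float,
                        warmup_begin: float, warmup_end: float,
                        warmup_p: float = 0.5, residual_p: float = 0.05,
                        rng=random) -> set:
    """Sketch: drop whole encoder layers with a batch-count-dependent probability."""
    in_warmup = warmup_begin <= batch_count < warmup_end
    p = warmup_p if in_warmup else residual_p
    return {i for i in range(num_layers) if rng.random() < p}
```

A log line would then report `num_to_drop=len(layers_to_drop)` and the set itself, with `set()` on the (common) steps where nothing is dropped.
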
], tot_loss[loss=0.1733, simple_loss=0.2612, pruned_loss=0.04266, over 3594313.96 frames. ], batch size: 54, lr: 7.05e-03, grad_scale: 4.0 +2023-03-09 08:22:18,940 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2122, 5.5854, 2.8510, 5.4209, 5.2800, 5.6290, 5.4211, 3.0776], + device='cuda:1'), covar=tensor([0.0148, 0.0057, 0.0693, 0.0062, 0.0058, 0.0059, 0.0084, 0.0783], + device='cuda:1'), in_proj_covar=tensor([0.0082, 0.0075, 0.0090, 0.0087, 0.0080, 0.0070, 0.0080, 0.0092], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 08:22:20,836 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.203e+02 2.978e+02 3.446e+02 4.170e+02 7.854e+02, threshold=6.893e+02, percent-clipped=4.0 +2023-03-09 08:22:22,376 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56864.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:22:39,084 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56879.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 08:23:09,868 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-09 08:23:13,909 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4103, 2.7674, 3.9502, 3.4797, 2.4210, 4.1742, 3.6915, 2.6953], + device='cuda:1'), covar=tensor([0.0575, 0.1496, 0.0309, 0.0404, 0.1672, 0.0234, 0.0560, 0.0994], + device='cuda:1'), in_proj_covar=tensor([0.0205, 0.0234, 0.0191, 0.0156, 0.0222, 0.0206, 0.0239, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 08:23:13,926 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56908.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:23:17,001 INFO [train.py:898] (1/4) Epoch 16, batch 2400, loss[loss=0.1746, simple_loss=0.2698, pruned_loss=0.03971, over 18348.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2609, pruned_loss=0.04251, over 3592423.60 frames. ], batch size: 55, lr: 7.05e-03, grad_scale: 8.0 +2023-03-09 08:23:24,480 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-09 08:24:15,263 INFO [train.py:898] (1/4) Epoch 16, batch 2450, loss[loss=0.1651, simple_loss=0.2474, pruned_loss=0.04135, over 18155.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2603, pruned_loss=0.04249, over 3591909.09 frames. 
], batch size: 44, lr: 7.05e-03, grad_scale: 8.0 +2023-03-09 08:24:17,554 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.256e+02 2.910e+02 3.453e+02 4.291e+02 1.108e+03, threshold=6.907e+02, percent-clipped=4.0 +2023-03-09 08:24:24,711 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56969.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:24:28,232 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1293, 4.2867, 2.5127, 4.2176, 5.3002, 2.6280, 3.9406, 3.9740], + device='cuda:1'), covar=tensor([0.0112, 0.1003, 0.1427, 0.0542, 0.0053, 0.1155, 0.0596, 0.0664], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0255, 0.0196, 0.0190, 0.0105, 0.0178, 0.0210, 0.0217], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 08:24:52,045 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56993.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:24:54,299 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56995.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:25:11,458 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.08 vs. limit=5.0 +2023-03-09 08:25:13,038 INFO [train.py:898] (1/4) Epoch 16, batch 2500, loss[loss=0.1702, simple_loss=0.2721, pruned_loss=0.03417, over 18317.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2603, pruned_loss=0.04261, over 3578308.80 frames. ], batch size: 54, lr: 7.04e-03, grad_scale: 8.0 +2023-03-09 08:25:33,923 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6360, 2.1084, 2.6356, 2.6525, 3.1680, 4.9450, 4.6427, 3.5328], + device='cuda:1'), covar=tensor([0.1610, 0.2450, 0.2887, 0.1704, 0.2465, 0.0181, 0.0407, 0.0798], + device='cuda:1'), in_proj_covar=tensor([0.0275, 0.0330, 0.0355, 0.0265, 0.0379, 0.0221, 0.0285, 0.0235], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 08:26:03,201 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57054.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:26:05,453 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57056.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:26:11,277 INFO [train.py:898] (1/4) Epoch 16, batch 2550, loss[loss=0.1796, simple_loss=0.2695, pruned_loss=0.04487, over 18495.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2603, pruned_loss=0.04228, over 3587327.88 frames. ], batch size: 53, lr: 7.04e-03, grad_scale: 8.0 +2023-03-09 08:26:13,775 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.929e+02 2.789e+02 3.538e+02 4.534e+02 7.082e+02, threshold=7.077e+02, percent-clipped=1.0 +2023-03-09 08:26:41,242 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9869, 4.9653, 4.6098, 4.8959, 4.8977, 4.3063, 4.8237, 4.6347], + device='cuda:1'), covar=tensor([0.0388, 0.0468, 0.1331, 0.0746, 0.0579, 0.0465, 0.0429, 0.0947], + device='cuda:1'), in_proj_covar=tensor([0.0454, 0.0513, 0.0667, 0.0411, 0.0406, 0.0474, 0.0503, 0.0634], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 08:27:09,970 INFO [train.py:898] (1/4) Epoch 16, batch 2600, loss[loss=0.1607, simple_loss=0.2508, pruned_loss=0.03524, over 18250.00 frames. 
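
The `zipformer.py:1455` diagnostics print the mean and covariance, across batches, of one attention-entropy value per head. A minimal way to compute such a per-head statistic; the tensor shape and reduction axes here are assumptions rather than the repo's exact code:

```python
import torch

def per_head_attention_entropy(attn: torch.Tensor, eps: float = 1e-20) -> torch.Tensor:
    """Sketch: entropy of softmax attention rows, averaged to one value per head.
    attn: (num_heads, num_queries, num_keys), rows already softmax-normalised."""
    entropy = -(attn * (attn + eps).log()).sum(dim=-1)  # (num_heads, num_queries)
    return entropy.mean(dim=-1)                          # (num_heads,)
```

Since row entropy is bounded by log(num_keys), logged values in the 2–6 range are consistent with attention spread over tens to a few hundred key positions; near-zero covariance, as in these entries, means the per-head entropies are stable from batch to batch.
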
], tot_loss[loss=0.1721, simple_loss=0.2602, pruned_loss=0.04203, over 3591341.33 frames. ], batch size: 47, lr: 7.04e-03, grad_scale: 8.0 +2023-03-09 08:28:05,381 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57159.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:28:07,951 INFO [train.py:898] (1/4) Epoch 16, batch 2650, loss[loss=0.196, simple_loss=0.2799, pruned_loss=0.05607, over 18478.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2604, pruned_loss=0.04204, over 3591781.14 frames. ], batch size: 59, lr: 7.04e-03, grad_scale: 4.0 +2023-03-09 08:28:11,697 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.853e+02 2.763e+02 3.335e+02 4.015e+02 1.057e+03, threshold=6.669e+02, percent-clipped=2.0 +2023-03-09 08:28:29,486 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57179.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 08:28:37,453 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57186.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:28:49,736 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57197.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:29:06,054 INFO [train.py:898] (1/4) Epoch 16, batch 2700, loss[loss=0.2404, simple_loss=0.3257, pruned_loss=0.07757, over 12950.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2617, pruned_loss=0.04238, over 3591906.62 frames. ], batch size: 130, lr: 7.03e-03, grad_scale: 4.0 +2023-03-09 08:29:25,715 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57227.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 08:29:43,941 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0468, 4.1864, 2.3271, 4.0152, 5.2749, 2.6711, 3.9213, 3.9715], + device='cuda:1'), covar=tensor([0.0155, 0.1208, 0.1778, 0.0682, 0.0073, 0.1235, 0.0715, 0.0709], + device='cuda:1'), in_proj_covar=tensor([0.0148, 0.0261, 0.0200, 0.0194, 0.0107, 0.0183, 0.0214, 0.0223], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 08:29:44,870 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57244.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:29:48,215 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57247.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:30:01,256 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57258.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 08:30:04,238 INFO [train.py:898] (1/4) Epoch 16, batch 2750, loss[loss=0.1744, simple_loss=0.2663, pruned_loss=0.04121, over 18413.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2611, pruned_loss=0.04226, over 3600175.05 frames. 
], batch size: 52, lr: 7.03e-03, grad_scale: 4.0 +2023-03-09 08:30:08,182 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.113e+02 2.970e+02 3.336e+02 3.947e+02 1.031e+03, threshold=6.671e+02, percent-clipped=3.0 +2023-03-09 08:30:08,381 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57264.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:30:46,713 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8774, 3.6938, 5.1781, 3.0995, 4.5352, 2.6239, 3.1106, 1.7750], + device='cuda:1'), covar=tensor([0.1115, 0.0809, 0.0116, 0.0747, 0.0479, 0.2301, 0.2619, 0.2036], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0232, 0.0154, 0.0184, 0.0245, 0.0259, 0.0308, 0.0226], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 08:30:55,685 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57305.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:31:02,574 INFO [train.py:898] (1/4) Epoch 16, batch 2800, loss[loss=0.1673, simple_loss=0.2418, pruned_loss=0.04638, over 18474.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2604, pruned_loss=0.04212, over 3608855.03 frames. ], batch size: 44, lr: 7.03e-03, grad_scale: 8.0 +2023-03-09 08:31:42,724 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9217, 5.1764, 2.5254, 5.0096, 4.9147, 5.1461, 4.9906, 2.5312], + device='cuda:1'), covar=tensor([0.0187, 0.0064, 0.0830, 0.0089, 0.0071, 0.0085, 0.0093, 0.1016], + device='cuda:1'), in_proj_covar=tensor([0.0082, 0.0075, 0.0091, 0.0088, 0.0081, 0.0070, 0.0080, 0.0094], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 08:31:47,152 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57349.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:31:48,375 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.6051, 5.5721, 5.1887, 5.5257, 5.5579, 4.9181, 5.4679, 5.2081], + device='cuda:1'), covar=tensor([0.0416, 0.0381, 0.1326, 0.0785, 0.0430, 0.0372, 0.0361, 0.0870], + device='cuda:1'), in_proj_covar=tensor([0.0456, 0.0517, 0.0672, 0.0414, 0.0408, 0.0475, 0.0501, 0.0639], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 08:31:49,418 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57351.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:31:56,199 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0991, 4.2399, 2.3474, 4.0369, 5.3677, 2.4329, 4.0086, 4.2125], + device='cuda:1'), covar=tensor([0.0139, 0.1307, 0.1841, 0.0738, 0.0059, 0.1558, 0.0668, 0.0615], + device='cuda:1'), in_proj_covar=tensor([0.0147, 0.0259, 0.0198, 0.0192, 0.0107, 0.0181, 0.0212, 0.0221], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 08:31:59,480 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1805, 5.5393, 2.9736, 5.3874, 5.2929, 5.5330, 5.4526, 2.8171], + device='cuda:1'), covar=tensor([0.0153, 0.0084, 0.0633, 0.0073, 0.0065, 0.0086, 0.0078, 0.0897], + device='cuda:1'), in_proj_covar=tensor([0.0082, 0.0075, 0.0091, 0.0088, 0.0081, 0.0070, 0.0080, 0.0094], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + 
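
The `grad_scale` field in the loss lines behaves like standard dynamic loss scaling for mixed-precision training: it halves when gradients overflow (8.0 → 4.0 around batches 2100 and 2650 above) and is raised back after a run of clean steps (8.0 again by batch 2800, and 16.0 later, in epoch 17). A sketch using PyTorch's real `torch.cuda.amp.GradScaler` API; the constructor values shown are illustrative, and the quick 4.0 → 8.0 recovery here implies a shorter growth interval than PyTorch's default 2000:

```python
import torch

scaler = torch.cuda.amp.GradScaler(
    init_scale=8.0,       # illustrative; this run's actual setting is not shown here
    growth_factor=2.0,    # double the scale after `growth_interval` clean steps
    backoff_factor=0.5,   # halve it whenever infs/NaNs appear in the grads
    growth_interval=2000,
)

# A typical fp16 training step; the logged grad_scale would be scaler.get_scale():
# with torch.cuda.amp.autocast():
#     loss = compute_loss(model, batch)   # hypothetical helper
# scaler.scale(loss).backward()
# scaler.step(optimizer)                  # skipped internally if grads overflowed
# scaler.update()                         # adjusts the scale up or down
```
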
device='cuda:1') +2023-03-09 08:32:00,272 INFO [train.py:898] (1/4) Epoch 16, batch 2850, loss[loss=0.1453, simple_loss=0.2301, pruned_loss=0.03028, over 18438.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2603, pruned_loss=0.04226, over 3595800.18 frames. ], batch size: 43, lr: 7.02e-03, grad_scale: 8.0 +2023-03-09 08:32:04,039 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.919e+02 2.832e+02 3.405e+02 3.993e+02 9.421e+02, threshold=6.810e+02, percent-clipped=3.0 +2023-03-09 08:32:44,118 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57398.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:32:58,701 INFO [train.py:898] (1/4) Epoch 16, batch 2900, loss[loss=0.1605, simple_loss=0.2504, pruned_loss=0.03534, over 18532.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2604, pruned_loss=0.04216, over 3601859.51 frames. ], batch size: 49, lr: 7.02e-03, grad_scale: 8.0 +2023-03-09 08:33:04,046 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.82 vs. limit=5.0 +2023-03-09 08:33:55,269 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57459.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:33:55,396 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57459.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:33:57,206 INFO [train.py:898] (1/4) Epoch 16, batch 2950, loss[loss=0.1764, simple_loss=0.2603, pruned_loss=0.04624, over 18365.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2604, pruned_loss=0.04199, over 3599868.08 frames. ], batch size: 46, lr: 7.02e-03, grad_scale: 8.0 +2023-03-09 08:33:58,726 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.6208, 6.0952, 5.5681, 5.8928, 5.7155, 5.5649, 6.1593, 6.0871], + device='cuda:1'), covar=tensor([0.0986, 0.0748, 0.0459, 0.0716, 0.1241, 0.0610, 0.0557, 0.0731], + device='cuda:1'), in_proj_covar=tensor([0.0571, 0.0489, 0.0362, 0.0513, 0.0706, 0.0513, 0.0688, 0.0521], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 08:33:58,815 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57462.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:34:00,797 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.919e+02 2.681e+02 3.202e+02 3.928e+02 6.706e+02, threshold=6.405e+02, percent-clipped=1.0 +2023-03-09 08:34:13,971 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2789, 5.2968, 4.6351, 5.2045, 5.2633, 4.6803, 5.1397, 4.8787], + device='cuda:1'), covar=tensor([0.0613, 0.0609, 0.2040, 0.0989, 0.0719, 0.0540, 0.0554, 0.1163], + device='cuda:1'), in_proj_covar=tensor([0.0457, 0.0518, 0.0673, 0.0414, 0.0406, 0.0475, 0.0504, 0.0639], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 08:34:17,962 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4234, 2.7176, 2.3904, 2.7949, 3.4394, 3.4565, 2.9464, 2.8179], + device='cuda:1'), covar=tensor([0.0218, 0.0271, 0.0596, 0.0336, 0.0209, 0.0174, 0.0357, 0.0399], + device='cuda:1'), in_proj_covar=tensor([0.0129, 0.0127, 0.0159, 0.0148, 0.0118, 0.0105, 0.0147, 0.0145], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 08:34:38,240 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, 
batch_count=57495.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:34:51,819 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57507.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:34:56,220 INFO [train.py:898] (1/4) Epoch 16, batch 3000, loss[loss=0.1826, simple_loss=0.2745, pruned_loss=0.04541, over 18394.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2601, pruned_loss=0.04187, over 3592885.82 frames. ], batch size: 50, lr: 7.01e-03, grad_scale: 8.0 +2023-03-09 08:34:56,220 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 08:35:02,138 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9243, 3.7790, 4.7229, 2.8470, 4.3085, 2.5382, 2.7982, 1.9679], + device='cuda:1'), covar=tensor([0.0973, 0.0794, 0.0137, 0.0825, 0.0464, 0.2460, 0.2788, 0.2013], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0232, 0.0154, 0.0183, 0.0244, 0.0258, 0.0307, 0.0224], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 08:35:08,108 INFO [train.py:932] (1/4) Epoch 16, validation: loss=0.1522, simple_loss=0.2529, pruned_loss=0.02576, over 944034.00 frames. +2023-03-09 08:35:08,109 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 08:35:16,839 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5613, 5.2478, 5.8352, 5.8065, 5.4384, 6.3050, 5.9620, 5.4018], + device='cuda:1'), covar=tensor([0.0836, 0.0605, 0.0567, 0.0566, 0.1283, 0.0586, 0.0548, 0.1492], + device='cuda:1'), in_proj_covar=tensor([0.0336, 0.0268, 0.0286, 0.0286, 0.0314, 0.0398, 0.0260, 0.0387], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:1') +2023-03-09 08:35:19,796 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8933, 4.4376, 4.5464, 3.4042, 3.7519, 3.6263, 2.6918, 2.4267], + device='cuda:1'), covar=tensor([0.0189, 0.0146, 0.0064, 0.0294, 0.0287, 0.0222, 0.0653, 0.0861], + device='cuda:1'), in_proj_covar=tensor([0.0066, 0.0054, 0.0057, 0.0065, 0.0086, 0.0062, 0.0075, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 08:35:23,931 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57523.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:35:44,929 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57542.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:35:57,249 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57553.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 08:36:00,820 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57556.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:36:05,943 INFO [train.py:898] (1/4) Epoch 16, batch 3050, loss[loss=0.1644, simple_loss=0.2499, pruned_loss=0.03946, over 18280.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2607, pruned_loss=0.04242, over 3594042.69 frames. 
], batch size: 49, lr: 7.01e-03, grad_scale: 8.0 +2023-03-09 08:36:09,987 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.073e+02 2.813e+02 3.441e+02 4.204e+02 1.352e+03, threshold=6.882e+02, percent-clipped=6.0 +2023-03-09 08:36:10,226 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57564.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:36:52,916 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57600.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:37:05,423 INFO [train.py:898] (1/4) Epoch 16, batch 3100, loss[loss=0.1707, simple_loss=0.2613, pruned_loss=0.04001, over 18408.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.261, pruned_loss=0.04235, over 3603143.66 frames. ], batch size: 52, lr: 7.01e-03, grad_scale: 8.0 +2023-03-09 08:37:06,734 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57612.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:37:51,289 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57649.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:37:53,474 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57651.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:38:04,647 INFO [train.py:898] (1/4) Epoch 16, batch 3150, loss[loss=0.1587, simple_loss=0.2473, pruned_loss=0.03507, over 18269.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2602, pruned_loss=0.04202, over 3588052.31 frames. ], batch size: 47, lr: 7.01e-03, grad_scale: 8.0 +2023-03-09 08:38:08,020 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.831e+02 2.982e+02 3.518e+02 4.061e+02 7.623e+02, threshold=7.037e+02, percent-clipped=2.0 +2023-03-09 08:38:48,012 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57697.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:38:50,424 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57699.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:39:03,753 INFO [train.py:898] (1/4) Epoch 16, batch 3200, loss[loss=0.1516, simple_loss=0.239, pruned_loss=0.03215, over 18528.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2604, pruned_loss=0.04235, over 3583631.07 frames. ], batch size: 49, lr: 7.00e-03, grad_scale: 8.0 +2023-03-09 08:39:30,519 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57733.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:39:54,283 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57754.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:40:02,272 INFO [train.py:898] (1/4) Epoch 16, batch 3250, loss[loss=0.154, simple_loss=0.2338, pruned_loss=0.03715, over 18467.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2595, pruned_loss=0.04202, over 3587382.10 frames. 
], batch size: 44, lr: 7.00e-03, grad_scale: 8.0 +2023-03-09 08:40:05,686 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.974e+02 2.787e+02 3.348e+02 4.125e+02 7.388e+02, threshold=6.695e+02, percent-clipped=1.0 +2023-03-09 08:40:41,830 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57794.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:40:42,942 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4098, 3.2217, 2.0007, 4.2533, 2.9289, 4.1816, 2.2639, 3.8100], + device='cuda:1'), covar=tensor([0.0678, 0.0884, 0.1463, 0.0489, 0.0839, 0.0291, 0.1261, 0.0416], + device='cuda:1'), in_proj_covar=tensor([0.0203, 0.0218, 0.0181, 0.0264, 0.0184, 0.0258, 0.0197, 0.0193], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 08:41:00,970 INFO [train.py:898] (1/4) Epoch 16, batch 3300, loss[loss=0.1878, simple_loss=0.2765, pruned_loss=0.04951, over 18295.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2589, pruned_loss=0.04181, over 3576901.33 frames. ], batch size: 60, lr: 7.00e-03, grad_scale: 8.0 +2023-03-09 08:41:09,103 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57818.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:41:37,670 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57842.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:41:47,768 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57851.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:41:50,162 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57853.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 08:41:59,095 INFO [train.py:898] (1/4) Epoch 16, batch 3350, loss[loss=0.2093, simple_loss=0.2901, pruned_loss=0.06421, over 12961.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2587, pruned_loss=0.04198, over 3587705.33 frames. ], batch size: 129, lr: 6.99e-03, grad_scale: 8.0 +2023-03-09 08:42:02,563 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.007e+02 2.795e+02 3.328e+02 4.480e+02 9.325e+02, threshold=6.655e+02, percent-clipped=2.0 +2023-03-09 08:42:33,261 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57890.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:42:44,959 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57900.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:42:45,999 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57901.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:42:57,126 INFO [train.py:898] (1/4) Epoch 16, batch 3400, loss[loss=0.1866, simple_loss=0.2739, pruned_loss=0.04964, over 18442.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2597, pruned_loss=0.04219, over 3588120.32 frames. ], batch size: 59, lr: 6.99e-03, grad_scale: 8.0 +2023-03-09 08:43:41,006 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57948.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:43:55,559 INFO [train.py:898] (1/4) Epoch 16, batch 3450, loss[loss=0.2171, simple_loss=0.2918, pruned_loss=0.07118, over 12385.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2599, pruned_loss=0.04242, over 3566078.14 frames. 
], batch size: 130, lr: 6.99e-03, grad_scale: 8.0 +2023-03-09 08:43:58,791 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.652e+02 2.617e+02 3.263e+02 4.009e+02 9.440e+02, threshold=6.526e+02, percent-clipped=3.0 +2023-03-09 08:44:01,718 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-09 08:44:09,457 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-03-09 08:44:18,203 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1897, 5.6680, 5.3152, 5.4358, 5.2644, 5.1501, 5.6972, 5.6810], + device='cuda:1'), covar=tensor([0.1092, 0.0694, 0.0555, 0.0707, 0.1363, 0.0694, 0.0604, 0.0634], + device='cuda:1'), in_proj_covar=tensor([0.0576, 0.0494, 0.0362, 0.0521, 0.0711, 0.0517, 0.0696, 0.0524], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 08:44:33,037 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.16 vs. limit=5.0 +2023-03-09 08:44:59,190 INFO [train.py:898] (1/4) Epoch 16, batch 3500, loss[loss=0.1884, simple_loss=0.2801, pruned_loss=0.04831, over 18619.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2599, pruned_loss=0.04241, over 3578319.32 frames. ], batch size: 52, lr: 6.98e-03, grad_scale: 8.0 +2023-03-09 08:45:02,168 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.85 vs. limit=5.0 +2023-03-09 08:45:08,633 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6193, 3.3321, 4.3480, 3.9142, 3.0277, 2.8216, 3.9412, 4.5797], + device='cuda:1'), covar=tensor([0.0755, 0.1304, 0.0198, 0.0396, 0.0934, 0.1134, 0.0405, 0.0223], + device='cuda:1'), in_proj_covar=tensor([0.0142, 0.0264, 0.0131, 0.0174, 0.0184, 0.0186, 0.0187, 0.0178], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 08:45:40,003 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-09 08:45:47,986 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58054.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:45:55,321 INFO [train.py:898] (1/4) Epoch 16, batch 3550, loss[loss=0.1741, simple_loss=0.2579, pruned_loss=0.0451, over 18361.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2593, pruned_loss=0.04218, over 3588077.91 frames. 
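
The `scaling.py:679` lines compare a per-group "whiteness" metric of activations against a limit (2.0 for the 8-group, 96-channel cases and 5.0 for the 1-group, 384-channel cases in these entries), presumably triggering a corrective term when exceeded. One plausible statistic with that shape, explicitly not the repo's exact formula: the spread of the within-group covariance spectrum, which equals 1.0 for a perfectly white group and grows as the spectrum skews:

```python
import torch

def whiteness_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    """Sketch: per-group covariance-spectrum spread; 1.0 when perfectly white.
    x: (num_frames, num_channels). The exact statistic in scaling.py may differ."""
    num_frames, num_channels = x.shape
    group_size = num_channels // num_groups
    metrics = []
    for xg in x.reshape(num_frames, num_groups, group_size).unbind(dim=1):
        xg = xg - xg.mean(dim=0)
        cov = (xg.T @ xg) / num_frames
        eig = torch.linalg.eigvalsh(cov)  # real eigenvalues, ascending
        metrics.append((eig ** 2).mean() / eig.mean().clamp(min=1e-20) ** 2)
    return torch.stack(metrics)           # one value per group, always >= 1.0
```

By Jensen's inequality the ratio is 1.0 exactly when all eigenvalues are equal, so values like the logged 1.27 or 1.42 sit comfortably under a limit of 2.0.
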
], batch size: 46, lr: 6.98e-03, grad_scale: 8.0 +2023-03-09 08:45:58,564 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.098e+02 2.999e+02 3.554e+02 4.304e+02 1.121e+03, threshold=7.108e+02, percent-clipped=3.0 +2023-03-09 08:46:05,922 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9568, 2.9329, 4.3308, 4.0189, 2.6580, 4.7646, 4.1516, 3.0013], + device='cuda:1'), covar=tensor([0.0416, 0.1446, 0.0362, 0.0347, 0.1557, 0.0179, 0.0426, 0.0973], + device='cuda:1'), in_proj_covar=tensor([0.0208, 0.0238, 0.0196, 0.0157, 0.0226, 0.0207, 0.0240, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 08:46:26,681 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58089.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:46:38,473 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6081, 5.1025, 5.0377, 5.0939, 4.5924, 4.9693, 4.3939, 4.9754], + device='cuda:1'), covar=tensor([0.0256, 0.0321, 0.0235, 0.0430, 0.0423, 0.0240, 0.1237, 0.0334], + device='cuda:1'), in_proj_covar=tensor([0.0198, 0.0247, 0.0240, 0.0300, 0.0254, 0.0251, 0.0300, 0.0242], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 08:46:40,535 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58102.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:46:50,129 INFO [train.py:898] (1/4) Epoch 16, batch 3600, loss[loss=0.1589, simple_loss=0.2478, pruned_loss=0.035, over 18391.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2588, pruned_loss=0.04187, over 3585608.15 frames. ], batch size: 48, lr: 6.98e-03, grad_scale: 8.0 +2023-03-09 08:46:58,053 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58118.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:47:21,673 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58141.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:47:54,109 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58144.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:47:54,859 INFO [train.py:898] (1/4) Epoch 17, batch 0, loss[loss=0.1784, simple_loss=0.2732, pruned_loss=0.04184, over 18628.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2732, pruned_loss=0.04184, over 18628.00 frames. 
], batch size: 52, lr: 6.77e-03, grad_scale: 8.0 +2023-03-09 08:47:54,859 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 08:48:01,142 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.4075, 2.0834, 2.3139, 2.3901, 2.8344, 4.1830, 3.9536, 2.8357], + device='cuda:1'), covar=tensor([0.1847, 0.2582, 0.2945, 0.1992, 0.2396, 0.0282, 0.0491, 0.0993], + device='cuda:1'), in_proj_covar=tensor([0.0276, 0.0330, 0.0356, 0.0264, 0.0376, 0.0218, 0.0282, 0.0234], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 08:48:01,428 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([6.2756, 6.6170, 6.0646, 6.4843, 6.2995, 6.2236, 6.6541, 6.6605], + device='cuda:1'), covar=tensor([0.0863, 0.0581, 0.0269, 0.0528, 0.1021, 0.0500, 0.0385, 0.0548], + device='cuda:1'), in_proj_covar=tensor([0.0578, 0.0495, 0.0361, 0.0524, 0.0711, 0.0516, 0.0696, 0.0525], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 08:48:03,426 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4129, 2.8738, 2.6693, 2.8914, 3.5343, 3.5011, 3.1120, 2.9732], + device='cuda:1'), covar=tensor([0.0177, 0.0279, 0.0567, 0.0380, 0.0176, 0.0156, 0.0352, 0.0377], + device='cuda:1'), in_proj_covar=tensor([0.0129, 0.0127, 0.0158, 0.0149, 0.0115, 0.0103, 0.0146, 0.0145], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 08:48:06,680 INFO [train.py:932] (1/4) Epoch 17, validation: loss=0.1527, simple_loss=0.2537, pruned_loss=0.02582, over 944034.00 frames. +2023-03-09 08:48:06,680 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 08:48:13,730 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58151.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:48:30,203 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.965e+02 2.893e+02 3.453e+02 4.374e+02 8.967e+02, threshold=6.906e+02, percent-clipped=3.0 +2023-03-09 08:48:32,616 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58166.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:49:05,025 INFO [train.py:898] (1/4) Epoch 17, batch 50, loss[loss=0.1503, simple_loss=0.2252, pruned_loss=0.03774, over 17662.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2651, pruned_loss=0.04339, over 812182.13 frames. 
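
The learning rate in these lines decays along two axes: within epoch 16 it drifts from 7.10e-03 down to 6.98e-03 as the batch count grows, then steps to 6.77e-03 when epoch 17 begins. A schedule with that shape, shown only to make the two-axis decay concrete; the functional form and all arguments are assumptions, not values read from this run's configuration:

```python
def two_axis_decay_lr(base_lr: float, batch: float, epoch: float,
                      lr_batches: float, lr_epochs: float) -> float:
    """Sketch: inverse-power decay in both batch count and epoch number."""
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor
```

Holding the epoch fixed, the batch factor accounts for the slow within-epoch drift; the epoch factor accounts for the discrete drop at the epoch 16 → 17 boundary.
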
], batch size: 39, lr: 6.76e-03, grad_scale: 8.0 +2023-03-09 08:49:06,430 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9601, 4.9424, 5.0637, 4.8366, 4.7244, 4.8287, 5.1570, 5.1162], + device='cuda:1'), covar=tensor([0.0074, 0.0076, 0.0059, 0.0106, 0.0069, 0.0158, 0.0094, 0.0119], + device='cuda:1'), in_proj_covar=tensor([0.0085, 0.0062, 0.0066, 0.0084, 0.0068, 0.0094, 0.0079, 0.0079], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 08:49:09,581 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58199.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:49:13,617 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58202.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:49:16,953 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58205.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:50:03,222 INFO [train.py:898] (1/4) Epoch 17, batch 100, loss[loss=0.1671, simple_loss=0.2609, pruned_loss=0.03672, over 16267.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.262, pruned_loss=0.04236, over 1430633.88 frames. ], batch size: 94, lr: 6.76e-03, grad_scale: 8.0 +2023-03-09 08:50:26,008 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.069e+02 2.900e+02 3.440e+02 4.043e+02 9.296e+02, threshold=6.881e+02, percent-clipped=1.0 +2023-03-09 08:50:36,431 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3960, 5.3087, 5.6214, 5.6682, 5.3161, 6.2193, 5.8671, 5.5152], + device='cuda:1'), covar=tensor([0.0994, 0.0609, 0.0682, 0.0570, 0.1502, 0.0673, 0.0562, 0.1695], + device='cuda:1'), in_proj_covar=tensor([0.0334, 0.0266, 0.0284, 0.0283, 0.0311, 0.0394, 0.0258, 0.0386], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:1') +2023-03-09 08:51:02,238 INFO [train.py:898] (1/4) Epoch 17, batch 150, loss[loss=0.1875, simple_loss=0.277, pruned_loss=0.04899, over 18307.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2612, pruned_loss=0.04205, over 1914395.03 frames. ], batch size: 57, lr: 6.76e-03, grad_scale: 8.0 +2023-03-09 08:52:01,292 INFO [train.py:898] (1/4) Epoch 17, batch 200, loss[loss=0.1863, simple_loss=0.274, pruned_loss=0.04929, over 18291.00 frames. ], tot_loss[loss=0.174, simple_loss=0.262, pruned_loss=0.04296, over 2279146.73 frames. ], batch size: 57, lr: 6.75e-03, grad_scale: 8.0 +2023-03-09 08:52:22,847 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.674e+02 2.879e+02 3.254e+02 3.939e+02 7.173e+02, threshold=6.508e+02, percent-clipped=1.0 +2023-03-09 08:52:53,472 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58389.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:52:59,837 INFO [train.py:898] (1/4) Epoch 17, batch 250, loss[loss=0.1505, simple_loss=0.2317, pruned_loss=0.0347, over 18514.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2595, pruned_loss=0.04158, over 2585637.81 frames. ], batch size: 44, lr: 6.75e-03, grad_scale: 8.0 +2023-03-09 08:53:36,358 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.36 vs. 
limit=5.0 +2023-03-09 08:53:50,086 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58437.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:53:59,033 INFO [train.py:898] (1/4) Epoch 17, batch 300, loss[loss=0.2277, simple_loss=0.3077, pruned_loss=0.07383, over 13035.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2605, pruned_loss=0.04202, over 2793544.01 frames. ], batch size: 130, lr: 6.75e-03, grad_scale: 8.0 +2023-03-09 08:54:20,579 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.698e+02 2.907e+02 3.543e+02 4.450e+02 1.655e+03, threshold=7.087e+02, percent-clipped=7.0 +2023-03-09 08:54:32,399 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2034, 5.1631, 4.7298, 5.1044, 5.0910, 4.4962, 4.9439, 4.7534], + device='cuda:1'), covar=tensor([0.0415, 0.0449, 0.1517, 0.0766, 0.0590, 0.0445, 0.0497, 0.1182], + device='cuda:1'), in_proj_covar=tensor([0.0453, 0.0520, 0.0664, 0.0416, 0.0408, 0.0475, 0.0505, 0.0637], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 08:54:44,886 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5807, 3.5761, 4.9870, 4.4004, 3.3155, 3.0306, 4.4905, 5.2032], + device='cuda:1'), covar=tensor([0.0833, 0.1643, 0.0133, 0.0365, 0.0873, 0.1116, 0.0327, 0.0210], + device='cuda:1'), in_proj_covar=tensor([0.0143, 0.0262, 0.0130, 0.0174, 0.0185, 0.0184, 0.0185, 0.0177], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 08:54:52,715 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5775, 3.5983, 5.0412, 4.4643, 3.2262, 3.0533, 4.4791, 5.2325], + device='cuda:1'), covar=tensor([0.0834, 0.1658, 0.0157, 0.0346, 0.0931, 0.1153, 0.0351, 0.0270], + device='cuda:1'), in_proj_covar=tensor([0.0143, 0.0262, 0.0130, 0.0173, 0.0185, 0.0184, 0.0185, 0.0177], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 08:54:57,865 INFO [train.py:898] (1/4) Epoch 17, batch 350, loss[loss=0.1865, simple_loss=0.2802, pruned_loss=0.04639, over 18273.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2599, pruned_loss=0.04178, over 2976016.71 frames. ], batch size: 60, lr: 6.75e-03, grad_scale: 8.0 +2023-03-09 08:55:00,143 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58497.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:55:03,537 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58500.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:55:47,915 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5650, 3.2945, 2.1398, 4.3740, 2.8846, 4.0894, 2.1607, 3.7684], + device='cuda:1'), covar=tensor([0.0573, 0.0911, 0.1561, 0.0538, 0.0978, 0.0393, 0.1399, 0.0524], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0222, 0.0187, 0.0270, 0.0189, 0.0262, 0.0201, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 08:55:56,653 INFO [train.py:898] (1/4) Epoch 17, batch 400, loss[loss=0.1831, simple_loss=0.2763, pruned_loss=0.04495, over 17851.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2601, pruned_loss=0.04169, over 3115140.25 frames. 
], batch size: 65, lr: 6.74e-03, grad_scale: 8.0 +2023-03-09 08:56:18,008 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.966e+02 2.743e+02 3.133e+02 4.342e+02 9.924e+02, threshold=6.265e+02, percent-clipped=3.0 +2023-03-09 08:56:54,616 INFO [train.py:898] (1/4) Epoch 17, batch 450, loss[loss=0.2015, simple_loss=0.2898, pruned_loss=0.0566, over 17955.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2605, pruned_loss=0.0418, over 3221127.43 frames. ], batch size: 65, lr: 6.74e-03, grad_scale: 8.0 +2023-03-09 08:56:55,959 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2439, 5.1695, 5.4879, 5.5406, 5.1526, 6.0616, 5.6801, 5.4273], + device='cuda:1'), covar=tensor([0.1037, 0.0628, 0.0748, 0.0679, 0.1414, 0.0687, 0.0614, 0.1478], + device='cuda:1'), in_proj_covar=tensor([0.0336, 0.0267, 0.0286, 0.0285, 0.0312, 0.0395, 0.0259, 0.0385], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:1') +2023-03-09 08:57:21,393 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-09 08:57:40,609 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9996, 4.9628, 4.6026, 4.8592, 4.8887, 4.3049, 4.7876, 4.5862], + device='cuda:1'), covar=tensor([0.0384, 0.0431, 0.1279, 0.0848, 0.0552, 0.0405, 0.0463, 0.1052], + device='cuda:1'), in_proj_covar=tensor([0.0454, 0.0521, 0.0668, 0.0417, 0.0408, 0.0476, 0.0507, 0.0639], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 08:57:52,503 INFO [train.py:898] (1/4) Epoch 17, batch 500, loss[loss=0.1903, simple_loss=0.2742, pruned_loss=0.05313, over 18348.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2607, pruned_loss=0.04192, over 3312785.63 frames. ], batch size: 56, lr: 6.74e-03, grad_scale: 8.0 +2023-03-09 08:58:00,996 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.22 vs. limit=5.0 +2023-03-09 08:58:13,807 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.815e+02 2.817e+02 3.097e+02 3.778e+02 7.071e+02, threshold=6.194e+02, percent-clipped=1.0 +2023-03-09 08:58:33,079 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58681.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:58:49,767 INFO [train.py:898] (1/4) Epoch 17, batch 550, loss[loss=0.162, simple_loss=0.2525, pruned_loss=0.03578, over 17013.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2606, pruned_loss=0.04199, over 3369803.25 frames. ], batch size: 78, lr: 6.73e-03, grad_scale: 8.0 +2023-03-09 08:59:04,683 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-09 08:59:44,837 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58742.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:59:47,843 INFO [train.py:898] (1/4) Epoch 17, batch 600, loss[loss=0.1655, simple_loss=0.2536, pruned_loss=0.03877, over 18383.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2602, pruned_loss=0.04181, over 3402106.82 frames. ], batch size: 50, lr: 6.73e-03, grad_scale: 8.0 +2023-03-09 09:00:09,620 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.181e+02 2.820e+02 3.241e+02 3.843e+02 6.469e+02, threshold=6.481e+02, percent-clipped=2.0 +2023-03-09 09:00:15,966 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.50 vs. 
limit=5.0 +2023-03-09 09:00:17,819 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3871, 5.3273, 4.9326, 5.2435, 5.2792, 4.6261, 5.1365, 4.9301], + device='cuda:1'), covar=tensor([0.0364, 0.0398, 0.1221, 0.0875, 0.0510, 0.0406, 0.0427, 0.1047], + device='cuda:1'), in_proj_covar=tensor([0.0454, 0.0519, 0.0665, 0.0413, 0.0406, 0.0474, 0.0506, 0.0636], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 09:00:22,444 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58775.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:00:45,234 INFO [train.py:898] (1/4) Epoch 17, batch 650, loss[loss=0.1707, simple_loss=0.2677, pruned_loss=0.03686, over 17719.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2592, pruned_loss=0.04159, over 3440382.22 frames. ], batch size: 70, lr: 6.73e-03, grad_scale: 8.0 +2023-03-09 09:00:48,330 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58797.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:00:52,715 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58800.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:01:17,485 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9548, 4.9220, 5.0326, 4.7742, 4.7357, 4.7850, 5.1502, 5.1281], + device='cuda:1'), covar=tensor([0.0068, 0.0061, 0.0055, 0.0100, 0.0064, 0.0139, 0.0069, 0.0084], + device='cuda:1'), in_proj_covar=tensor([0.0087, 0.0064, 0.0067, 0.0086, 0.0069, 0.0096, 0.0080, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003], + device='cuda:1') +2023-03-09 09:01:34,490 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58836.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:01:44,399 INFO [train.py:898] (1/4) Epoch 17, batch 700, loss[loss=0.1569, simple_loss=0.2426, pruned_loss=0.0356, over 18493.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2588, pruned_loss=0.04146, over 3476533.11 frames. ], batch size: 47, lr: 6.73e-03, grad_scale: 8.0 +2023-03-09 09:01:44,611 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58845.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:01:48,375 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58848.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:01:48,828 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-09 09:02:07,827 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.002e+02 2.825e+02 3.273e+02 3.706e+02 6.863e+02, threshold=6.547e+02, percent-clipped=2.0 +2023-03-09 09:02:09,405 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.17 vs. limit=5.0 +2023-03-09 09:02:42,597 INFO [train.py:898] (1/4) Epoch 17, batch 750, loss[loss=0.1535, simple_loss=0.2443, pruned_loss=0.03136, over 18359.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2592, pruned_loss=0.04148, over 3505628.08 frames. ], batch size: 50, lr: 6.72e-03, grad_scale: 8.0 +2023-03-09 09:02:56,525 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58906.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:03:40,661 INFO [train.py:898] (1/4) Epoch 17, batch 800, loss[loss=0.1792, simple_loss=0.276, pruned_loss=0.04118, over 18362.00 frames. 
], tot_loss[loss=0.1702, simple_loss=0.2584, pruned_loss=0.04105, over 3534864.31 frames. ], batch size: 55, lr: 6.72e-03, grad_scale: 8.0 +2023-03-09 09:04:04,581 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.057e+02 2.797e+02 3.307e+02 3.815e+02 9.263e+02, threshold=6.613e+02, percent-clipped=2.0 +2023-03-09 09:04:08,293 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58967.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:04:20,990 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-03-09 09:04:29,968 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4823, 6.0389, 5.4836, 5.8157, 5.6040, 5.4481, 6.0532, 6.0201], + device='cuda:1'), covar=tensor([0.1121, 0.0584, 0.0477, 0.0648, 0.1151, 0.0706, 0.0507, 0.0622], + device='cuda:1'), in_proj_covar=tensor([0.0587, 0.0502, 0.0366, 0.0530, 0.0720, 0.0525, 0.0710, 0.0533], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 09:04:38,750 INFO [train.py:898] (1/4) Epoch 17, batch 850, loss[loss=0.1853, simple_loss=0.2679, pruned_loss=0.0513, over 17073.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.258, pruned_loss=0.04091, over 3560105.79 frames. ], batch size: 78, lr: 6.72e-03, grad_scale: 8.0 +2023-03-09 09:04:40,708 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.21 vs. limit=5.0 +2023-03-09 09:04:42,444 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7434, 5.3039, 5.2924, 5.2539, 4.7926, 5.1665, 4.5691, 5.1545], + device='cuda:1'), covar=tensor([0.0256, 0.0257, 0.0183, 0.0434, 0.0419, 0.0222, 0.1131, 0.0321], + device='cuda:1'), in_proj_covar=tensor([0.0203, 0.0248, 0.0240, 0.0305, 0.0257, 0.0251, 0.0300, 0.0244], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 09:05:28,782 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59037.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:05:37,752 INFO [train.py:898] (1/4) Epoch 17, batch 900, loss[loss=0.1535, simple_loss=0.2508, pruned_loss=0.02807, over 18295.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2575, pruned_loss=0.04056, over 3577708.51 frames. ], batch size: 54, lr: 6.71e-03, grad_scale: 8.0 +2023-03-09 09:05:59,618 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.891e+02 2.805e+02 3.225e+02 3.970e+02 5.951e+02, threshold=6.451e+02, percent-clipped=0.0 +2023-03-09 09:06:36,334 INFO [train.py:898] (1/4) Epoch 17, batch 950, loss[loss=0.1651, simple_loss=0.2613, pruned_loss=0.03443, over 18298.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2577, pruned_loss=0.04049, over 3586290.97 frames. ], batch size: 54, lr: 6.71e-03, grad_scale: 8.0 +2023-03-09 09:07:14,652 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. 
limit=2.0 +2023-03-09 09:07:17,736 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5775, 2.1719, 2.5326, 2.7034, 3.0178, 4.5130, 4.3617, 3.3372], + device='cuda:1'), covar=tensor([0.1635, 0.2428, 0.2728, 0.1705, 0.2232, 0.0249, 0.0406, 0.0806], + device='cuda:1'), in_proj_covar=tensor([0.0276, 0.0331, 0.0360, 0.0265, 0.0378, 0.0222, 0.0285, 0.0236], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 09:07:19,808 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59131.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:07:23,770 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-03-09 09:07:35,505 INFO [train.py:898] (1/4) Epoch 17, batch 1000, loss[loss=0.1652, simple_loss=0.2559, pruned_loss=0.03721, over 17973.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2571, pruned_loss=0.04011, over 3594860.30 frames. ], batch size: 65, lr: 6.71e-03, grad_scale: 16.0 +2023-03-09 09:07:56,827 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.800e+02 2.712e+02 3.112e+02 3.820e+02 1.157e+03, threshold=6.224e+02, percent-clipped=3.0 +2023-03-09 09:07:57,127 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59164.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:08:33,587 INFO [train.py:898] (1/4) Epoch 17, batch 1050, loss[loss=0.1555, simple_loss=0.2351, pruned_loss=0.03794, over 18252.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2583, pruned_loss=0.04084, over 3585673.85 frames. ], batch size: 45, lr: 6.71e-03, grad_scale: 16.0 +2023-03-09 09:08:43,541 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2238, 4.3115, 2.5732, 4.1930, 5.3402, 2.8928, 4.0106, 4.2430], + device='cuda:1'), covar=tensor([0.0112, 0.1146, 0.1493, 0.0592, 0.0063, 0.1050, 0.0565, 0.0579], + device='cuda:1'), in_proj_covar=tensor([0.0147, 0.0256, 0.0196, 0.0189, 0.0108, 0.0176, 0.0208, 0.0215], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 09:08:51,270 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1088, 5.1011, 5.2240, 4.9203, 4.9872, 4.9780, 5.3572, 5.3359], + device='cuda:1'), covar=tensor([0.0064, 0.0055, 0.0050, 0.0096, 0.0057, 0.0137, 0.0061, 0.0084], + device='cuda:1'), in_proj_covar=tensor([0.0087, 0.0064, 0.0068, 0.0087, 0.0070, 0.0097, 0.0081, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003], + device='cuda:1') +2023-03-09 09:09:06,531 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.6094, 6.1400, 5.6074, 5.9606, 5.7646, 5.6043, 6.2321, 6.1306], + device='cuda:1'), covar=tensor([0.1175, 0.0783, 0.0417, 0.0655, 0.1394, 0.0649, 0.0528, 0.0697], + device='cuda:1'), in_proj_covar=tensor([0.0588, 0.0504, 0.0368, 0.0531, 0.0722, 0.0527, 0.0710, 0.0534], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 09:09:08,979 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59225.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:09:09,176 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. 
limit=2.0 +2023-03-09 09:09:32,434 INFO [train.py:898] (1/4) Epoch 17, batch 1100, loss[loss=0.1576, simple_loss=0.2396, pruned_loss=0.0378, over 18478.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2584, pruned_loss=0.04114, over 3584358.25 frames. ], batch size: 44, lr: 6.70e-03, grad_scale: 16.0 +2023-03-09 09:09:52,014 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59262.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:09:54,103 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.737e+02 2.749e+02 3.337e+02 4.015e+02 7.609e+02, threshold=6.673e+02, percent-clipped=2.0 +2023-03-09 09:10:31,689 INFO [train.py:898] (1/4) Epoch 17, batch 1150, loss[loss=0.1779, simple_loss=0.264, pruned_loss=0.0459, over 18340.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2589, pruned_loss=0.04154, over 3586320.72 frames. ], batch size: 46, lr: 6.70e-03, grad_scale: 16.0 +2023-03-09 09:11:19,746 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59337.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:11:29,507 INFO [train.py:898] (1/4) Epoch 17, batch 1200, loss[loss=0.1768, simple_loss=0.2704, pruned_loss=0.04167, over 18484.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2592, pruned_loss=0.04162, over 3583452.22 frames. ], batch size: 51, lr: 6.70e-03, grad_scale: 16.0 +2023-03-09 09:11:42,571 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6629, 3.7185, 5.0919, 4.4858, 3.2743, 3.1670, 4.3907, 5.3131], + device='cuda:1'), covar=tensor([0.0900, 0.1576, 0.0142, 0.0342, 0.0907, 0.1034, 0.0357, 0.0265], + device='cuda:1'), in_proj_covar=tensor([0.0144, 0.0262, 0.0132, 0.0173, 0.0184, 0.0184, 0.0186, 0.0177], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 09:11:51,082 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.028e+02 2.851e+02 3.293e+02 4.093e+02 6.542e+02, threshold=6.585e+02, percent-clipped=0.0 +2023-03-09 09:12:15,543 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59385.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:12:25,304 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5893, 2.4648, 2.6777, 2.7817, 3.4302, 5.2170, 5.0676, 3.6456], + device='cuda:1'), covar=tensor([0.1642, 0.2222, 0.2800, 0.1597, 0.2082, 0.0143, 0.0280, 0.0777], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0334, 0.0362, 0.0265, 0.0379, 0.0223, 0.0286, 0.0236], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 09:12:28,178 INFO [train.py:898] (1/4) Epoch 17, batch 1250, loss[loss=0.2001, simple_loss=0.2895, pruned_loss=0.05539, over 17160.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2592, pruned_loss=0.04167, over 3586551.66 frames. 
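The [optim.py:369] entries above log the distribution of recent gradient norms as quartiles (min, 25%, 50%, 75%, max), the clipping threshold in force, and the share of recent batches that were clipped. A minimal sketch of how a median-based clipping threshold like this can be maintained is below; the function name, window size, and bookkeeping are hypothetical, not the actual optim.py code.

```python
import torch

def clip_with_quartile_stats(parameters, recent_norms,
                             clipping_scale=2.0, window=1000):
    """Hypothetical sketch: track a window of recent gradient norms,
    set threshold = clipping_scale * median, clip the current step
    against it, and expose the same quartiles the log lines report."""
    grads = [p.grad for p in parameters if p.grad is not None]
    norm = torch.norm(torch.stack([g.detach().norm() for g in grads]))
    recent_norms.append(norm.item())
    del recent_norms[:-window]  # keep only the most recent steps

    q = torch.quantile(torch.tensor(recent_norms),
                       torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2].item()  # e.g. 2.0 x median
    clipped = norm.item() > threshold
    if clipped:
        for g in grads:
            g.mul_(threshold / norm.item())  # rescale gradients in place
    return q, threshold, clipped
```

percent-clipped in the log would then be the fraction of recent steps for which `clipped` came back True.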
], batch size: 78, lr: 6.69e-03, grad_scale: 8.0 +2023-03-09 09:13:05,700 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3997, 5.8728, 5.4494, 5.7039, 5.5254, 5.4215, 6.0060, 5.9545], + device='cuda:1'), covar=tensor([0.1214, 0.0941, 0.0486, 0.0801, 0.1449, 0.0700, 0.0602, 0.0717], + device='cuda:1'), in_proj_covar=tensor([0.0587, 0.0504, 0.0366, 0.0527, 0.0722, 0.0524, 0.0706, 0.0531], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 09:13:09,127 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59431.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:13:21,967 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5894, 2.2089, 2.5285, 2.7370, 3.1165, 4.9243, 4.7399, 3.3615], + device='cuda:1'), covar=tensor([0.1738, 0.2428, 0.2913, 0.1650, 0.2461, 0.0200, 0.0351, 0.0860], + device='cuda:1'), in_proj_covar=tensor([0.0279, 0.0334, 0.0363, 0.0266, 0.0381, 0.0223, 0.0286, 0.0237], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 09:13:26,495 INFO [train.py:898] (1/4) Epoch 17, batch 1300, loss[loss=0.1786, simple_loss=0.2703, pruned_loss=0.04343, over 18495.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2587, pruned_loss=0.04135, over 3582274.95 frames. ], batch size: 51, lr: 6.69e-03, grad_scale: 8.0 +2023-03-09 09:13:48,933 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.812e+02 2.789e+02 3.310e+02 4.090e+02 6.733e+02, threshold=6.621e+02, percent-clipped=2.0 +2023-03-09 09:14:05,083 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59479.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:14:06,363 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4305, 5.9931, 5.4221, 5.7923, 5.6000, 5.5171, 6.0695, 6.0163], + device='cuda:1'), covar=tensor([0.1145, 0.0689, 0.0465, 0.0671, 0.1198, 0.0613, 0.0495, 0.0584], + device='cuda:1'), in_proj_covar=tensor([0.0585, 0.0502, 0.0364, 0.0524, 0.0721, 0.0523, 0.0704, 0.0530], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 09:14:24,116 INFO [train.py:898] (1/4) Epoch 17, batch 1350, loss[loss=0.1995, simple_loss=0.2762, pruned_loss=0.0614, over 12858.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2594, pruned_loss=0.04149, over 3583108.52 frames. ], batch size: 131, lr: 6.69e-03, grad_scale: 8.0 +2023-03-09 09:14:33,565 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59502.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:14:47,664 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-09 09:14:53,677 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59520.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:15:17,235 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.21 vs. limit=5.0 +2023-03-09 09:15:22,957 INFO [train.py:898] (1/4) Epoch 17, batch 1400, loss[loss=0.1531, simple_loss=0.2336, pruned_loss=0.03633, over 18442.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.259, pruned_loss=0.04125, over 3580550.80 frames. 
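The lr field in these lines decays smoothly both within and across epochs (6.69e-03 here, down from 6.72e-03 at the start of the epoch). The trajectory is consistent with the Eden-style schedule used in pruned_transducer recipes, treated here as an assumption; base_lr=0.05, lr_batches=5000, and lr_epochs=3.5 are taken from this run's configuration dump, and the scheduler's epoch index is assumed to be one behind the printed epoch number, which makes the values line up.

```python
def eden_lr(base_lr, batch, epoch, lr_batches=5000.0, lr_epochs=3.5):
    """Eden-style learning-rate schedule: two smooth inverse-quartic
    decay factors, one driven by the batch count and one by the epoch.
    The hyperparameter values are assumptions taken from this run."""
    return (base_lr
            * ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
            * ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25)

# Around batch_count 59500 in epoch 17 this reproduces "lr: 6.69e-03".
print(f"{eden_lr(0.05, batch=59500, epoch=16):.2e}")
```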
], batch size: 43, lr: 6.69e-03, grad_scale: 8.0 +2023-03-09 09:15:43,356 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59562.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:15:44,567 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59563.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:15:46,381 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.823e+02 2.959e+02 3.481e+02 4.444e+02 9.729e+02, threshold=6.962e+02, percent-clipped=6.0 +2023-03-09 09:16:19,251 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3207, 3.2436, 2.0619, 4.0768, 2.7734, 3.8794, 2.1161, 3.4573], + device='cuda:1'), covar=tensor([0.0621, 0.0797, 0.1378, 0.0422, 0.0853, 0.0302, 0.1265, 0.0456], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0216, 0.0184, 0.0267, 0.0187, 0.0261, 0.0199, 0.0193], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 09:16:21,024 INFO [train.py:898] (1/4) Epoch 17, batch 1450, loss[loss=0.1845, simple_loss=0.2763, pruned_loss=0.04635, over 18158.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2591, pruned_loss=0.041, over 3587285.31 frames. ], batch size: 62, lr: 6.68e-03, grad_scale: 8.0 +2023-03-09 09:16:28,572 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-09 09:16:40,244 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59610.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:16:41,527 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1902, 5.1640, 4.7655, 5.1319, 5.0749, 4.4957, 5.0213, 4.7806], + device='cuda:1'), covar=tensor([0.0421, 0.0509, 0.1461, 0.0703, 0.0624, 0.0476, 0.0422, 0.1113], + device='cuda:1'), in_proj_covar=tensor([0.0462, 0.0521, 0.0676, 0.0414, 0.0412, 0.0482, 0.0509, 0.0642], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 09:17:20,445 INFO [train.py:898] (1/4) Epoch 17, batch 1500, loss[loss=0.1558, simple_loss=0.2433, pruned_loss=0.03409, over 18383.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2592, pruned_loss=0.04105, over 3593838.19 frames. ], batch size: 46, lr: 6.68e-03, grad_scale: 8.0 +2023-03-09 09:17:44,116 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.960e+02 2.886e+02 3.557e+02 4.310e+02 1.324e+03, threshold=7.115e+02, percent-clipped=4.0 +2023-03-09 09:18:10,311 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-03-09 09:18:18,453 INFO [train.py:898] (1/4) Epoch 17, batch 1550, loss[loss=0.1564, simple_loss=0.2427, pruned_loss=0.03507, over 18356.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2595, pruned_loss=0.04129, over 3583598.62 frames. 
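The [zipformer.py:625] lines record, for one encoder stack, its warmup window and which of its layers were randomly bypassed on this batch. This far into training (batch_count near 59600, long past every warmup_end of 4000 or below), num_to_drop=0 on almost every step, with the occasional single-layer drop. A hedged sketch of such a schedule is below; the probabilities are made up for illustration and are not the actual zipformer.py values.

```python
import random

def pick_layers_to_drop(batch_count, warmup_begin, warmup_end,
                        num_layers, initial_prob=0.5, final_prob=0.05):
    """Hypothetical layer-drop schedule: bypass whole layers often while
    a stack is still warming up, only rarely afterwards, matching the
    mostly empty layers_to_drop sets in the entries above."""
    if batch_count < warmup_begin:
        prob = initial_prob
    elif batch_count < warmup_end:
        # anneal linearly across the stack's warmup window
        frac = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
        prob = initial_prob + frac * (final_prob - initial_prob)
    else:
        prob = final_prob
    layers_to_drop = {i for i in range(num_layers)
                      if random.random() < prob}
    return len(layers_to_drop), layers_to_drop

num, dropped = pick_layers_to_drop(59600, 1333.3, 2000.0, num_layers=4)
print(f"num_to_drop={num}, layers_to_drop={dropped}")
```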
], batch size: 46, lr: 6.68e-03, grad_scale: 8.0 +2023-03-09 09:19:04,774 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7628, 4.4265, 4.5653, 3.3632, 3.7134, 3.4300, 2.4340, 2.5762], + device='cuda:1'), covar=tensor([0.0211, 0.0166, 0.0077, 0.0293, 0.0335, 0.0205, 0.0805, 0.0784], + device='cuda:1'), in_proj_covar=tensor([0.0067, 0.0055, 0.0058, 0.0065, 0.0087, 0.0063, 0.0075, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 09:19:16,813 INFO [train.py:898] (1/4) Epoch 17, batch 1600, loss[loss=0.1591, simple_loss=0.229, pruned_loss=0.04461, over 18088.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2589, pruned_loss=0.04099, over 3588924.06 frames. ], batch size: 40, lr: 6.67e-03, grad_scale: 8.0 +2023-03-09 09:19:24,900 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-09 09:19:41,445 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.908e+02 2.635e+02 3.092e+02 3.637e+02 7.676e+02, threshold=6.183e+02, percent-clipped=1.0 +2023-03-09 09:20:15,880 INFO [train.py:898] (1/4) Epoch 17, batch 1650, loss[loss=0.1399, simple_loss=0.2204, pruned_loss=0.02974, over 18489.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2588, pruned_loss=0.04086, over 3570625.49 frames. ], batch size: 44, lr: 6.67e-03, grad_scale: 8.0 +2023-03-09 09:20:46,575 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59820.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:20:52,632 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. limit=2.0 +2023-03-09 09:21:14,487 INFO [train.py:898] (1/4) Epoch 17, batch 1700, loss[loss=0.1715, simple_loss=0.2659, pruned_loss=0.03854, over 18587.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2586, pruned_loss=0.04086, over 3582916.26 frames. ], batch size: 54, lr: 6.67e-03, grad_scale: 8.0 +2023-03-09 09:21:18,250 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59848.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:21:30,998 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59858.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:21:38,856 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.932e+02 2.816e+02 3.196e+02 3.802e+02 1.399e+03, threshold=6.391e+02, percent-clipped=5.0 +2023-03-09 09:21:43,515 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59868.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:22:07,232 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59889.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:22:13,228 INFO [train.py:898] (1/4) Epoch 17, batch 1750, loss[loss=0.1905, simple_loss=0.2781, pruned_loss=0.05146, over 18352.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2587, pruned_loss=0.04101, over 3576341.58 frames. 
], batch size: 55, lr: 6.67e-03, grad_scale: 8.0 +2023-03-09 09:22:14,680 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9279, 4.5931, 4.6590, 3.6527, 3.9319, 3.6622, 2.8993, 2.5966], + device='cuda:1'), covar=tensor([0.0170, 0.0145, 0.0070, 0.0246, 0.0252, 0.0171, 0.0573, 0.0784], + device='cuda:1'), in_proj_covar=tensor([0.0066, 0.0055, 0.0058, 0.0065, 0.0087, 0.0063, 0.0075, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 09:22:28,171 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-03-09 09:22:30,628 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59909.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:22:32,709 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3520, 5.9315, 5.4689, 5.6788, 5.4877, 5.3894, 5.9607, 5.9094], + device='cuda:1'), covar=tensor([0.1333, 0.0630, 0.0515, 0.0641, 0.1516, 0.0633, 0.0612, 0.0623], + device='cuda:1'), in_proj_covar=tensor([0.0586, 0.0499, 0.0367, 0.0523, 0.0723, 0.0524, 0.0710, 0.0534], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 09:22:38,377 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5051, 3.4812, 2.3382, 4.3311, 3.1525, 4.3551, 2.3843, 3.9271], + device='cuda:1'), covar=tensor([0.0631, 0.0799, 0.1454, 0.0513, 0.0844, 0.0317, 0.1196, 0.0427], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0217, 0.0186, 0.0270, 0.0188, 0.0261, 0.0199, 0.0193], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 09:22:56,359 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4186, 3.8825, 3.9078, 3.0520, 3.3536, 3.1016, 2.4091, 2.3515], + device='cuda:1'), covar=tensor([0.0225, 0.0184, 0.0119, 0.0331, 0.0367, 0.0253, 0.0740, 0.0826], + device='cuda:1'), in_proj_covar=tensor([0.0067, 0.0055, 0.0058, 0.0065, 0.0087, 0.0063, 0.0076, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 09:23:11,677 INFO [train.py:898] (1/4) Epoch 17, batch 1800, loss[loss=0.1488, simple_loss=0.2301, pruned_loss=0.0338, over 18486.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2584, pruned_loss=0.04075, over 3582204.92 frames. ], batch size: 44, lr: 6.66e-03, grad_scale: 8.0 +2023-03-09 09:23:17,897 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59950.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:23:35,352 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.961e+02 2.799e+02 3.065e+02 3.642e+02 5.911e+02, threshold=6.130e+02, percent-clipped=0.0 +2023-03-09 09:24:09,563 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4040, 3.3588, 3.3111, 2.8167, 3.1810, 2.4936, 2.5270, 3.2178], + device='cuda:1'), covar=tensor([0.0076, 0.0111, 0.0100, 0.0179, 0.0114, 0.0262, 0.0259, 0.0092], + device='cuda:1'), in_proj_covar=tensor([0.0124, 0.0146, 0.0125, 0.0178, 0.0130, 0.0169, 0.0173, 0.0109], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 09:24:10,360 INFO [train.py:898] (1/4) Epoch 17, batch 1850, loss[loss=0.1662, simple_loss=0.263, pruned_loss=0.03472, over 18298.00 frames. 
], tot_loss[loss=0.1694, simple_loss=0.2576, pruned_loss=0.04058, over 3583187.22 frames. ], batch size: 54, lr: 6.66e-03, grad_scale: 8.0 +2023-03-09 09:25:08,325 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60040.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:25:13,961 INFO [train.py:898] (1/4) Epoch 17, batch 1900, loss[loss=0.1519, simple_loss=0.2354, pruned_loss=0.03418, over 17618.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2576, pruned_loss=0.04069, over 3569810.54 frames. ], batch size: 39, lr: 6.66e-03, grad_scale: 8.0 +2023-03-09 09:25:22,392 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60052.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:25:37,666 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.972e+02 2.751e+02 3.346e+02 4.320e+02 1.006e+03, threshold=6.692e+02, percent-clipped=5.0 +2023-03-09 09:25:37,988 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60065.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:25:53,016 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60078.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:26:12,274 INFO [train.py:898] (1/4) Epoch 17, batch 1950, loss[loss=0.1912, simple_loss=0.2838, pruned_loss=0.04932, over 18315.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2582, pruned_loss=0.04094, over 3574256.35 frames. ], batch size: 54, lr: 6.66e-03, grad_scale: 8.0 +2023-03-09 09:26:19,430 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60101.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 09:26:29,885 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.97 vs. limit=5.0 +2023-03-09 09:26:33,392 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60113.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 09:26:48,306 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60126.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:27:04,090 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60139.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:27:10,622 INFO [train.py:898] (1/4) Epoch 17, batch 2000, loss[loss=0.1511, simple_loss=0.2411, pruned_loss=0.03057, over 18148.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2588, pruned_loss=0.04138, over 3581493.37 frames. ], batch size: 44, lr: 6.65e-03, grad_scale: 8.0 +2023-03-09 09:27:25,234 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60158.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:27:33,339 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.223e+02 2.887e+02 3.368e+02 4.161e+02 9.381e+02, threshold=6.736e+02, percent-clipped=4.0 +2023-03-09 09:28:08,977 INFO [train.py:898] (1/4) Epoch 17, batch 2050, loss[loss=0.1587, simple_loss=0.2353, pruned_loss=0.04102, over 18112.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2598, pruned_loss=0.04193, over 3571477.91 frames. 
], batch size: 44, lr: 6.65e-03, grad_scale: 8.0 +2023-03-09 09:28:12,812 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60198.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:28:19,688 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60204.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:28:21,876 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60206.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:28:23,100 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6723, 5.1993, 5.1727, 5.1978, 4.7059, 5.0587, 4.4848, 5.0732], + device='cuda:1'), covar=tensor([0.0251, 0.0286, 0.0204, 0.0435, 0.0415, 0.0250, 0.1144, 0.0333], + device='cuda:1'), in_proj_covar=tensor([0.0202, 0.0248, 0.0239, 0.0302, 0.0258, 0.0251, 0.0298, 0.0245], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 09:29:07,879 INFO [train.py:898] (1/4) Epoch 17, batch 2100, loss[loss=0.2027, simple_loss=0.2858, pruned_loss=0.05979, over 18480.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2588, pruned_loss=0.04146, over 3581086.35 frames. ], batch size: 53, lr: 6.65e-03, grad_scale: 8.0 +2023-03-09 09:29:08,072 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60245.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:29:24,088 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60259.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:29:30,558 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.013e+02 2.803e+02 3.288e+02 3.914e+02 1.145e+03, threshold=6.576e+02, percent-clipped=2.0 +2023-03-09 09:30:04,335 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-03-09 09:30:07,000 INFO [train.py:898] (1/4) Epoch 17, batch 2150, loss[loss=0.1754, simple_loss=0.2693, pruned_loss=0.04075, over 18579.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2589, pruned_loss=0.04136, over 3581258.59 frames. ], batch size: 54, lr: 6.64e-03, grad_scale: 8.0 +2023-03-09 09:31:03,784 INFO [train.py:898] (1/4) Epoch 17, batch 2200, loss[loss=0.1934, simple_loss=0.2836, pruned_loss=0.05162, over 18470.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2589, pruned_loss=0.04139, over 3591216.13 frames. ], batch size: 59, lr: 6.64e-03, grad_scale: 8.0 +2023-03-09 09:31:26,300 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.966e+02 2.999e+02 3.869e+02 4.912e+02 1.337e+03, threshold=7.738e+02, percent-clipped=7.0 +2023-03-09 09:31:56,408 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.84 vs. limit=2.0 +2023-03-09 09:32:01,279 INFO [train.py:898] (1/4) Epoch 17, batch 2250, loss[loss=0.1534, simple_loss=0.2458, pruned_loss=0.03052, over 18505.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2589, pruned_loss=0.04138, over 3583743.02 frames. 
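grad_scale in the training lines behaves like PyTorch's dynamic fp16 loss scale: it is halved when a step overflows (16.0 dropped to 8.0 earlier in this epoch, and 8.0 drops to 4.0 at batch 2250 just below) and grown back after a stable stretch. A minimal runnable illustration with the stock torch.cuda.amp.GradScaler, assuming a CUDA device; the recipe's own scaler handling lives in train.py.

```python
import torch

device = "cuda"
model = torch.nn.Linear(10, 1).to(device)
opt = torch.optim.SGD(model.parameters(), lr=0.05)
scaler = torch.cuda.amp.GradScaler()  # dynamic loss scaling

for step in range(4):
    opt.zero_grad()
    x = torch.randn(8, 10, device=device)
    with torch.cuda.amp.autocast():     # mixed-precision forward pass
        loss = model(x).pow(2).mean()
    scaler.scale(loss).backward()       # backward on the scaled loss
    scaler.step(opt)                    # unscales; skips step on inf/nan
    scaler.update()                     # halve after overflow, grow later
    print(step, scaler.get_scale())     # the value logged as grad_scale
```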
], batch size: 47, lr: 6.64e-03, grad_scale: 4.0 +2023-03-09 09:32:02,637 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60396.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 09:32:16,912 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60408.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 09:32:31,475 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60421.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:32:47,126 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60434.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:32:58,978 INFO [train.py:898] (1/4) Epoch 17, batch 2300, loss[loss=0.1661, simple_loss=0.2517, pruned_loss=0.04021, over 18275.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2597, pruned_loss=0.04187, over 3553510.64 frames. ], batch size: 45, lr: 6.64e-03, grad_scale: 4.0 +2023-03-09 09:33:19,554 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2173, 4.3489, 2.7636, 4.2565, 5.3974, 3.0151, 4.0496, 4.1803], + device='cuda:1'), covar=tensor([0.0155, 0.1017, 0.1374, 0.0602, 0.0071, 0.1023, 0.0588, 0.0653], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0260, 0.0199, 0.0193, 0.0110, 0.0180, 0.0213, 0.0220], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 09:33:23,538 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.591e+02 2.714e+02 3.247e+02 3.774e+02 7.565e+02, threshold=6.493e+02, percent-clipped=0.0 +2023-03-09 09:33:41,513 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0113, 5.1032, 5.1729, 4.8159, 4.8991, 4.9006, 5.2051, 5.2469], + device='cuda:1'), covar=tensor([0.0069, 0.0056, 0.0049, 0.0098, 0.0053, 0.0156, 0.0062, 0.0076], + device='cuda:1'), in_proj_covar=tensor([0.0089, 0.0065, 0.0069, 0.0089, 0.0071, 0.0099, 0.0082, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 09:33:49,850 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5744, 5.5799, 5.1779, 5.4424, 5.4446, 4.8355, 5.3928, 5.1389], + device='cuda:1'), covar=tensor([0.0392, 0.0360, 0.1325, 0.0910, 0.0641, 0.0432, 0.0393, 0.0993], + device='cuda:1'), in_proj_covar=tensor([0.0463, 0.0523, 0.0679, 0.0415, 0.0417, 0.0484, 0.0513, 0.0644], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 09:33:57,583 INFO [train.py:898] (1/4) Epoch 17, batch 2350, loss[loss=0.1722, simple_loss=0.2636, pruned_loss=0.04044, over 18630.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2595, pruned_loss=0.04185, over 3549068.36 frames. 
], batch size: 52, lr: 6.63e-03, grad_scale: 4.0 +2023-03-09 09:34:09,164 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60504.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:34:11,594 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4567, 2.7639, 2.3293, 2.9881, 3.4596, 3.4424, 3.0963, 2.9873], + device='cuda:1'), covar=tensor([0.0188, 0.0261, 0.0641, 0.0334, 0.0168, 0.0135, 0.0309, 0.0288], + device='cuda:1'), in_proj_covar=tensor([0.0135, 0.0128, 0.0161, 0.0152, 0.0122, 0.0106, 0.0151, 0.0148], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 09:34:40,298 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6940, 2.5269, 4.3741, 4.0755, 2.3350, 4.6629, 3.9342, 2.8842], + device='cuda:1'), covar=tensor([0.0439, 0.1953, 0.0288, 0.0307, 0.2097, 0.0242, 0.0523, 0.1333], + device='cuda:1'), in_proj_covar=tensor([0.0202, 0.0230, 0.0191, 0.0153, 0.0220, 0.0203, 0.0234, 0.0194], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 09:34:55,551 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4070, 2.6064, 2.4534, 2.8663, 3.3772, 3.3140, 2.9833, 2.8171], + device='cuda:1'), covar=tensor([0.0205, 0.0314, 0.0593, 0.0370, 0.0224, 0.0178, 0.0444, 0.0394], + device='cuda:1'), in_proj_covar=tensor([0.0134, 0.0127, 0.0160, 0.0152, 0.0121, 0.0106, 0.0151, 0.0148], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 09:34:56,323 INFO [train.py:898] (1/4) Epoch 17, batch 2400, loss[loss=0.178, simple_loss=0.2623, pruned_loss=0.0469, over 18269.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2589, pruned_loss=0.04152, over 3555033.93 frames. ], batch size: 60, lr: 6.63e-03, grad_scale: 8.0 +2023-03-09 09:34:56,663 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60545.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:34:59,057 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6611, 2.3686, 2.7190, 2.6672, 3.3375, 4.9035, 4.6940, 3.4872], + device='cuda:1'), covar=tensor([0.1572, 0.2186, 0.2726, 0.1677, 0.2058, 0.0186, 0.0358, 0.0773], + device='cuda:1'), in_proj_covar=tensor([0.0280, 0.0334, 0.0363, 0.0268, 0.0382, 0.0224, 0.0289, 0.0238], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 09:34:59,209 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.03 vs. 
limit=5.0 +2023-03-09 09:35:05,012 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60552.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:35:07,294 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60554.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:35:20,248 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.011e+02 2.942e+02 3.643e+02 4.208e+02 9.657e+02, threshold=7.287e+02, percent-clipped=4.0 +2023-03-09 09:35:37,582 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8581, 3.6601, 5.0209, 2.8788, 4.4426, 2.6481, 3.0938, 1.7835], + device='cuda:1'), covar=tensor([0.1073, 0.0867, 0.0179, 0.0857, 0.0521, 0.2344, 0.2587, 0.2087], + device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0234, 0.0161, 0.0186, 0.0246, 0.0260, 0.0311, 0.0226], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 09:35:52,019 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60593.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:35:54,171 INFO [train.py:898] (1/4) Epoch 17, batch 2450, loss[loss=0.167, simple_loss=0.2577, pruned_loss=0.03813, over 18318.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2588, pruned_loss=0.04117, over 3563171.56 frames. ], batch size: 54, lr: 6.63e-03, grad_scale: 8.0 +2023-03-09 09:36:05,310 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 09:36:35,752 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6853, 3.4684, 2.4333, 4.3996, 3.3345, 4.3210, 2.5606, 4.0760], + device='cuda:1'), covar=tensor([0.0593, 0.0851, 0.1432, 0.0507, 0.0804, 0.0415, 0.1162, 0.0397], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0220, 0.0189, 0.0275, 0.0190, 0.0264, 0.0201, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 09:36:50,770 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8709, 3.6764, 4.9545, 2.9814, 4.3971, 2.6316, 3.1909, 1.8903], + device='cuda:1'), covar=tensor([0.1061, 0.0939, 0.0156, 0.0808, 0.0515, 0.2396, 0.2463, 0.1956], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0234, 0.0162, 0.0186, 0.0246, 0.0261, 0.0312, 0.0226], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 09:36:52,593 INFO [train.py:898] (1/4) Epoch 17, batch 2500, loss[loss=0.1548, simple_loss=0.2389, pruned_loss=0.03535, over 17647.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2581, pruned_loss=0.04106, over 3576798.45 frames. ], batch size: 39, lr: 6.63e-03, grad_scale: 8.0 +2023-03-09 09:37:17,218 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.797e+02 2.798e+02 3.205e+02 3.786e+02 6.512e+02, threshold=6.411e+02, percent-clipped=0.0 +2023-03-09 09:37:51,501 INFO [train.py:898] (1/4) Epoch 17, batch 2550, loss[loss=0.176, simple_loss=0.2652, pruned_loss=0.04335, over 18058.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2586, pruned_loss=0.04117, over 3567164.35 frames. 
], batch size: 65, lr: 6.62e-03, grad_scale: 8.0 +2023-03-09 09:37:52,902 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60696.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:38:06,250 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60708.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:38:18,149 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60718.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:38:21,238 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60721.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:38:35,939 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60734.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:38:47,259 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60744.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:38:48,801 INFO [train.py:898] (1/4) Epoch 17, batch 2600, loss[loss=0.16, simple_loss=0.2399, pruned_loss=0.04005, over 18427.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2586, pruned_loss=0.04133, over 3568462.88 frames. ], batch size: 43, lr: 6.62e-03, grad_scale: 8.0 +2023-03-09 09:39:01,918 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60756.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:39:13,463 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.006e+02 2.726e+02 3.213e+02 3.687e+02 6.855e+02, threshold=6.427e+02, percent-clipped=2.0 +2023-03-09 09:39:16,910 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60769.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:39:28,285 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60779.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:39:31,188 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60782.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:39:45,274 INFO [train.py:898] (1/4) Epoch 17, batch 2650, loss[loss=0.1942, simple_loss=0.2803, pruned_loss=0.05404, over 18638.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2597, pruned_loss=0.04165, over 3570986.37 frames. ], batch size: 52, lr: 6.62e-03, grad_scale: 8.0 +2023-03-09 09:39:50,043 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1398, 3.1478, 2.0672, 3.8039, 2.6962, 3.6250, 2.3069, 3.1443], + device='cuda:1'), covar=tensor([0.0594, 0.0782, 0.1324, 0.0602, 0.0821, 0.0299, 0.1121, 0.0489], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0216, 0.0186, 0.0271, 0.0187, 0.0259, 0.0198, 0.0193], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 09:39:51,167 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5374, 3.4783, 2.2128, 4.3697, 3.0986, 4.3188, 2.5432, 3.8201], + device='cuda:1'), covar=tensor([0.0622, 0.0749, 0.1438, 0.0461, 0.0829, 0.0263, 0.1076, 0.0398], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0216, 0.0186, 0.0271, 0.0187, 0.0259, 0.0198, 0.0193], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 09:40:27,513 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. 
limit=2.0 +2023-03-09 09:40:43,868 INFO [train.py:898] (1/4) Epoch 17, batch 2700, loss[loss=0.1939, simple_loss=0.2807, pruned_loss=0.05357, over 17909.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2601, pruned_loss=0.04184, over 3565298.63 frames. ], batch size: 65, lr: 6.61e-03, grad_scale: 8.0 +2023-03-09 09:40:54,815 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60854.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:41:08,391 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.851e+02 2.950e+02 3.306e+02 4.033e+02 9.458e+02, threshold=6.612e+02, percent-clipped=5.0 +2023-03-09 09:41:42,557 INFO [train.py:898] (1/4) Epoch 17, batch 2750, loss[loss=0.1458, simple_loss=0.2341, pruned_loss=0.0288, over 18249.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2593, pruned_loss=0.04149, over 3564994.72 frames. ], batch size: 45, lr: 6.61e-03, grad_scale: 8.0 +2023-03-09 09:41:51,130 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60902.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:42:37,230 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.4656, 2.1074, 2.3897, 2.4982, 3.0961, 4.6984, 4.5289, 3.1858], + device='cuda:1'), covar=tensor([0.1705, 0.2442, 0.3087, 0.1835, 0.2293, 0.0200, 0.0375, 0.0893], + device='cuda:1'), in_proj_covar=tensor([0.0281, 0.0334, 0.0365, 0.0269, 0.0384, 0.0225, 0.0289, 0.0240], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 09:42:41,195 INFO [train.py:898] (1/4) Epoch 17, batch 2800, loss[loss=0.1636, simple_loss=0.2578, pruned_loss=0.03472, over 18275.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.259, pruned_loss=0.04117, over 3574658.74 frames. ], batch size: 49, lr: 6.61e-03, grad_scale: 8.0 +2023-03-09 09:42:50,203 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-03-09 09:43:06,399 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.038e+02 2.738e+02 3.235e+02 3.720e+02 7.893e+02, threshold=6.471e+02, percent-clipped=2.0 +2023-03-09 09:43:12,489 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9285, 5.5077, 5.4919, 5.4664, 4.9751, 5.3785, 4.7760, 5.4068], + device='cuda:1'), covar=tensor([0.0269, 0.0296, 0.0190, 0.0376, 0.0412, 0.0223, 0.1148, 0.0292], + device='cuda:1'), in_proj_covar=tensor([0.0204, 0.0250, 0.0242, 0.0306, 0.0258, 0.0254, 0.0298, 0.0246], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 09:43:13,679 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5056, 3.4373, 3.3626, 2.8984, 3.1784, 2.6169, 2.5767, 3.4591], + device='cuda:1'), covar=tensor([0.0074, 0.0115, 0.0093, 0.0154, 0.0122, 0.0226, 0.0247, 0.0085], + device='cuda:1'), in_proj_covar=tensor([0.0125, 0.0149, 0.0127, 0.0179, 0.0131, 0.0170, 0.0174, 0.0110], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 09:43:40,129 INFO [train.py:898] (1/4) Epoch 17, batch 2850, loss[loss=0.1841, simple_loss=0.2763, pruned_loss=0.04601, over 18348.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2588, pruned_loss=0.04098, over 3587562.37 frames. 
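The [scaling.py:679] lines compare a per-module whitening metric against a limit (2.0 for the grouped 96- and 192-channel activations, 5.0 for the ungrouped 384-channel ones); a corrective penalty only applies when the metric exceeds the limit, and the "1.50 vs. limit=2.0"-style entries above show modules staying under it. One plausible definition of such a metric is sketched below: it equals 1.0 when the per-group feature covariance is a multiple of the identity and grows as the covariance becomes ill-conditioned. This formulation is an assumption, not the actual scaling.py code.

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    """Assumed whitening metric: mean squared eigenvalue of the per-group
    covariance divided by the squared mean eigenvalue (>= 1.0, with 1.0
    meaning perfectly 'white' features)."""
    num_frames, num_channels = x.shape
    x = x.reshape(num_frames, num_groups, num_channels // num_groups)
    x = x - x.mean(dim=0, keepdim=True)
    cov = torch.einsum("ngc,ngd->gcd", x, x) / num_frames  # per-group cov
    eigs = torch.linalg.eigvalsh(cov)
    return (eigs ** 2).mean() / (eigs.mean() ** 2)

x = torch.randn(200, 192)                 # white-noise features
print(whitening_metric(x, num_groups=8))  # close to 1.0, well under 2.0
```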
], batch size: 56, lr: 6.61e-03, grad_scale: 8.0 +2023-03-09 09:43:53,294 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7280, 2.4310, 4.3678, 3.9760, 2.3242, 4.5981, 3.8807, 2.8200], + device='cuda:1'), covar=tensor([0.0379, 0.2029, 0.0239, 0.0306, 0.2047, 0.0238, 0.0552, 0.1288], + device='cuda:1'), in_proj_covar=tensor([0.0204, 0.0233, 0.0191, 0.0153, 0.0219, 0.0204, 0.0236, 0.0195], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 09:43:53,736 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.49 vs. limit=5.0 +2023-03-09 09:44:21,037 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7340, 2.8243, 2.7536, 2.9848, 3.6881, 3.7098, 3.1915, 2.9530], + device='cuda:1'), covar=tensor([0.0169, 0.0252, 0.0463, 0.0301, 0.0157, 0.0116, 0.0300, 0.0329], + device='cuda:1'), in_proj_covar=tensor([0.0133, 0.0126, 0.0159, 0.0152, 0.0121, 0.0106, 0.0148, 0.0147], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 09:44:39,451 INFO [train.py:898] (1/4) Epoch 17, batch 2900, loss[loss=0.1683, simple_loss=0.258, pruned_loss=0.03935, over 18634.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2585, pruned_loss=0.04097, over 3580345.73 frames. ], batch size: 52, lr: 6.60e-03, grad_scale: 8.0 +2023-03-09 09:44:57,646 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61061.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:45:04,195 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.725e+02 2.779e+02 3.469e+02 4.268e+02 9.879e+02, threshold=6.938e+02, percent-clipped=3.0 +2023-03-09 09:45:13,448 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61074.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:45:19,185 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6802, 3.5801, 5.1048, 3.0735, 4.4837, 2.5103, 3.0460, 2.0298], + device='cuda:1'), covar=tensor([0.1128, 0.0901, 0.0120, 0.0690, 0.0485, 0.2541, 0.2729, 0.1897], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0234, 0.0162, 0.0186, 0.0246, 0.0261, 0.0312, 0.0227], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 09:45:37,774 INFO [train.py:898] (1/4) Epoch 17, batch 2950, loss[loss=0.1538, simple_loss=0.2402, pruned_loss=0.03365, over 18261.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2585, pruned_loss=0.04101, over 3583632.31 frames. ], batch size: 45, lr: 6.60e-03, grad_scale: 4.0 +2023-03-09 09:45:57,495 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. 
limit=2.0 +2023-03-09 09:46:09,529 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61122.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:46:14,092 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6886, 2.9332, 4.3331, 3.6452, 2.6551, 4.4735, 4.0308, 2.8029], + device='cuda:1'), covar=tensor([0.0463, 0.1327, 0.0257, 0.0380, 0.1520, 0.0215, 0.0479, 0.0994], + device='cuda:1'), in_proj_covar=tensor([0.0204, 0.0232, 0.0192, 0.0152, 0.0219, 0.0205, 0.0235, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 09:46:36,122 INFO [train.py:898] (1/4) Epoch 17, batch 3000, loss[loss=0.1585, simple_loss=0.242, pruned_loss=0.03754, over 18548.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2588, pruned_loss=0.04101, over 3589620.93 frames. ], batch size: 45, lr: 6.60e-03, grad_scale: 4.0 +2023-03-09 09:46:36,122 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 09:46:48,252 INFO [train.py:932] (1/4) Epoch 17, validation: loss=0.1521, simple_loss=0.2525, pruned_loss=0.02589, over 944034.00 frames. +2023-03-09 09:46:48,253 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 09:46:56,094 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9230, 4.6125, 4.6992, 3.5391, 3.8577, 3.4106, 2.7090, 2.3346], + device='cuda:1'), covar=tensor([0.0200, 0.0134, 0.0072, 0.0298, 0.0325, 0.0239, 0.0722, 0.0949], + device='cuda:1'), in_proj_covar=tensor([0.0067, 0.0055, 0.0058, 0.0065, 0.0087, 0.0063, 0.0076, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 09:47:05,135 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.63 vs. limit=5.0 +2023-03-09 09:47:14,273 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.190e+02 3.156e+02 3.810e+02 4.470e+02 9.676e+02, threshold=7.619e+02, percent-clipped=4.0 +2023-03-09 09:47:15,754 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9730, 5.0433, 5.0591, 4.8644, 4.8470, 4.8769, 5.2179, 5.1212], + device='cuda:1'), covar=tensor([0.0067, 0.0075, 0.0053, 0.0111, 0.0058, 0.0145, 0.0089, 0.0114], + device='cuda:1'), in_proj_covar=tensor([0.0089, 0.0064, 0.0069, 0.0088, 0.0070, 0.0098, 0.0081, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 09:47:46,763 INFO [train.py:898] (1/4) Epoch 17, batch 3050, loss[loss=0.1547, simple_loss=0.2429, pruned_loss=0.03329, over 18502.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2584, pruned_loss=0.04096, over 3600837.15 frames. ], batch size: 47, lr: 6.60e-03, grad_scale: 4.0 +2023-03-09 09:48:44,427 INFO [train.py:898] (1/4) Epoch 17, batch 3100, loss[loss=0.2022, simple_loss=0.2969, pruned_loss=0.05371, over 18357.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.259, pruned_loss=0.0413, over 3606479.54 frames. 
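Each validation pass above also reports peak GPU memory (19889MB here). PyTorch tracks that counter directly; a one-line sketch of how such a message can be produced, assuming a CUDA device:

```python
import torch

# Peak bytes ever allocated on the current device, reported in MB in the
# same spirit as the "Maximum memory allocated so far is ...MB" lines.
mb = torch.cuda.max_memory_allocated() // (1024 * 1024)
print(f"Maximum memory allocated so far is {mb}MB")
```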
], batch size: 56, lr: 6.59e-03, grad_scale: 4.0 +2023-03-09 09:49:09,870 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.926e+02 2.710e+02 3.175e+02 4.043e+02 1.016e+03, threshold=6.350e+02, percent-clipped=3.0 +2023-03-09 09:49:38,290 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9525, 5.4652, 5.4158, 5.4139, 4.9254, 5.2967, 4.6977, 5.3306], + device='cuda:1'), covar=tensor([0.0233, 0.0247, 0.0187, 0.0361, 0.0398, 0.0237, 0.1156, 0.0267], + device='cuda:1'), in_proj_covar=tensor([0.0205, 0.0251, 0.0240, 0.0306, 0.0260, 0.0255, 0.0299, 0.0247], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 09:49:42,491 INFO [train.py:898] (1/4) Epoch 17, batch 3150, loss[loss=0.1523, simple_loss=0.2414, pruned_loss=0.03157, over 18368.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2585, pruned_loss=0.04098, over 3611340.60 frames. ], batch size: 46, lr: 6.59e-03, grad_scale: 4.0 +2023-03-09 09:50:40,783 INFO [train.py:898] (1/4) Epoch 17, batch 3200, loss[loss=0.1499, simple_loss=0.2296, pruned_loss=0.03507, over 18381.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2582, pruned_loss=0.04091, over 3606773.21 frames. ], batch size: 42, lr: 6.59e-03, grad_scale: 8.0 +2023-03-09 09:50:53,595 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3794, 4.8004, 4.4508, 4.6533, 4.5206, 4.4222, 4.8947, 4.8044], + device='cuda:1'), covar=tensor([0.1080, 0.0742, 0.1873, 0.0659, 0.1304, 0.0690, 0.0629, 0.0677], + device='cuda:1'), in_proj_covar=tensor([0.0593, 0.0508, 0.0371, 0.0534, 0.0727, 0.0528, 0.0718, 0.0539], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 09:50:59,933 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6653, 2.8597, 4.4435, 3.5682, 2.5886, 4.6393, 4.0058, 2.8958], + device='cuda:1'), covar=tensor([0.0528, 0.1429, 0.0214, 0.0410, 0.1623, 0.0181, 0.0476, 0.0980], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0236, 0.0195, 0.0155, 0.0222, 0.0207, 0.0238, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 09:51:06,761 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.872e+02 2.721e+02 3.197e+02 3.884e+02 6.601e+02, threshold=6.394e+02, percent-clipped=2.0 +2023-03-09 09:51:14,989 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61374.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:51:22,922 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7824, 5.2491, 5.2308, 5.2412, 4.7796, 5.0951, 4.5041, 5.1354], + device='cuda:1'), covar=tensor([0.0240, 0.0277, 0.0205, 0.0394, 0.0397, 0.0253, 0.1091, 0.0307], + device='cuda:1'), in_proj_covar=tensor([0.0206, 0.0253, 0.0243, 0.0308, 0.0262, 0.0257, 0.0301, 0.0248], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 09:51:32,844 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-03-09 09:51:38,953 INFO [train.py:898] (1/4) Epoch 17, batch 3250, loss[loss=0.2079, simple_loss=0.2894, pruned_loss=0.0632, over 12613.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.259, pruned_loss=0.0414, over 3590425.63 frames. 
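In every training line, loss[...] describes the current batch while tot_loss[...] is a running aggregate over the roughly 3.6M frames of recent batches, weighted by frame count rather than by batch. A hedged sketch of such a tracker; the class name and the absence of any periodic reset are simplifications.

```python
class LossTracker:
    """Frame-weighted running average, illustrating how the
    tot_loss[... over N frames] numbers above can be maintained."""

    def __init__(self):
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss, num_frames):
        self.loss_sum += batch_loss * num_frames  # weight by frames
        self.frames += num_frames

    @property
    def avg(self):
        return self.loss_sum / max(self.frames, 1.0)

tracker = LossTracker()
tracker.update(0.1523, 18368.0)  # batch 3150 above: avg loss, frames
tracker.update(0.1499, 18381.0)  # batch 3200 above
print(f"tot_loss[loss={tracker.avg:.4f}, over {tracker.frames:.2f} frames]")
```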
], batch size: 130, lr: 6.59e-03, grad_scale: 8.0 +2023-03-09 09:52:05,259 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61417.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:52:10,906 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=61422.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:52:24,277 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5639, 3.0439, 4.2500, 3.6147, 2.6281, 4.5160, 3.8505, 2.7422], + device='cuda:1'), covar=tensor([0.0522, 0.1258, 0.0230, 0.0376, 0.1492, 0.0170, 0.0520, 0.1017], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0237, 0.0197, 0.0156, 0.0225, 0.0208, 0.0241, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 09:52:37,388 INFO [train.py:898] (1/4) Epoch 17, batch 3300, loss[loss=0.1766, simple_loss=0.2606, pruned_loss=0.04631, over 18343.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2594, pruned_loss=0.04165, over 3583221.47 frames. ], batch size: 56, lr: 6.58e-03, grad_scale: 8.0 +2023-03-09 09:53:02,170 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.987e+02 2.924e+02 3.407e+02 4.178e+02 8.081e+02, threshold=6.814e+02, percent-clipped=6.0 +2023-03-09 09:53:35,247 INFO [train.py:898] (1/4) Epoch 17, batch 3350, loss[loss=0.1562, simple_loss=0.2404, pruned_loss=0.03605, over 18430.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2597, pruned_loss=0.04167, over 3586155.85 frames. ], batch size: 42, lr: 6.58e-03, grad_scale: 8.0 +2023-03-09 09:53:56,151 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6035, 3.5688, 3.4049, 3.0707, 3.3249, 2.7287, 2.6562, 3.6072], + device='cuda:1'), covar=tensor([0.0051, 0.0079, 0.0072, 0.0113, 0.0079, 0.0165, 0.0193, 0.0051], + device='cuda:1'), in_proj_covar=tensor([0.0125, 0.0147, 0.0126, 0.0176, 0.0131, 0.0169, 0.0173, 0.0110], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 09:54:33,217 INFO [train.py:898] (1/4) Epoch 17, batch 3400, loss[loss=0.1714, simple_loss=0.2587, pruned_loss=0.04203, over 16136.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2598, pruned_loss=0.04173, over 3589461.83 frames. ], batch size: 95, lr: 6.58e-03, grad_scale: 8.0 +2023-03-09 09:54:45,568 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7994, 3.2204, 4.3487, 3.7656, 2.8703, 4.7067, 4.0652, 2.8622], + device='cuda:1'), covar=tensor([0.0456, 0.1142, 0.0250, 0.0388, 0.1422, 0.0183, 0.0456, 0.0995], + device='cuda:1'), in_proj_covar=tensor([0.0208, 0.0235, 0.0196, 0.0154, 0.0222, 0.0206, 0.0239, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 09:54:50,293 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-09 09:54:58,507 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.971e+02 2.720e+02 3.056e+02 3.431e+02 6.999e+02, threshold=6.113e+02, percent-clipped=1.0 +2023-03-09 09:55:31,068 INFO [train.py:898] (1/4) Epoch 17, batch 3450, loss[loss=0.151, simple_loss=0.2405, pruned_loss=0.0308, over 18392.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2593, pruned_loss=0.04156, over 3588154.62 frames. 
], batch size: 50, lr: 6.57e-03, grad_scale: 8.0 +2023-03-09 09:55:32,662 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7515, 3.9532, 2.4010, 3.8373, 4.8923, 2.4824, 3.6602, 3.7345], + device='cuda:1'), covar=tensor([0.0165, 0.1054, 0.1601, 0.0637, 0.0086, 0.1227, 0.0696, 0.0782], + device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0259, 0.0199, 0.0192, 0.0112, 0.0178, 0.0212, 0.0219], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 09:56:03,331 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7880, 4.4656, 4.5362, 3.3257, 3.7845, 3.4862, 2.5675, 2.2556], + device='cuda:1'), covar=tensor([0.0245, 0.0171, 0.0073, 0.0317, 0.0310, 0.0221, 0.0759, 0.0923], + device='cuda:1'), in_proj_covar=tensor([0.0067, 0.0055, 0.0058, 0.0065, 0.0085, 0.0062, 0.0075, 0.0081], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 09:56:28,401 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.80 vs. limit=5.0 +2023-03-09 09:56:30,010 INFO [train.py:898] (1/4) Epoch 17, batch 3500, loss[loss=0.2453, simple_loss=0.3198, pruned_loss=0.08538, over 12048.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2572, pruned_loss=0.04095, over 3592645.23 frames. ], batch size: 131, lr: 6.57e-03, grad_scale: 8.0 +2023-03-09 09:56:56,119 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.780e+02 2.828e+02 3.319e+02 4.046e+02 7.174e+02, threshold=6.638e+02, percent-clipped=2.0 +2023-03-09 09:57:20,648 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-09 09:57:25,970 INFO [train.py:898] (1/4) Epoch 17, batch 3550, loss[loss=0.1631, simple_loss=0.2503, pruned_loss=0.03792, over 18485.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2575, pruned_loss=0.0409, over 3599251.88 frames. ], batch size: 53, lr: 6.57e-03, grad_scale: 8.0 +2023-03-09 09:57:50,664 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61717.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:58:07,025 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61732.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:58:18,921 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61743.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:58:20,834 INFO [train.py:898] (1/4) Epoch 17, batch 3600, loss[loss=0.1587, simple_loss=0.25, pruned_loss=0.03368, over 18376.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2566, pruned_loss=0.0407, over 3596936.85 frames. 
], batch size: 50, lr: 6.57e-03, grad_scale: 8.0 +2023-03-09 09:58:23,370 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.2343, 3.9554, 5.2181, 3.0594, 4.6492, 2.8337, 3.2455, 1.9648], + device='cuda:1'), covar=tensor([0.0956, 0.0809, 0.0119, 0.0848, 0.0448, 0.2374, 0.2512, 0.2036], + device='cuda:1'), in_proj_covar=tensor([0.0217, 0.0238, 0.0168, 0.0189, 0.0250, 0.0265, 0.0318, 0.0228], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 09:58:42,535 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=61765.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:58:44,427 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.882e+02 3.033e+02 3.500e+02 4.086e+02 7.199e+02, threshold=7.000e+02, percent-clipped=1.0 +2023-03-09 09:59:23,036 INFO [train.py:898] (1/4) Epoch 18, batch 0, loss[loss=0.1835, simple_loss=0.2774, pruned_loss=0.04477, over 18469.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2774, pruned_loss=0.04477, over 18469.00 frames. ], batch size: 53, lr: 6.38e-03, grad_scale: 8.0 +2023-03-09 09:59:23,037 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 09:59:34,863 INFO [train.py:932] (1/4) Epoch 18, validation: loss=0.1526, simple_loss=0.2531, pruned_loss=0.0261, over 944034.00 frames. +2023-03-09 09:59:34,864 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 09:59:50,870 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61793.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:00:05,781 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61804.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:00:34,726 INFO [train.py:898] (1/4) Epoch 18, batch 50, loss[loss=0.1639, simple_loss=0.2518, pruned_loss=0.03798, over 18267.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.2565, pruned_loss=0.03953, over 804627.71 frames. ], batch size: 49, lr: 6.37e-03, grad_scale: 8.0 +2023-03-09 10:01:20,171 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.961e+02 2.892e+02 3.419e+02 4.081e+02 7.756e+02, threshold=6.838e+02, percent-clipped=1.0 +2023-03-09 10:01:33,974 INFO [train.py:898] (1/4) Epoch 18, batch 100, loss[loss=0.2049, simple_loss=0.2982, pruned_loss=0.05581, over 18359.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.257, pruned_loss=0.0394, over 1428451.13 frames. ], batch size: 56, lr: 6.37e-03, grad_scale: 8.0 +2023-03-09 10:02:03,161 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-09 10:02:15,367 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61914.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:02:32,589 INFO [train.py:898] (1/4) Epoch 18, batch 150, loss[loss=0.1664, simple_loss=0.2604, pruned_loss=0.03617, over 18359.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2585, pruned_loss=0.03994, over 1914113.84 frames. ], batch size: 55, lr: 6.37e-03, grad_scale: 8.0 +2023-03-09 10:02:49,351 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.86 vs. 
limit=5.0 +2023-03-09 10:03:17,466 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.774e+02 2.985e+02 3.348e+02 3.997e+02 9.242e+02, threshold=6.695e+02, percent-clipped=1.0 +2023-03-09 10:03:27,659 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61975.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:03:32,034 INFO [train.py:898] (1/4) Epoch 18, batch 200, loss[loss=0.179, simple_loss=0.2746, pruned_loss=0.04168, over 18128.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2561, pruned_loss=0.03898, over 2296825.41 frames. ], batch size: 62, lr: 6.37e-03, grad_scale: 8.0 +2023-03-09 10:04:35,255 INFO [train.py:898] (1/4) Epoch 18, batch 250, loss[loss=0.1755, simple_loss=0.2683, pruned_loss=0.04135, over 18395.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2566, pruned_loss=0.03932, over 2584000.74 frames. ], batch size: 52, lr: 6.36e-03, grad_scale: 8.0 +2023-03-09 10:04:38,930 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0610, 5.3905, 2.6143, 5.2175, 5.2042, 5.4457, 5.2604, 2.6197], + device='cuda:1'), covar=tensor([0.0181, 0.0064, 0.0799, 0.0092, 0.0063, 0.0067, 0.0077, 0.0999], + device='cuda:1'), in_proj_covar=tensor([0.0084, 0.0078, 0.0094, 0.0091, 0.0083, 0.0072, 0.0083, 0.0094], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 10:04:40,218 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6291, 3.4525, 4.7909, 4.2098, 3.1868, 2.8557, 4.2537, 4.9428], + device='cuda:1'), covar=tensor([0.0804, 0.1480, 0.0162, 0.0405, 0.0918, 0.1201, 0.0371, 0.0242], + device='cuda:1'), in_proj_covar=tensor([0.0145, 0.0268, 0.0138, 0.0177, 0.0187, 0.0188, 0.0190, 0.0184], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 10:04:45,073 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-09 10:05:14,878 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6160, 3.3865, 2.0599, 4.4211, 3.2967, 4.1030, 2.1597, 3.8391], + device='cuda:1'), covar=tensor([0.0544, 0.0743, 0.1455, 0.0539, 0.0697, 0.0290, 0.1416, 0.0430], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0221, 0.0187, 0.0273, 0.0193, 0.0262, 0.0200, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 10:05:19,646 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.960e+02 2.782e+02 3.298e+02 4.084e+02 9.101e+02, threshold=6.597e+02, percent-clipped=2.0 +2023-03-09 10:05:34,840 INFO [train.py:898] (1/4) Epoch 18, batch 300, loss[loss=0.1603, simple_loss=0.2486, pruned_loss=0.036, over 17630.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.2568, pruned_loss=0.03938, over 2801789.68 frames. 
], batch size: 70, lr: 6.36e-03, grad_scale: 8.0 +2023-03-09 10:05:45,154 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62088.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:05:57,595 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62099.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:06:24,900 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62122.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:06:33,284 INFO [train.py:898] (1/4) Epoch 18, batch 350, loss[loss=0.1926, simple_loss=0.2783, pruned_loss=0.05345, over 18274.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2582, pruned_loss=0.04001, over 2987264.22 frames. ], batch size: 57, lr: 6.36e-03, grad_scale: 8.0 +2023-03-09 10:06:35,440 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 10:07:17,194 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.168e+02 2.722e+02 3.288e+02 4.143e+02 7.377e+02, threshold=6.576e+02, percent-clipped=3.0 +2023-03-09 10:07:32,441 INFO [train.py:898] (1/4) Epoch 18, batch 400, loss[loss=0.1902, simple_loss=0.2859, pruned_loss=0.0472, over 18358.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2579, pruned_loss=0.04002, over 3134282.77 frames. ], batch size: 56, lr: 6.36e-03, grad_scale: 8.0 +2023-03-09 10:07:37,218 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62183.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:08:26,311 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.80 vs. limit=5.0 +2023-03-09 10:08:30,220 INFO [train.py:898] (1/4) Epoch 18, batch 450, loss[loss=0.1605, simple_loss=0.2545, pruned_loss=0.03328, over 18396.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2574, pruned_loss=0.03996, over 3231929.95 frames. ], batch size: 52, lr: 6.35e-03, grad_scale: 8.0 +2023-03-09 10:08:32,201 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 10:09:15,161 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.738e+02 2.782e+02 3.170e+02 3.804e+02 6.822e+02, threshold=6.341e+02, percent-clipped=2.0 +2023-03-09 10:09:18,887 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62270.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:09:22,433 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62273.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:09:26,299 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6859, 4.3465, 4.4250, 3.3732, 3.6110, 3.3678, 2.5430, 2.3221], + device='cuda:1'), covar=tensor([0.0233, 0.0180, 0.0077, 0.0300, 0.0343, 0.0236, 0.0738, 0.0897], + device='cuda:1'), in_proj_covar=tensor([0.0067, 0.0056, 0.0058, 0.0066, 0.0087, 0.0064, 0.0076, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 10:09:29,824 INFO [train.py:898] (1/4) Epoch 18, batch 500, loss[loss=0.1593, simple_loss=0.2484, pruned_loss=0.03508, over 18346.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2579, pruned_loss=0.0403, over 3312313.59 frames. ], batch size: 46, lr: 6.35e-03, grad_scale: 8.0 +2023-03-09 10:10:28,265 INFO [train.py:898] (1/4) Epoch 18, batch 550, loss[loss=0.1328, simple_loss=0.2221, pruned_loss=0.02176, over 18417.00 frames. 
], tot_loss[loss=0.1687, simple_loss=0.2575, pruned_loss=0.03993, over 3386300.02 frames. ], batch size: 43, lr: 6.35e-03, grad_scale: 8.0 +2023-03-09 10:10:34,904 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62334.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:11:13,351 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.182e+02 2.712e+02 3.173e+02 3.739e+02 5.690e+02, threshold=6.346e+02, percent-clipped=0.0 +2023-03-09 10:11:27,633 INFO [train.py:898] (1/4) Epoch 18, batch 600, loss[loss=0.2054, simple_loss=0.2762, pruned_loss=0.06726, over 12642.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2576, pruned_loss=0.04003, over 3431612.41 frames. ], batch size: 130, lr: 6.35e-03, grad_scale: 8.0 +2023-03-09 10:11:38,900 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62388.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:11:43,041 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62391.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:11:52,680 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62399.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:12:26,537 INFO [train.py:898] (1/4) Epoch 18, batch 650, loss[loss=0.1898, simple_loss=0.2839, pruned_loss=0.0478, over 18485.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2582, pruned_loss=0.04043, over 3461605.73 frames. ], batch size: 53, lr: 6.34e-03, grad_scale: 8.0 +2023-03-09 10:12:35,771 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62436.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:12:48,551 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62447.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:12:55,141 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62452.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:13:12,086 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.651e+02 2.765e+02 3.266e+02 3.962e+02 6.380e+02, threshold=6.532e+02, percent-clipped=1.0 +2023-03-09 10:13:24,731 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62478.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:13:25,698 INFO [train.py:898] (1/4) Epoch 18, batch 700, loss[loss=0.1491, simple_loss=0.2318, pruned_loss=0.03315, over 18343.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2576, pruned_loss=0.04027, over 3483837.48 frames. ], batch size: 46, lr: 6.34e-03, grad_scale: 8.0 +2023-03-09 10:14:21,586 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.7273, 6.2073, 5.6667, 6.0673, 5.8406, 5.7657, 6.3245, 6.2269], + device='cuda:1'), covar=tensor([0.1105, 0.0730, 0.0378, 0.0597, 0.1317, 0.0613, 0.0491, 0.0657], + device='cuda:1'), in_proj_covar=tensor([0.0583, 0.0499, 0.0366, 0.0525, 0.0721, 0.0522, 0.0706, 0.0535], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 10:14:24,863 INFO [train.py:898] (1/4) Epoch 18, batch 750, loss[loss=0.1547, simple_loss=0.2468, pruned_loss=0.03126, over 18396.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2568, pruned_loss=0.04001, over 3516835.02 frames. 
], batch size: 50, lr: 6.34e-03, grad_scale: 4.0 +2023-03-09 10:14:44,184 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62545.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:14:57,327 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2078, 4.3591, 2.6985, 4.2893, 5.4972, 2.8978, 4.1406, 4.1139], + device='cuda:1'), covar=tensor([0.0123, 0.1040, 0.1414, 0.0550, 0.0049, 0.1044, 0.0537, 0.0705], + device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0256, 0.0198, 0.0192, 0.0111, 0.0177, 0.0209, 0.0218], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 10:15:10,947 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.733e+02 2.727e+02 3.281e+02 3.903e+02 8.552e+02, threshold=6.561e+02, percent-clipped=5.0 +2023-03-09 10:15:13,420 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62570.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:15:23,436 INFO [train.py:898] (1/4) Epoch 18, batch 800, loss[loss=0.1562, simple_loss=0.2351, pruned_loss=0.03864, over 17697.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2569, pruned_loss=0.04029, over 3534115.47 frames. ], batch size: 39, lr: 6.34e-03, grad_scale: 8.0 +2023-03-09 10:15:30,659 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62585.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:15:56,930 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62606.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:16:10,962 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62618.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:16:23,375 INFO [train.py:898] (1/4) Epoch 18, batch 850, loss[loss=0.1749, simple_loss=0.273, pruned_loss=0.03837, over 17831.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2568, pruned_loss=0.04011, over 3551166.39 frames. 
], batch size: 70, lr: 6.33e-03, grad_scale: 8.0 +2023-03-09 10:16:23,582 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62629.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:16:42,865 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5923, 2.8894, 4.2205, 3.6647, 2.8105, 4.4898, 3.9716, 2.8063], + device='cuda:1'), covar=tensor([0.0503, 0.1346, 0.0240, 0.0370, 0.1377, 0.0206, 0.0430, 0.1001], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0233, 0.0195, 0.0154, 0.0221, 0.0207, 0.0237, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 10:16:44,082 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62646.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:16:49,760 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9962, 5.4907, 5.4466, 5.4529, 4.9732, 5.3598, 4.8287, 5.3751], + device='cuda:1'), covar=tensor([0.0202, 0.0233, 0.0174, 0.0370, 0.0354, 0.0198, 0.0967, 0.0258], + device='cuda:1'), in_proj_covar=tensor([0.0204, 0.0248, 0.0240, 0.0305, 0.0258, 0.0254, 0.0296, 0.0245], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 10:17:07,972 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62666.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:17:09,801 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.769e+02 2.717e+02 3.249e+02 3.987e+02 1.142e+03, threshold=6.498e+02, percent-clipped=2.0 +2023-03-09 10:17:22,616 INFO [train.py:898] (1/4) Epoch 18, batch 900, loss[loss=0.1378, simple_loss=0.2168, pruned_loss=0.02937, over 18459.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2564, pruned_loss=0.03991, over 3568338.70 frames. ], batch size: 43, lr: 6.33e-03, grad_scale: 8.0 +2023-03-09 10:17:40,047 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7378, 5.5994, 5.2638, 5.4779, 4.8823, 5.4157, 5.7434, 5.5745], + device='cuda:1'), covar=tensor([0.2324, 0.1321, 0.0888, 0.1266, 0.2515, 0.1115, 0.1054, 0.1330], + device='cuda:1'), in_proj_covar=tensor([0.0584, 0.0497, 0.0364, 0.0525, 0.0719, 0.0523, 0.0709, 0.0539], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 10:17:55,574 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.93 vs. limit=5.0 +2023-03-09 10:18:19,032 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.19 vs. limit=5.0 +2023-03-09 10:18:19,840 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62727.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:18:21,831 INFO [train.py:898] (1/4) Epoch 18, batch 950, loss[loss=0.1626, simple_loss=0.2567, pruned_loss=0.0342, over 18492.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2566, pruned_loss=0.03992, over 3569234.44 frames. ], batch size: 51, lr: 6.33e-03, grad_scale: 8.0 +2023-03-09 10:18:41,360 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. 
limit=2.0 +2023-03-09 10:18:43,181 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62747.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:19:07,975 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.629e+02 2.750e+02 3.313e+02 3.832e+02 8.841e+02, threshold=6.625e+02, percent-clipped=2.0 +2023-03-09 10:19:14,112 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62773.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:19:20,115 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62778.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:19:21,058 INFO [train.py:898] (1/4) Epoch 18, batch 1000, loss[loss=0.1871, simple_loss=0.2769, pruned_loss=0.04867, over 18349.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2569, pruned_loss=0.03985, over 3579011.54 frames. ], batch size: 56, lr: 6.33e-03, grad_scale: 8.0 +2023-03-09 10:19:44,166 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3418, 2.5478, 3.7698, 3.4768, 2.5241, 3.9514, 3.6412, 2.7356], + device='cuda:1'), covar=tensor([0.0503, 0.1404, 0.0280, 0.0357, 0.1467, 0.0243, 0.0518, 0.0925], + device='cuda:1'), in_proj_covar=tensor([0.0207, 0.0234, 0.0195, 0.0154, 0.0222, 0.0207, 0.0238, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 10:20:16,376 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62826.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:20:19,780 INFO [train.py:898] (1/4) Epoch 18, batch 1050, loss[loss=0.1383, simple_loss=0.2269, pruned_loss=0.02481, over 18236.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2568, pruned_loss=0.03989, over 3553632.97 frames. ], batch size: 45, lr: 6.32e-03, grad_scale: 8.0 +2023-03-09 10:20:26,350 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62834.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:21:05,608 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.820e+02 2.730e+02 3.063e+02 3.769e+02 8.307e+02, threshold=6.126e+02, percent-clipped=1.0 +2023-03-09 10:21:18,727 INFO [train.py:898] (1/4) Epoch 18, batch 1100, loss[loss=0.1685, simple_loss=0.2601, pruned_loss=0.03843, over 18415.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2563, pruned_loss=0.03971, over 3569735.57 frames. ], batch size: 52, lr: 6.32e-03, grad_scale: 8.0 +2023-03-09 10:21:31,777 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.82 vs. limit=5.0 +2023-03-09 10:21:44,746 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62901.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:22:17,694 INFO [train.py:898] (1/4) Epoch 18, batch 1150, loss[loss=0.1869, simple_loss=0.28, pruned_loss=0.0469, over 18362.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2571, pruned_loss=0.03993, over 3584485.60 frames. 
], batch size: 56, lr: 6.32e-03, grad_scale: 8.0 +2023-03-09 10:22:18,048 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62929.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:22:32,030 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62941.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:23:03,230 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.305e+02 2.742e+02 3.290e+02 3.886e+02 6.757e+02, threshold=6.580e+02, percent-clipped=2.0 +2023-03-09 10:23:10,928 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.20 vs. limit=5.0 +2023-03-09 10:23:13,913 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62977.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:23:15,924 INFO [train.py:898] (1/4) Epoch 18, batch 1200, loss[loss=0.1715, simple_loss=0.2641, pruned_loss=0.03947, over 18484.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2571, pruned_loss=0.04011, over 3586698.13 frames. ], batch size: 51, lr: 6.32e-03, grad_scale: 8.0 +2023-03-09 10:24:06,506 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63022.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:24:14,843 INFO [train.py:898] (1/4) Epoch 18, batch 1250, loss[loss=0.1418, simple_loss=0.2313, pruned_loss=0.0261, over 18268.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2575, pruned_loss=0.04025, over 3590388.79 frames. ], batch size: 47, lr: 6.31e-03, grad_scale: 8.0 +2023-03-09 10:24:36,015 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63047.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:25:00,073 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.748e+02 2.862e+02 3.402e+02 4.233e+02 9.717e+02, threshold=6.805e+02, percent-clipped=8.0 +2023-03-09 10:25:13,514 INFO [train.py:898] (1/4) Epoch 18, batch 1300, loss[loss=0.1506, simple_loss=0.2346, pruned_loss=0.03332, over 18139.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2575, pruned_loss=0.04038, over 3587244.18 frames. ], batch size: 44, lr: 6.31e-03, grad_scale: 8.0 +2023-03-09 10:25:32,656 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63095.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:25:48,229 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-09 10:26:03,176 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6120, 3.7969, 5.0107, 4.4670, 3.4155, 3.0468, 4.5093, 5.2677], + device='cuda:1'), covar=tensor([0.0785, 0.1385, 0.0183, 0.0361, 0.0799, 0.1063, 0.0313, 0.0184], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0269, 0.0139, 0.0177, 0.0186, 0.0187, 0.0190, 0.0186], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 10:26:12,345 INFO [train.py:898] (1/4) Epoch 18, batch 1350, loss[loss=0.1573, simple_loss=0.2369, pruned_loss=0.03887, over 18345.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2572, pruned_loss=0.04028, over 3581986.80 frames. 
], batch size: 46, lr: 6.31e-03, grad_scale: 8.0 +2023-03-09 10:26:13,174 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63129.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:26:58,485 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.671e+02 2.877e+02 3.417e+02 4.263e+02 7.577e+02, threshold=6.833e+02, percent-clipped=1.0 +2023-03-09 10:27:11,775 INFO [train.py:898] (1/4) Epoch 18, batch 1400, loss[loss=0.1941, simple_loss=0.2874, pruned_loss=0.05039, over 18365.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2579, pruned_loss=0.04033, over 3568052.22 frames. ], batch size: 56, lr: 6.31e-03, grad_scale: 8.0 +2023-03-09 10:27:18,501 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5516, 3.4858, 2.1907, 4.5177, 3.1548, 4.4415, 2.3892, 4.0550], + device='cuda:1'), covar=tensor([0.0598, 0.0721, 0.1398, 0.0389, 0.0814, 0.0304, 0.1155, 0.0375], + device='cuda:1'), in_proj_covar=tensor([0.0206, 0.0220, 0.0183, 0.0270, 0.0190, 0.0259, 0.0198, 0.0194], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 10:27:39,000 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63201.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:28:02,558 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63221.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:28:05,856 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5838, 6.1379, 5.6368, 5.9233, 5.7268, 5.5445, 6.2093, 6.1313], + device='cuda:1'), covar=tensor([0.1198, 0.0716, 0.0421, 0.0703, 0.1333, 0.0696, 0.0478, 0.0667], + device='cuda:1'), in_proj_covar=tensor([0.0593, 0.0508, 0.0368, 0.0539, 0.0732, 0.0535, 0.0720, 0.0546], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 10:28:11,965 INFO [train.py:898] (1/4) Epoch 18, batch 1450, loss[loss=0.1501, simple_loss=0.2385, pruned_loss=0.03087, over 18554.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2569, pruned_loss=0.03994, over 3566212.17 frames. ], batch size: 49, lr: 6.30e-03, grad_scale: 8.0 +2023-03-09 10:28:26,293 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63241.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:28:35,843 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63249.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:28:57,055 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5497, 2.9765, 4.3287, 3.6883, 2.7043, 4.5867, 3.9940, 2.9297], + device='cuda:1'), covar=tensor([0.0519, 0.1366, 0.0235, 0.0405, 0.1556, 0.0218, 0.0502, 0.0989], + device='cuda:1'), in_proj_covar=tensor([0.0208, 0.0234, 0.0197, 0.0156, 0.0223, 0.0207, 0.0238, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 10:28:57,799 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.907e+02 2.745e+02 3.338e+02 4.107e+02 1.231e+03, threshold=6.676e+02, percent-clipped=2.0 +2023-03-09 10:29:10,024 INFO [train.py:898] (1/4) Epoch 18, batch 1500, loss[loss=0.1653, simple_loss=0.2599, pruned_loss=0.03535, over 18470.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2568, pruned_loss=0.04005, over 3557725.85 frames. 
], batch size: 59, lr: 6.30e-03, grad_scale: 8.0 +2023-03-09 10:29:14,273 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63282.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:29:22,579 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63289.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:30:01,191 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63322.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:30:09,223 INFO [train.py:898] (1/4) Epoch 18, batch 1550, loss[loss=0.1715, simple_loss=0.2609, pruned_loss=0.0411, over 18207.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2569, pruned_loss=0.04024, over 3549991.07 frames. ], batch size: 60, lr: 6.30e-03, grad_scale: 8.0 +2023-03-09 10:30:16,546 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5712, 2.7128, 2.3646, 2.8488, 3.5971, 3.4646, 3.0258, 2.8354], + device='cuda:1'), covar=tensor([0.0216, 0.0324, 0.0713, 0.0473, 0.0195, 0.0196, 0.0471, 0.0436], + device='cuda:1'), in_proj_covar=tensor([0.0132, 0.0129, 0.0159, 0.0151, 0.0120, 0.0109, 0.0149, 0.0149], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 10:30:55,738 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.939e+02 2.883e+02 3.563e+02 4.171e+02 7.864e+02, threshold=7.125e+02, percent-clipped=2.0 +2023-03-09 10:30:58,245 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63370.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:31:08,381 INFO [train.py:898] (1/4) Epoch 18, batch 1600, loss[loss=0.1867, simple_loss=0.2791, pruned_loss=0.04717, over 18227.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2573, pruned_loss=0.04013, over 3563625.45 frames. 
], batch size: 60, lr: 6.30e-03, grad_scale: 8.0 +2023-03-09 10:31:31,693 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1187, 5.1447, 5.1412, 4.8638, 4.8673, 4.9754, 5.2697, 5.2307], + device='cuda:1'), covar=tensor([0.0056, 0.0063, 0.0054, 0.0097, 0.0058, 0.0135, 0.0065, 0.0096], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0065, 0.0070, 0.0089, 0.0072, 0.0099, 0.0084, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 10:31:50,515 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5580, 2.1953, 2.4571, 2.7272, 2.9091, 5.0146, 4.7759, 3.5187], + device='cuda:1'), covar=tensor([0.1973, 0.3084, 0.3630, 0.2027, 0.3498, 0.0228, 0.0420, 0.0918], + device='cuda:1'), in_proj_covar=tensor([0.0287, 0.0337, 0.0368, 0.0272, 0.0388, 0.0226, 0.0291, 0.0244], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 10:31:51,422 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3369, 5.3552, 4.9645, 5.2837, 5.2955, 4.6369, 5.1571, 4.9481], + device='cuda:1'), covar=tensor([0.0446, 0.0442, 0.1303, 0.0774, 0.0655, 0.0428, 0.0454, 0.1068], + device='cuda:1'), in_proj_covar=tensor([0.0472, 0.0532, 0.0686, 0.0419, 0.0425, 0.0489, 0.0524, 0.0649], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 10:31:56,906 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5537, 6.1270, 5.5987, 5.8919, 5.7029, 5.5491, 6.1828, 6.0817], + device='cuda:1'), covar=tensor([0.1092, 0.0663, 0.0401, 0.0684, 0.1257, 0.0696, 0.0491, 0.0654], + device='cuda:1'), in_proj_covar=tensor([0.0592, 0.0503, 0.0367, 0.0536, 0.0728, 0.0532, 0.0716, 0.0545], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 10:31:58,079 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63420.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:32:00,339 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63422.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:32:08,052 INFO [train.py:898] (1/4) Epoch 18, batch 1650, loss[loss=0.1765, simple_loss=0.2605, pruned_loss=0.04629, over 18308.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2571, pruned_loss=0.04012, over 3570210.23 frames. 
], batch size: 49, lr: 6.29e-03, grad_scale: 8.0 +2023-03-09 10:32:08,357 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63429.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:32:16,363 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8834, 4.6553, 4.7675, 3.5089, 3.7332, 3.4781, 2.6279, 2.5464], + device='cuda:1'), covar=tensor([0.0208, 0.0147, 0.0063, 0.0270, 0.0364, 0.0211, 0.0701, 0.0782], + device='cuda:1'), in_proj_covar=tensor([0.0068, 0.0056, 0.0059, 0.0066, 0.0087, 0.0064, 0.0075, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 10:32:17,536 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8179, 4.0358, 2.4033, 4.0349, 5.1349, 2.5093, 3.7051, 4.1216], + device='cuda:1'), covar=tensor([0.0155, 0.1225, 0.1556, 0.0593, 0.0068, 0.1215, 0.0684, 0.0629], + device='cuda:1'), in_proj_covar=tensor([0.0155, 0.0260, 0.0201, 0.0195, 0.0113, 0.0180, 0.0213, 0.0221], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 10:32:19,403 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 10:32:51,712 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-09 10:32:54,720 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.082e+02 2.837e+02 3.509e+02 4.099e+02 8.239e+02, threshold=7.017e+02, percent-clipped=1.0 +2023-03-09 10:33:05,389 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63477.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:33:07,403 INFO [train.py:898] (1/4) Epoch 18, batch 1700, loss[loss=0.1676, simple_loss=0.2457, pruned_loss=0.04479, over 17691.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2564, pruned_loss=0.04007, over 3574145.44 frames. ], batch size: 39, lr: 6.29e-03, grad_scale: 8.0 +2023-03-09 10:33:10,138 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63481.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:33:12,261 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63483.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:34:06,941 INFO [train.py:898] (1/4) Epoch 18, batch 1750, loss[loss=0.2041, simple_loss=0.2872, pruned_loss=0.06046, over 12647.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2569, pruned_loss=0.04021, over 3559570.19 frames. ], batch size: 129, lr: 6.29e-03, grad_scale: 8.0 +2023-03-09 10:34:52,626 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.976e+02 2.714e+02 3.242e+02 3.831e+02 6.997e+02, threshold=6.484e+02, percent-clipped=0.0 +2023-03-09 10:34:54,066 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63569.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:35:03,630 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63577.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:35:05,724 INFO [train.py:898] (1/4) Epoch 18, batch 1800, loss[loss=0.1647, simple_loss=0.262, pruned_loss=0.03374, over 18475.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2576, pruned_loss=0.04035, over 3562669.20 frames. 
], batch size: 53, lr: 6.29e-03, grad_scale: 8.0 +2023-03-09 10:35:13,954 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63586.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:35:26,951 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8647, 3.7030, 5.0684, 2.7727, 4.4801, 2.6561, 3.0484, 1.8763], + device='cuda:1'), covar=tensor([0.1144, 0.0874, 0.0133, 0.0939, 0.0502, 0.2403, 0.2629, 0.2022], + device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0238, 0.0169, 0.0190, 0.0250, 0.0263, 0.0314, 0.0226], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 10:36:05,192 INFO [train.py:898] (1/4) Epoch 18, batch 1850, loss[loss=0.1697, simple_loss=0.2621, pruned_loss=0.03863, over 18129.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2568, pruned_loss=0.04034, over 3566551.42 frames. ], batch size: 62, lr: 6.28e-03, grad_scale: 8.0 +2023-03-09 10:36:06,666 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63630.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:36:26,763 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63647.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:36:51,538 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.913e+02 2.867e+02 3.371e+02 3.791e+02 7.715e+02, threshold=6.743e+02, percent-clipped=3.0 +2023-03-09 10:37:04,238 INFO [train.py:898] (1/4) Epoch 18, batch 1900, loss[loss=0.1733, simple_loss=0.265, pruned_loss=0.04082, over 18624.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.257, pruned_loss=0.04034, over 3580177.53 frames. ], batch size: 52, lr: 6.28e-03, grad_scale: 8.0 +2023-03-09 10:38:02,803 INFO [train.py:898] (1/4) Epoch 18, batch 1950, loss[loss=0.2015, simple_loss=0.2824, pruned_loss=0.0603, over 18358.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2572, pruned_loss=0.04047, over 3576853.36 frames. ], batch size: 56, lr: 6.28e-03, grad_scale: 8.0 +2023-03-09 10:38:50,315 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.821e+02 2.688e+02 3.189e+02 3.862e+02 9.985e+02, threshold=6.379e+02, percent-clipped=3.0 +2023-03-09 10:38:59,655 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63776.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:39:02,000 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63778.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:39:02,985 INFO [train.py:898] (1/4) Epoch 18, batch 2000, loss[loss=0.1734, simple_loss=0.2626, pruned_loss=0.04208, over 18513.00 frames. ], tot_loss[loss=0.169, simple_loss=0.257, pruned_loss=0.04046, over 3580375.34 frames. 
], batch size: 53, lr: 6.28e-03, grad_scale: 8.0 +2023-03-09 10:39:18,340 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2328, 5.3190, 4.5886, 5.2292, 5.2560, 4.7497, 5.1618, 4.7646], + device='cuda:1'), covar=tensor([0.0848, 0.0791, 0.2366, 0.1204, 0.0751, 0.0576, 0.0743, 0.1381], + device='cuda:1'), in_proj_covar=tensor([0.0478, 0.0537, 0.0691, 0.0425, 0.0431, 0.0493, 0.0531, 0.0658], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 10:39:36,220 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5938, 3.5889, 3.4625, 3.0625, 3.3246, 2.6606, 2.7148, 3.7091], + device='cuda:1'), covar=tensor([0.0061, 0.0083, 0.0072, 0.0138, 0.0089, 0.0199, 0.0207, 0.0049], + device='cuda:1'), in_proj_covar=tensor([0.0127, 0.0150, 0.0127, 0.0179, 0.0133, 0.0171, 0.0175, 0.0111], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 10:39:40,007 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63810.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:40:02,091 INFO [train.py:898] (1/4) Epoch 18, batch 2050, loss[loss=0.1633, simple_loss=0.2531, pruned_loss=0.03677, over 18487.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2575, pruned_loss=0.04062, over 3576155.65 frames. ], batch size: 53, lr: 6.27e-03, grad_scale: 8.0 +2023-03-09 10:40:03,580 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0647, 2.5221, 2.3001, 2.6747, 3.2143, 3.0466, 2.7893, 2.6952], + device='cuda:1'), covar=tensor([0.0201, 0.0238, 0.0507, 0.0319, 0.0180, 0.0184, 0.0369, 0.0329], + device='cuda:1'), in_proj_covar=tensor([0.0133, 0.0130, 0.0161, 0.0150, 0.0124, 0.0110, 0.0150, 0.0150], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 10:40:21,147 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63845.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:40:48,569 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.076e+02 2.804e+02 3.273e+02 4.132e+02 7.162e+02, threshold=6.546e+02, percent-clipped=3.0 +2023-03-09 10:40:53,098 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63871.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:40:59,880 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63877.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:41:01,205 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5417, 3.3950, 4.5776, 4.0496, 3.1144, 2.7689, 4.0247, 4.7408], + device='cuda:1'), covar=tensor([0.0805, 0.1381, 0.0206, 0.0452, 0.0916, 0.1219, 0.0411, 0.0351], + device='cuda:1'), in_proj_covar=tensor([0.0145, 0.0270, 0.0140, 0.0179, 0.0187, 0.0186, 0.0190, 0.0186], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 10:41:01,900 INFO [train.py:898] (1/4) Epoch 18, batch 2100, loss[loss=0.1731, simple_loss=0.2677, pruned_loss=0.03923, over 17157.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2576, pruned_loss=0.04048, over 3562158.96 frames. 
], batch size: 78, lr: 6.27e-03, grad_scale: 8.0 +2023-03-09 10:41:33,716 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63906.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:41:45,772 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63916.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:41:56,439 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63925.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:41:56,458 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63925.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:42:00,984 INFO [train.py:898] (1/4) Epoch 18, batch 2150, loss[loss=0.1568, simple_loss=0.2368, pruned_loss=0.03839, over 17638.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2571, pruned_loss=0.03998, over 3570062.66 frames. ], batch size: 39, lr: 6.27e-03, grad_scale: 8.0 +2023-03-09 10:42:09,723 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-09 10:42:16,571 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63942.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:42:47,025 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.000e+02 2.608e+02 3.106e+02 3.507e+02 7.149e+02, threshold=6.212e+02, percent-clipped=2.0 +2023-03-09 10:42:57,473 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63977.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:42:59,823 INFO [train.py:898] (1/4) Epoch 18, batch 2200, loss[loss=0.1426, simple_loss=0.2307, pruned_loss=0.0272, over 18486.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2568, pruned_loss=0.04007, over 3576951.54 frames. ], batch size: 47, lr: 6.27e-03, grad_scale: 8.0 +2023-03-09 10:43:33,806 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.75 vs. limit=2.0 +2023-03-09 10:43:56,168 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64023.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:44:02,735 INFO [train.py:898] (1/4) Epoch 18, batch 2250, loss[loss=0.1399, simple_loss=0.2209, pruned_loss=0.02941, over 18443.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2562, pruned_loss=0.03953, over 3588865.11 frames. ], batch size: 43, lr: 6.26e-03, grad_scale: 8.0 +2023-03-09 10:44:47,978 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.732e+02 2.740e+02 3.229e+02 3.665e+02 1.396e+03, threshold=6.458e+02, percent-clipped=4.0 +2023-03-09 10:44:57,973 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64076.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:45:00,282 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64078.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:45:01,210 INFO [train.py:898] (1/4) Epoch 18, batch 2300, loss[loss=0.1589, simple_loss=0.247, pruned_loss=0.03539, over 18518.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2564, pruned_loss=0.04013, over 3586880.06 frames. 
], batch size: 47, lr: 6.26e-03, grad_scale: 8.0 +2023-03-09 10:45:07,183 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64084.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:45:54,297 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64124.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:45:56,504 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64126.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:45:59,677 INFO [train.py:898] (1/4) Epoch 18, batch 2350, loss[loss=0.147, simple_loss=0.2313, pruned_loss=0.03136, over 18433.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2567, pruned_loss=0.04014, over 3583984.60 frames. ], batch size: 43, lr: 6.26e-03, grad_scale: 8.0 +2023-03-09 10:46:43,658 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64166.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:46:45,510 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.074e+02 2.798e+02 3.262e+02 4.005e+02 1.022e+03, threshold=6.524e+02, percent-clipped=3.0 +2023-03-09 10:46:58,425 INFO [train.py:898] (1/4) Epoch 18, batch 2400, loss[loss=0.1924, simple_loss=0.2797, pruned_loss=0.05249, over 18063.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2566, pruned_loss=0.04013, over 3586926.99 frames. ], batch size: 62, lr: 6.26e-03, grad_scale: 8.0 +2023-03-09 10:47:00,056 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8922, 4.5862, 4.5636, 3.4142, 3.7071, 3.4220, 2.4986, 2.5776], + device='cuda:1'), covar=tensor([0.0207, 0.0150, 0.0088, 0.0335, 0.0346, 0.0237, 0.0809, 0.0871], + device='cuda:1'), in_proj_covar=tensor([0.0068, 0.0057, 0.0060, 0.0067, 0.0088, 0.0065, 0.0076, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 10:47:01,509 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.25 vs. limit=5.0 +2023-03-09 10:47:24,709 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64201.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:47:53,546 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64225.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:47:58,521 INFO [train.py:898] (1/4) Epoch 18, batch 2450, loss[loss=0.1899, simple_loss=0.2813, pruned_loss=0.04926, over 18349.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2557, pruned_loss=0.03973, over 3579211.59 frames. 
], batch size: 55, lr: 6.26e-03, grad_scale: 8.0 +2023-03-09 10:48:02,287 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64232.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:48:14,218 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64242.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:48:44,849 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.830e+02 2.733e+02 3.243e+02 4.040e+02 1.212e+03, threshold=6.487e+02, percent-clipped=5.0 +2023-03-09 10:48:49,138 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64272.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:48:50,285 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64273.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:48:57,009 INFO [train.py:898] (1/4) Epoch 18, batch 2500, loss[loss=0.1746, simple_loss=0.2687, pruned_loss=0.04025, over 17934.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2562, pruned_loss=0.0396, over 3578489.62 frames. ], batch size: 65, lr: 6.25e-03, grad_scale: 4.0 +2023-03-09 10:49:10,366 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64290.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:49:14,733 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64293.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:49:30,972 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.49 vs. limit=5.0 +2023-03-09 10:49:56,111 INFO [train.py:898] (1/4) Epoch 18, batch 2550, loss[loss=0.182, simple_loss=0.2649, pruned_loss=0.0496, over 12645.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2568, pruned_loss=0.03968, over 3575617.18 frames. 
], batch size: 129, lr: 6.25e-03, grad_scale: 4.0 +2023-03-09 10:50:00,633 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9367, 4.1828, 2.4522, 4.0142, 5.2543, 2.5627, 3.8483, 4.0508], + device='cuda:1'), covar=tensor([0.0154, 0.1061, 0.1593, 0.0612, 0.0057, 0.1204, 0.0659, 0.0640], + device='cuda:1'), in_proj_covar=tensor([0.0154, 0.0261, 0.0201, 0.0194, 0.0114, 0.0180, 0.0212, 0.0220], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 10:50:10,239 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8963, 4.1789, 4.1282, 4.2133, 3.8230, 4.0775, 3.7653, 4.1247], + device='cuda:1'), covar=tensor([0.0270, 0.0347, 0.0278, 0.0499, 0.0356, 0.0263, 0.0949, 0.0317], + device='cuda:1'), in_proj_covar=tensor([0.0208, 0.0254, 0.0244, 0.0310, 0.0262, 0.0257, 0.0302, 0.0250], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 10:50:11,397 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4073, 3.8992, 3.9540, 3.0663, 3.3928, 3.0905, 2.4966, 2.3724], + device='cuda:1'), covar=tensor([0.0255, 0.0181, 0.0100, 0.0338, 0.0413, 0.0263, 0.0717, 0.0835], + device='cuda:1'), in_proj_covar=tensor([0.0068, 0.0057, 0.0060, 0.0067, 0.0088, 0.0065, 0.0076, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 10:50:13,625 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9683, 5.1740, 2.7115, 4.9640, 4.8692, 5.1945, 4.9321, 2.5467], + device='cuda:1'), covar=tensor([0.0197, 0.0069, 0.0760, 0.0087, 0.0073, 0.0071, 0.0103, 0.1077], + device='cuda:1'), in_proj_covar=tensor([0.0086, 0.0079, 0.0095, 0.0093, 0.0083, 0.0074, 0.0083, 0.0096], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 10:50:15,808 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64345.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:50:43,211 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.795e+02 2.856e+02 3.469e+02 4.229e+02 8.675e+02, threshold=6.938e+02, percent-clipped=2.0 +2023-03-09 10:50:54,971 INFO [train.py:898] (1/4) Epoch 18, batch 2600, loss[loss=0.1805, simple_loss=0.271, pruned_loss=0.04503, over 18616.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.2566, pruned_loss=0.03951, over 3581129.40 frames. ], batch size: 52, lr: 6.25e-03, grad_scale: 4.0 +2023-03-09 10:50:55,163 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64379.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:51:27,211 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64406.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:51:27,606 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. 
limit=2.0 +2023-03-09 10:51:41,074 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3954, 5.9269, 5.5217, 5.6561, 5.4756, 5.3234, 5.9337, 5.8932], + device='cuda:1'), covar=tensor([0.1121, 0.0724, 0.0471, 0.0725, 0.1389, 0.0676, 0.0583, 0.0708], + device='cuda:1'), in_proj_covar=tensor([0.0596, 0.0508, 0.0369, 0.0530, 0.0732, 0.0531, 0.0718, 0.0549], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 10:51:53,362 INFO [train.py:898] (1/4) Epoch 18, batch 2650, loss[loss=0.1868, simple_loss=0.2875, pruned_loss=0.04305, over 18274.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2569, pruned_loss=0.04002, over 3590828.10 frames. ], batch size: 57, lr: 6.25e-03, grad_scale: 4.0 +2023-03-09 10:52:35,831 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64465.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:52:36,782 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64466.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:52:39,836 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.908e+02 2.670e+02 3.183e+02 3.846e+02 7.889e+02, threshold=6.366e+02, percent-clipped=2.0 +2023-03-09 10:52:52,156 INFO [train.py:898] (1/4) Epoch 18, batch 2700, loss[loss=0.1347, simple_loss=0.2188, pruned_loss=0.02532, over 18547.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2574, pruned_loss=0.04027, over 3588953.28 frames. ], batch size: 45, lr: 6.24e-03, grad_scale: 4.0 +2023-03-09 10:53:04,884 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1208, 5.7623, 5.3640, 5.5017, 5.3591, 5.1199, 5.8018, 5.7265], + device='cuda:1'), covar=tensor([0.1294, 0.0744, 0.0531, 0.0738, 0.1465, 0.0760, 0.0592, 0.0769], + device='cuda:1'), in_proj_covar=tensor([0.0600, 0.0510, 0.0371, 0.0534, 0.0733, 0.0533, 0.0726, 0.0551], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 10:53:18,449 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64501.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:53:32,917 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64514.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:53:47,607 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64526.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:53:50,698 INFO [train.py:898] (1/4) Epoch 18, batch 2750, loss[loss=0.1452, simple_loss=0.2286, pruned_loss=0.03085, over 18496.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2576, pruned_loss=0.04031, over 3579940.91 frames. ], batch size: 44, lr: 6.24e-03, grad_scale: 4.0 +2023-03-09 10:54:14,696 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64549.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:54:37,407 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.946e+02 2.775e+02 3.277e+02 4.138e+02 9.183e+02, threshold=6.554e+02, percent-clipped=4.0 +2023-03-09 10:54:41,153 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64572.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:54:49,209 INFO [train.py:898] (1/4) Epoch 18, batch 2800, loss[loss=0.1661, simple_loss=0.2563, pruned_loss=0.03797, over 18282.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2581, pruned_loss=0.04029, over 3580106.81 frames. 
], batch size: 49, lr: 6.24e-03, grad_scale: 8.0 +2023-03-09 10:55:00,295 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64588.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:55:01,616 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64589.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:55:37,456 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64620.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:55:45,487 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7009, 5.2587, 5.2059, 5.2367, 4.7471, 5.1239, 4.5935, 5.1679], + device='cuda:1'), covar=tensor([0.0285, 0.0312, 0.0213, 0.0430, 0.0402, 0.0264, 0.1080, 0.0305], + device='cuda:1'), in_proj_covar=tensor([0.0208, 0.0253, 0.0242, 0.0311, 0.0261, 0.0257, 0.0297, 0.0249], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 10:55:47,452 INFO [train.py:898] (1/4) Epoch 18, batch 2850, loss[loss=0.1732, simple_loss=0.2681, pruned_loss=0.03914, over 17939.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2574, pruned_loss=0.04011, over 3576433.19 frames. ], batch size: 65, lr: 6.24e-03, grad_scale: 8.0 +2023-03-09 10:56:10,924 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64648.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:56:13,225 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64650.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:56:34,848 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.051e+02 2.756e+02 3.294e+02 3.793e+02 6.493e+02, threshold=6.588e+02, percent-clipped=0.0 +2023-03-09 10:56:46,032 INFO [train.py:898] (1/4) Epoch 18, batch 2900, loss[loss=0.1573, simple_loss=0.2458, pruned_loss=0.03436, over 18485.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.2572, pruned_loss=0.03995, over 3580663.57 frames. ], batch size: 47, lr: 6.23e-03, grad_scale: 8.0 +2023-03-09 10:56:46,441 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64679.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:57:12,515 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64701.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:57:14,944 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64703.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:57:21,970 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64709.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:57:39,630 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-09 10:57:42,512 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64727.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:57:44,571 INFO [train.py:898] (1/4) Epoch 18, batch 2950, loss[loss=0.1972, simple_loss=0.2887, pruned_loss=0.05283, over 18483.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.259, pruned_loss=0.04065, over 3567043.69 frames. 
], batch size: 59, lr: 6.23e-03, grad_scale: 8.0 +2023-03-09 10:58:03,586 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64745.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:58:12,611 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64752.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:58:14,304 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-03-09 10:58:26,543 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64764.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:58:31,866 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.931e+02 2.790e+02 3.324e+02 3.890e+02 1.048e+03, threshold=6.648e+02, percent-clipped=3.0 +2023-03-09 10:58:43,040 INFO [train.py:898] (1/4) Epoch 18, batch 3000, loss[loss=0.1366, simple_loss=0.2181, pruned_loss=0.02759, over 18431.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2589, pruned_loss=0.04055, over 3580112.42 frames. ], batch size: 43, lr: 6.23e-03, grad_scale: 8.0 +2023-03-09 10:58:43,040 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 10:58:55,125 INFO [train.py:932] (1/4) Epoch 18, validation: loss=0.1513, simple_loss=0.2515, pruned_loss=0.02557, over 944034.00 frames. +2023-03-09 10:58:55,126 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 10:59:19,052 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-03-09 10:59:28,032 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64806.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:59:36,040 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64813.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:59:44,857 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64821.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:59:53,900 INFO [train.py:898] (1/4) Epoch 18, batch 3050, loss[loss=0.1749, simple_loss=0.2612, pruned_loss=0.04432, over 18253.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2574, pruned_loss=0.04, over 3591805.15 frames. ], batch size: 60, lr: 6.23e-03, grad_scale: 8.0 +2023-03-09 11:00:41,792 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.847e+02 2.582e+02 2.978e+02 3.625e+02 9.287e+02, threshold=5.956e+02, percent-clipped=2.0 +2023-03-09 11:00:52,636 INFO [train.py:898] (1/4) Epoch 18, batch 3100, loss[loss=0.194, simple_loss=0.2897, pruned_loss=0.04909, over 18314.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2578, pruned_loss=0.04039, over 3589852.84 frames. ], batch size: 54, lr: 6.22e-03, grad_scale: 8.0 +2023-03-09 11:01:03,674 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64888.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:01:25,490 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-09 11:01:51,091 INFO [train.py:898] (1/4) Epoch 18, batch 3150, loss[loss=0.165, simple_loss=0.2554, pruned_loss=0.03733, over 18392.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2575, pruned_loss=0.03995, over 3593853.79 frames. 
], batch size: 52, lr: 6.22e-03, grad_scale: 8.0 +2023-03-09 11:01:59,104 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64936.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:02:10,216 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64945.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:02:38,250 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.946e+02 2.751e+02 3.227e+02 4.235e+02 1.394e+03, threshold=6.453e+02, percent-clipped=7.0 +2023-03-09 11:02:46,526 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64976.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:02:49,635 INFO [train.py:898] (1/4) Epoch 18, batch 3200, loss[loss=0.1821, simple_loss=0.2685, pruned_loss=0.04785, over 17684.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2578, pruned_loss=0.04014, over 3584948.20 frames. ], batch size: 70, lr: 6.22e-03, grad_scale: 8.0 +2023-03-09 11:03:16,795 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65001.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:03:20,125 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65004.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:03:48,370 INFO [train.py:898] (1/4) Epoch 18, batch 3250, loss[loss=0.1849, simple_loss=0.2745, pruned_loss=0.04762, over 18401.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2572, pruned_loss=0.04021, over 3574080.34 frames. ], batch size: 52, lr: 6.22e-03, grad_scale: 8.0 +2023-03-09 11:03:58,019 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65037.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 11:04:12,035 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65049.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:04:18,544 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0337, 4.3084, 2.4091, 4.2191, 5.3559, 2.9430, 3.7248, 3.8634], + device='cuda:1'), covar=tensor([0.0174, 0.1149, 0.1662, 0.0635, 0.0080, 0.1029, 0.0783, 0.0925], + device='cuda:1'), in_proj_covar=tensor([0.0155, 0.0263, 0.0202, 0.0195, 0.0115, 0.0181, 0.0213, 0.0221], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 11:04:19,642 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65055.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:04:24,045 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65059.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:04:28,057 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-09 11:04:35,617 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.699e+02 2.719e+02 3.291e+02 4.073e+02 7.543e+02, threshold=6.581e+02, percent-clipped=1.0 +2023-03-09 11:04:46,973 INFO [train.py:898] (1/4) Epoch 18, batch 3300, loss[loss=0.1711, simple_loss=0.2608, pruned_loss=0.04075, over 18496.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2572, pruned_loss=0.04006, over 3568018.49 frames. 
], batch size: 51, lr: 6.21e-03, grad_scale: 4.0 +2023-03-09 11:05:13,341 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65101.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:05:21,862 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65108.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:05:31,491 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65116.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:05:32,638 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5922, 4.1553, 4.1293, 3.1955, 3.5798, 3.2516, 2.5154, 2.3377], + device='cuda:1'), covar=tensor([0.0247, 0.0169, 0.0106, 0.0329, 0.0331, 0.0239, 0.0746, 0.0872], + device='cuda:1'), in_proj_covar=tensor([0.0068, 0.0057, 0.0060, 0.0067, 0.0087, 0.0066, 0.0076, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 11:05:37,657 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65121.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:05:46,586 INFO [train.py:898] (1/4) Epoch 18, batch 3350, loss[loss=0.1541, simple_loss=0.2399, pruned_loss=0.03418, over 18257.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2565, pruned_loss=0.03969, over 3575203.08 frames. ], batch size: 47, lr: 6.21e-03, grad_scale: 4.0 +2023-03-09 11:06:33,118 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65169.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:06:34,021 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.758e+02 2.720e+02 3.418e+02 4.125e+02 2.086e+03, threshold=6.835e+02, percent-clipped=5.0 +2023-03-09 11:06:44,720 INFO [train.py:898] (1/4) Epoch 18, batch 3400, loss[loss=0.1811, simple_loss=0.274, pruned_loss=0.04408, over 18295.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2573, pruned_loss=0.0402, over 3583637.47 frames. ], batch size: 57, lr: 6.21e-03, grad_scale: 4.0 +2023-03-09 11:07:43,149 INFO [train.py:898] (1/4) Epoch 18, batch 3450, loss[loss=0.1922, simple_loss=0.2771, pruned_loss=0.05363, over 18139.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2571, pruned_loss=0.04028, over 3574809.94 frames. ], batch size: 62, lr: 6.21e-03, grad_scale: 4.0 +2023-03-09 11:07:43,968 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.45 vs. 
limit=5.0 +2023-03-09 11:08:01,665 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65245.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:08:24,784 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8506, 4.8182, 4.9337, 4.6840, 4.6166, 4.7374, 5.0049, 4.9555], + device='cuda:1'), covar=tensor([0.0069, 0.0068, 0.0053, 0.0093, 0.0070, 0.0124, 0.0065, 0.0086], + device='cuda:1'), in_proj_covar=tensor([0.0091, 0.0067, 0.0071, 0.0090, 0.0073, 0.0101, 0.0085, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 11:08:31,047 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.103e+02 2.885e+02 3.344e+02 3.937e+02 8.073e+02, threshold=6.688e+02, percent-clipped=3.0 +2023-03-09 11:08:37,245 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6427, 3.4432, 4.7476, 4.1027, 2.9704, 2.7630, 4.0258, 4.9707], + device='cuda:1'), covar=tensor([0.0858, 0.1340, 0.0190, 0.0477, 0.1132, 0.1350, 0.0467, 0.0210], + device='cuda:1'), in_proj_covar=tensor([0.0147, 0.0273, 0.0142, 0.0181, 0.0191, 0.0190, 0.0193, 0.0189], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 11:08:42,001 INFO [train.py:898] (1/4) Epoch 18, batch 3500, loss[loss=0.1621, simple_loss=0.2564, pruned_loss=0.03387, over 18359.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2574, pruned_loss=0.04026, over 3582698.15 frames. ], batch size: 55, lr: 6.20e-03, grad_scale: 4.0 +2023-03-09 11:08:48,978 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65285.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:08:58,122 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65293.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:09:10,782 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65304.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:09:28,619 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.18 vs. limit=5.0 +2023-03-09 11:09:30,289 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8686, 2.9426, 4.4758, 3.8980, 2.7795, 4.8015, 4.0957, 2.9500], + device='cuda:1'), covar=tensor([0.0436, 0.1297, 0.0233, 0.0395, 0.1320, 0.0158, 0.0485, 0.0891], + device='cuda:1'), in_proj_covar=tensor([0.0200, 0.0228, 0.0195, 0.0152, 0.0217, 0.0199, 0.0234, 0.0192], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 11:09:37,583 INFO [train.py:898] (1/4) Epoch 18, batch 3550, loss[loss=0.201, simple_loss=0.2968, pruned_loss=0.05255, over 18361.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2567, pruned_loss=0.03979, over 3584853.18 frames. 
], batch size: 56, lr: 6.20e-03, grad_scale: 4.0 +2023-03-09 11:09:41,606 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65332.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:09:45,757 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4903, 5.2674, 5.6977, 5.6649, 5.3345, 6.2252, 5.8966, 5.4404], + device='cuda:1'), covar=tensor([0.1052, 0.0557, 0.0641, 0.0622, 0.1396, 0.0663, 0.0489, 0.1664], + device='cuda:1'), in_proj_covar=tensor([0.0350, 0.0274, 0.0300, 0.0299, 0.0325, 0.0411, 0.0273, 0.0400], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:1') +2023-03-09 11:09:57,292 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65346.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:10:03,348 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65352.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:10:10,818 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65359.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:10:23,173 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.008e+02 2.567e+02 2.931e+02 3.482e+02 6.619e+02, threshold=5.862e+02, percent-clipped=0.0 +2023-03-09 11:10:31,693 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-03-09 11:10:33,170 INFO [train.py:898] (1/4) Epoch 18, batch 3600, loss[loss=0.1995, simple_loss=0.2945, pruned_loss=0.05225, over 18413.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.257, pruned_loss=0.03986, over 3582895.75 frames. ], batch size: 52, lr: 6.20e-03, grad_scale: 8.0 +2023-03-09 11:10:53,604 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 11:10:57,559 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65401.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:11:03,662 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65407.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:11:04,727 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65408.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:11:07,311 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65411.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:11:38,111 INFO [train.py:898] (1/4) Epoch 19, batch 0, loss[loss=0.1807, simple_loss=0.2784, pruned_loss=0.04144, over 17829.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2784, pruned_loss=0.04144, over 17829.00 frames. ], batch size: 70, lr: 6.03e-03, grad_scale: 8.0 +2023-03-09 11:11:38,112 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 11:11:49,829 INFO [train.py:932] (1/4) Epoch 19, validation: loss=0.1513, simple_loss=0.2518, pruned_loss=0.02538, over 944034.00 frames. 
+2023-03-09 11:11:49,830 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 11:11:55,708 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5354, 5.2652, 5.7851, 5.7205, 5.4454, 6.2939, 5.9531, 5.5648], + device='cuda:1'), covar=tensor([0.1025, 0.0573, 0.0629, 0.0691, 0.1166, 0.0583, 0.0550, 0.1518], + device='cuda:1'), in_proj_covar=tensor([0.0348, 0.0273, 0.0297, 0.0297, 0.0323, 0.0408, 0.0271, 0.0397], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:1') +2023-03-09 11:12:32,066 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65449.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:12:40,037 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65456.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:12:47,634 INFO [train.py:898] (1/4) Epoch 19, batch 50, loss[loss=0.1897, simple_loss=0.2817, pruned_loss=0.0488, over 18019.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2574, pruned_loss=0.03921, over 822003.25 frames. ], batch size: 65, lr: 6.03e-03, grad_scale: 8.0 +2023-03-09 11:12:55,633 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.183e+02 3.168e+02 3.745e+02 4.486e+02 9.575e+02, threshold=7.490e+02, percent-clipped=6.0 +2023-03-09 11:12:58,281 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4022, 2.1906, 3.8915, 3.6782, 2.1689, 4.0757, 3.6426, 2.5356], + device='cuda:1'), covar=tensor([0.0482, 0.2142, 0.0343, 0.0319, 0.2201, 0.0305, 0.0657, 0.1315], + device='cuda:1'), in_proj_covar=tensor([0.0198, 0.0225, 0.0193, 0.0151, 0.0214, 0.0197, 0.0232, 0.0190], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 11:13:46,482 INFO [train.py:898] (1/4) Epoch 19, batch 100, loss[loss=0.1905, simple_loss=0.2815, pruned_loss=0.04976, over 18500.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2576, pruned_loss=0.03983, over 1440004.22 frames. ], batch size: 53, lr: 6.03e-03, grad_scale: 8.0 +2023-03-09 11:14:08,278 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65532.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:14:44,739 INFO [train.py:898] (1/4) Epoch 19, batch 150, loss[loss=0.1691, simple_loss=0.2572, pruned_loss=0.04044, over 18494.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2565, pruned_loss=0.03917, over 1920744.22 frames. ], batch size: 47, lr: 6.02e-03, grad_scale: 4.0 +2023-03-09 11:14:53,874 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.910e+02 2.761e+02 3.150e+02 3.838e+02 8.694e+02, threshold=6.300e+02, percent-clipped=3.0 +2023-03-09 11:15:20,724 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65593.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 11:15:44,542 INFO [train.py:898] (1/4) Epoch 19, batch 200, loss[loss=0.1571, simple_loss=0.235, pruned_loss=0.03956, over 17640.00 frames. ], tot_loss[loss=0.1661, simple_loss=0.2553, pruned_loss=0.03847, over 2292453.93 frames. 
], batch size: 39, lr: 6.02e-03, grad_scale: 4.0 +2023-03-09 11:15:47,145 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9526, 5.4593, 5.4288, 5.5074, 4.9538, 5.3894, 4.7744, 5.3136], + device='cuda:1'), covar=tensor([0.0241, 0.0274, 0.0185, 0.0356, 0.0387, 0.0248, 0.1045, 0.0310], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0256, 0.0245, 0.0316, 0.0263, 0.0262, 0.0302, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 11:16:04,443 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7558, 3.6263, 4.8476, 4.3040, 3.1682, 2.7523, 4.3832, 5.1608], + device='cuda:1'), covar=tensor([0.0799, 0.1575, 0.0227, 0.0416, 0.0998, 0.1322, 0.0383, 0.0257], + device='cuda:1'), in_proj_covar=tensor([0.0147, 0.0271, 0.0143, 0.0179, 0.0191, 0.0189, 0.0192, 0.0189], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 11:16:06,612 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65632.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:16:17,261 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65641.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:16:43,302 INFO [train.py:898] (1/4) Epoch 19, batch 250, loss[loss=0.1583, simple_loss=0.2443, pruned_loss=0.03613, over 18507.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2558, pruned_loss=0.0389, over 2576744.27 frames. ], batch size: 47, lr: 6.02e-03, grad_scale: 4.0 +2023-03-09 11:16:52,483 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.860e+02 2.820e+02 3.379e+02 4.029e+02 8.035e+02, threshold=6.759e+02, percent-clipped=4.0 +2023-03-09 11:17:02,986 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65680.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:17:12,186 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5884, 5.0389, 4.9668, 5.0472, 4.6213, 4.9607, 4.4563, 4.9128], + device='cuda:1'), covar=tensor([0.0231, 0.0306, 0.0236, 0.0428, 0.0366, 0.0235, 0.1045, 0.0341], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0255, 0.0245, 0.0316, 0.0264, 0.0261, 0.0302, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 11:17:31,859 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9634, 4.1663, 2.5205, 4.0220, 5.2466, 2.4142, 3.8096, 4.1287], + device='cuda:1'), covar=tensor([0.0155, 0.1080, 0.1641, 0.0744, 0.0069, 0.1513, 0.0726, 0.0665], + device='cuda:1'), in_proj_covar=tensor([0.0156, 0.0262, 0.0200, 0.0194, 0.0116, 0.0179, 0.0212, 0.0220], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 11:17:40,380 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65711.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:17:42,324 INFO [train.py:898] (1/4) Epoch 19, batch 300, loss[loss=0.1663, simple_loss=0.2523, pruned_loss=0.04013, over 18394.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2556, pruned_loss=0.0389, over 2802247.94 frames. ], batch size: 48, lr: 6.02e-03, grad_scale: 4.0 +2023-03-09 11:18:24,016 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. 
limit=2.0 +2023-03-09 11:18:35,673 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65759.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:18:40,572 INFO [train.py:898] (1/4) Epoch 19, batch 350, loss[loss=0.1681, simple_loss=0.2488, pruned_loss=0.04369, over 18481.00 frames. ], tot_loss[loss=0.1661, simple_loss=0.2548, pruned_loss=0.03874, over 2973333.19 frames. ], batch size: 44, lr: 6.01e-03, grad_scale: 4.0 +2023-03-09 11:18:49,493 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.996e+02 2.649e+02 2.976e+02 3.551e+02 5.920e+02, threshold=5.952e+02, percent-clipped=0.0 +2023-03-09 11:18:51,290 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.92 vs. limit=5.0 +2023-03-09 11:19:38,373 INFO [train.py:898] (1/4) Epoch 19, batch 400, loss[loss=0.2052, simple_loss=0.2871, pruned_loss=0.06166, over 12468.00 frames. ], tot_loss[loss=0.1664, simple_loss=0.2552, pruned_loss=0.03874, over 3111974.16 frames. ], batch size: 129, lr: 6.01e-03, grad_scale: 8.0 +2023-03-09 11:20:07,658 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65838.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:20:13,172 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4268, 5.4105, 4.9966, 5.2989, 5.3237, 4.6995, 5.2430, 5.0323], + device='cuda:1'), covar=tensor([0.0382, 0.0413, 0.1208, 0.0806, 0.0556, 0.0454, 0.0422, 0.1034], + device='cuda:1'), in_proj_covar=tensor([0.0464, 0.0534, 0.0686, 0.0422, 0.0431, 0.0490, 0.0526, 0.0659], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 11:20:37,147 INFO [train.py:898] (1/4) Epoch 19, batch 450, loss[loss=0.2023, simple_loss=0.2856, pruned_loss=0.0595, over 18035.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2552, pruned_loss=0.03911, over 3225461.10 frames. ], batch size: 65, lr: 6.01e-03, grad_scale: 8.0 +2023-03-09 11:20:46,699 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.864e+02 3.029e+02 3.525e+02 4.091e+02 6.836e+02, threshold=7.049e+02, percent-clipped=6.0 +2023-03-09 11:21:05,988 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65888.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:21:18,631 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65899.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 11:21:22,715 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-03-09 11:21:35,175 INFO [train.py:898] (1/4) Epoch 19, batch 500, loss[loss=0.1641, simple_loss=0.2589, pruned_loss=0.03461, over 18400.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2554, pruned_loss=0.03901, over 3306081.75 frames. 
], batch size: 52, lr: 6.01e-03, grad_scale: 8.0 +2023-03-09 11:22:01,554 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7202, 4.4819, 4.4916, 3.3653, 3.7103, 3.4016, 2.4921, 2.5101], + device='cuda:1'), covar=tensor([0.0221, 0.0132, 0.0070, 0.0309, 0.0330, 0.0216, 0.0772, 0.0791], + device='cuda:1'), in_proj_covar=tensor([0.0068, 0.0057, 0.0060, 0.0067, 0.0087, 0.0065, 0.0075, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 11:22:08,478 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65941.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:22:24,500 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 11:22:33,523 INFO [train.py:898] (1/4) Epoch 19, batch 550, loss[loss=0.1755, simple_loss=0.2682, pruned_loss=0.04139, over 17792.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2555, pruned_loss=0.03904, over 3372253.21 frames. ], batch size: 70, lr: 6.01e-03, grad_scale: 8.0 +2023-03-09 11:22:42,910 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.825e+02 2.542e+02 3.090e+02 3.456e+02 5.520e+02, threshold=6.179e+02, percent-clipped=0.0 +2023-03-09 11:23:04,306 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65989.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:23:13,300 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7259, 4.4240, 4.4117, 3.2698, 3.7186, 3.3953, 2.5473, 2.3037], + device='cuda:1'), covar=tensor([0.0210, 0.0141, 0.0080, 0.0334, 0.0292, 0.0227, 0.0750, 0.0871], + device='cuda:1'), in_proj_covar=tensor([0.0068, 0.0057, 0.0059, 0.0067, 0.0086, 0.0065, 0.0075, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 11:23:35,297 INFO [train.py:898] (1/4) Epoch 19, batch 600, loss[loss=0.1859, simple_loss=0.2702, pruned_loss=0.05077, over 18202.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2563, pruned_loss=0.0395, over 3421602.61 frames. 
], batch size: 60, lr: 6.00e-03, grad_scale: 8.0 +2023-03-09 11:23:49,576 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5633, 2.8992, 2.6222, 2.9281, 3.6193, 3.4801, 3.1821, 2.9215], + device='cuda:1'), covar=tensor([0.0217, 0.0255, 0.0561, 0.0343, 0.0182, 0.0175, 0.0313, 0.0360], + device='cuda:1'), in_proj_covar=tensor([0.0134, 0.0130, 0.0161, 0.0153, 0.0124, 0.0111, 0.0149, 0.0150], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 11:24:06,179 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3728, 3.3365, 2.2498, 4.2358, 2.8954, 4.0086, 2.3374, 3.8142], + device='cuda:1'), covar=tensor([0.0649, 0.0757, 0.1411, 0.0433, 0.0872, 0.0298, 0.1171, 0.0401], + device='cuda:1'), in_proj_covar=tensor([0.0208, 0.0221, 0.0188, 0.0272, 0.0188, 0.0259, 0.0198, 0.0195], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 11:24:17,649 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8705, 4.1745, 2.4825, 4.0264, 5.1965, 2.5038, 3.7537, 3.8396], + device='cuda:1'), covar=tensor([0.0209, 0.1046, 0.1504, 0.0603, 0.0069, 0.1238, 0.0692, 0.0783], + device='cuda:1'), in_proj_covar=tensor([0.0158, 0.0263, 0.0202, 0.0196, 0.0117, 0.0180, 0.0214, 0.0222], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 11:24:34,017 INFO [train.py:898] (1/4) Epoch 19, batch 650, loss[loss=0.1546, simple_loss=0.2439, pruned_loss=0.0327, over 18412.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2574, pruned_loss=0.04014, over 3455171.12 frames. ], batch size: 48, lr: 6.00e-03, grad_scale: 8.0 +2023-03-09 11:24:42,672 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.920e+02 2.591e+02 2.961e+02 3.650e+02 8.631e+02, threshold=5.923e+02, percent-clipped=2.0 +2023-03-09 11:24:59,047 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.6253, 6.1426, 5.6781, 5.9599, 5.7530, 5.5943, 6.2299, 6.1322], + device='cuda:1'), covar=tensor([0.1178, 0.0719, 0.0418, 0.0707, 0.1447, 0.0701, 0.0556, 0.0689], + device='cuda:1'), in_proj_covar=tensor([0.0601, 0.0517, 0.0372, 0.0541, 0.0737, 0.0538, 0.0733, 0.0551], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 11:25:32,802 INFO [train.py:898] (1/4) Epoch 19, batch 700, loss[loss=0.1722, simple_loss=0.2641, pruned_loss=0.04013, over 18623.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2577, pruned_loss=0.04033, over 3488759.27 frames. 
], batch size: 52, lr: 6.00e-03, grad_scale: 8.0 +2023-03-09 11:26:04,182 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1509, 5.1223, 4.7902, 5.0350, 5.0623, 4.5028, 4.9795, 4.7586], + device='cuda:1'), covar=tensor([0.0344, 0.0394, 0.1102, 0.0711, 0.0500, 0.0391, 0.0386, 0.0896], + device='cuda:1'), in_proj_covar=tensor([0.0460, 0.0533, 0.0680, 0.0417, 0.0427, 0.0486, 0.0523, 0.0655], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 11:26:09,926 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66144.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:26:27,970 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66160.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:26:31,045 INFO [train.py:898] (1/4) Epoch 19, batch 750, loss[loss=0.1544, simple_loss=0.2398, pruned_loss=0.03444, over 17666.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.257, pruned_loss=0.03996, over 3511766.53 frames. ], batch size: 39, lr: 6.00e-03, grad_scale: 8.0 +2023-03-09 11:26:40,057 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.858e+02 2.715e+02 3.373e+02 4.119e+02 8.892e+02, threshold=6.747e+02, percent-clipped=6.0 +2023-03-09 11:27:02,121 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66188.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:27:08,747 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66194.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 11:27:21,521 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66205.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:27:21,625 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6620, 2.4115, 2.7893, 2.8432, 3.2598, 5.1433, 4.8904, 3.8412], + device='cuda:1'), covar=tensor([0.1626, 0.2189, 0.2771, 0.1639, 0.2196, 0.0149, 0.0326, 0.0664], + device='cuda:1'), in_proj_covar=tensor([0.0293, 0.0343, 0.0373, 0.0273, 0.0390, 0.0234, 0.0296, 0.0249], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 11:27:30,199 INFO [train.py:898] (1/4) Epoch 19, batch 800, loss[loss=0.1585, simple_loss=0.2465, pruned_loss=0.03523, over 18336.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2569, pruned_loss=0.03974, over 3524885.00 frames. ], batch size: 46, lr: 5.99e-03, grad_scale: 8.0 +2023-03-09 11:27:39,581 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66221.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:27:58,029 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66236.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:28:23,805 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66258.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:28:29,019 INFO [train.py:898] (1/4) Epoch 19, batch 850, loss[loss=0.1741, simple_loss=0.2622, pruned_loss=0.04296, over 18330.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2571, pruned_loss=0.03964, over 3537912.28 frames. ], batch size: 55, lr: 5.99e-03, grad_scale: 8.0 +2023-03-09 11:28:34,456 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. 
limit=2.0 +2023-03-09 11:28:37,711 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9109, 3.7221, 5.0156, 4.4759, 3.2577, 3.0737, 4.4345, 5.2773], + device='cuda:1'), covar=tensor([0.0799, 0.1459, 0.0166, 0.0358, 0.0899, 0.1080, 0.0375, 0.0186], + device='cuda:1'), in_proj_covar=tensor([0.0145, 0.0269, 0.0141, 0.0178, 0.0188, 0.0186, 0.0190, 0.0186], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 11:28:38,323 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.829e+02 2.659e+02 3.037e+02 3.502e+02 7.094e+02, threshold=6.073e+02, percent-clipped=1.0 +2023-03-09 11:29:27,610 INFO [train.py:898] (1/4) Epoch 19, batch 900, loss[loss=0.1647, simple_loss=0.2527, pruned_loss=0.03833, over 18409.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2564, pruned_loss=0.03954, over 3551166.49 frames. ], batch size: 52, lr: 5.99e-03, grad_scale: 8.0 +2023-03-09 11:29:34,930 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66319.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:29:52,721 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2323, 4.3857, 2.5916, 4.2129, 5.4214, 2.6416, 4.1490, 4.3994], + device='cuda:1'), covar=tensor([0.0111, 0.0820, 0.1358, 0.0522, 0.0055, 0.1066, 0.0500, 0.0496], + device='cuda:1'), in_proj_covar=tensor([0.0155, 0.0262, 0.0199, 0.0193, 0.0117, 0.0179, 0.0212, 0.0220], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 11:30:26,352 INFO [train.py:898] (1/4) Epoch 19, batch 950, loss[loss=0.1552, simple_loss=0.2438, pruned_loss=0.03332, over 18408.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2558, pruned_loss=0.03925, over 3549549.31 frames. ], batch size: 48, lr: 5.99e-03, grad_scale: 8.0 +2023-03-09 11:30:35,531 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.870e+02 2.786e+02 3.276e+02 3.823e+02 6.453e+02, threshold=6.553e+02, percent-clipped=1.0 +2023-03-09 11:31:02,053 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7835, 4.5374, 4.5486, 3.3898, 3.8315, 3.4416, 2.7227, 2.5142], + device='cuda:1'), covar=tensor([0.0194, 0.0116, 0.0086, 0.0296, 0.0295, 0.0195, 0.0663, 0.0781], + device='cuda:1'), in_proj_covar=tensor([0.0069, 0.0058, 0.0060, 0.0068, 0.0087, 0.0066, 0.0076, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 11:31:15,171 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.52 vs. limit=5.0 +2023-03-09 11:31:24,780 INFO [train.py:898] (1/4) Epoch 19, batch 1000, loss[loss=0.1986, simple_loss=0.2927, pruned_loss=0.05227, over 17147.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2569, pruned_loss=0.03951, over 3569800.55 frames. 
], batch size: 78, lr: 5.99e-03, grad_scale: 8.0 +2023-03-09 11:31:49,959 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0048, 4.7303, 4.7627, 3.5245, 3.9515, 3.6352, 3.1252, 2.6778], + device='cuda:1'), covar=tensor([0.0183, 0.0097, 0.0063, 0.0281, 0.0334, 0.0193, 0.0546, 0.0732], + device='cuda:1'), in_proj_covar=tensor([0.0069, 0.0058, 0.0060, 0.0068, 0.0087, 0.0066, 0.0076, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 11:32:06,496 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8334, 2.4340, 2.2645, 2.5174, 2.9867, 2.8273, 2.6547, 2.4998], + device='cuda:1'), covar=tensor([0.0198, 0.0243, 0.0506, 0.0373, 0.0185, 0.0194, 0.0374, 0.0375], + device='cuda:1'), in_proj_covar=tensor([0.0135, 0.0131, 0.0163, 0.0153, 0.0126, 0.0111, 0.0151, 0.0152], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 11:32:16,115 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.91 vs. limit=5.0 +2023-03-09 11:32:20,707 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-03-09 11:32:23,346 INFO [train.py:898] (1/4) Epoch 19, batch 1050, loss[loss=0.1765, simple_loss=0.2658, pruned_loss=0.04358, over 18392.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2569, pruned_loss=0.03945, over 3574612.32 frames. ], batch size: 52, lr: 5.98e-03, grad_scale: 8.0 +2023-03-09 11:32:32,546 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.909e+02 2.933e+02 3.279e+02 4.197e+02 8.258e+02, threshold=6.558e+02, percent-clipped=3.0 +2023-03-09 11:32:59,628 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66494.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:33:06,937 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66500.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 11:33:22,945 INFO [train.py:898] (1/4) Epoch 19, batch 1100, loss[loss=0.1818, simple_loss=0.2724, pruned_loss=0.04563, over 18118.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2576, pruned_loss=0.0397, over 3565682.89 frames. 
], batch size: 62, lr: 5.98e-03, grad_scale: 8.0 +2023-03-09 11:33:26,439 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66516.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 11:33:29,946 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.4995, 4.9011, 4.8100, 4.8723, 4.4355, 4.8290, 4.1928, 4.7995], + device='cuda:1'), covar=tensor([0.0253, 0.0316, 0.0268, 0.0436, 0.0378, 0.0225, 0.1174, 0.0324], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0256, 0.0245, 0.0320, 0.0263, 0.0262, 0.0305, 0.0252], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 11:33:32,002 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66521.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:33:39,979 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7490, 3.1774, 3.8839, 2.8324, 3.5564, 2.6690, 2.7741, 2.3234], + device='cuda:1'), covar=tensor([0.1027, 0.0905, 0.0260, 0.0674, 0.0646, 0.1901, 0.2054, 0.1603], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0238, 0.0175, 0.0191, 0.0250, 0.0266, 0.0316, 0.0229], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 11:33:42,116 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1256, 5.1777, 5.2153, 5.0092, 4.9467, 4.9306, 5.3259, 5.3083], + device='cuda:1'), covar=tensor([0.0068, 0.0063, 0.0053, 0.0086, 0.0060, 0.0145, 0.0071, 0.0085], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0067, 0.0071, 0.0090, 0.0073, 0.0101, 0.0084, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 11:33:51,682 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.57 vs. limit=5.0 +2023-03-09 11:33:56,080 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66542.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:34:19,087 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2949, 5.1765, 5.4864, 5.5357, 5.2714, 6.0431, 5.7503, 5.2746], + device='cuda:1'), covar=tensor([0.1084, 0.0620, 0.0682, 0.0756, 0.1500, 0.0757, 0.0597, 0.1843], + device='cuda:1'), in_proj_covar=tensor([0.0351, 0.0275, 0.0299, 0.0301, 0.0324, 0.0414, 0.0270, 0.0400], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:1') +2023-03-09 11:34:20,996 INFO [train.py:898] (1/4) Epoch 19, batch 1150, loss[loss=0.1916, simple_loss=0.2828, pruned_loss=0.05027, over 18615.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2579, pruned_loss=0.03998, over 3564569.71 frames. ], batch size: 52, lr: 5.98e-03, grad_scale: 8.0 +2023-03-09 11:34:29,833 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.816e+02 2.725e+02 3.159e+02 3.763e+02 6.148e+02, threshold=6.318e+02, percent-clipped=0.0 +2023-03-09 11:34:36,271 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. 
limit=2.0 +2023-03-09 11:34:42,854 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66582.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:35:10,281 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4868, 3.4057, 2.1980, 4.3932, 3.0662, 4.2775, 2.5807, 3.9559], + device='cuda:1'), covar=tensor([0.0648, 0.0851, 0.1505, 0.0495, 0.0919, 0.0336, 0.1120, 0.0405], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0220, 0.0186, 0.0274, 0.0188, 0.0258, 0.0198, 0.0195], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 11:35:19,957 INFO [train.py:898] (1/4) Epoch 19, batch 1200, loss[loss=0.1709, simple_loss=0.2589, pruned_loss=0.04142, over 17001.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2576, pruned_loss=0.03971, over 3577786.01 frames. ], batch size: 78, lr: 5.98e-03, grad_scale: 8.0 +2023-03-09 11:35:21,292 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66614.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:36:18,957 INFO [train.py:898] (1/4) Epoch 19, batch 1250, loss[loss=0.1475, simple_loss=0.2384, pruned_loss=0.02833, over 18493.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2575, pruned_loss=0.03973, over 3583855.55 frames. ], batch size: 47, lr: 5.97e-03, grad_scale: 8.0 +2023-03-09 11:36:27,893 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.899e+02 2.792e+02 3.362e+02 4.146e+02 6.699e+02, threshold=6.725e+02, percent-clipped=2.0 +2023-03-09 11:37:07,670 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 11:37:16,537 INFO [train.py:898] (1/4) Epoch 19, batch 1300, loss[loss=0.1678, simple_loss=0.2588, pruned_loss=0.03843, over 18261.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2568, pruned_loss=0.0392, over 3594392.43 frames. ], batch size: 57, lr: 5.97e-03, grad_scale: 8.0 +2023-03-09 11:37:22,020 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7718, 2.9393, 2.8287, 3.0341, 3.8083, 3.8275, 3.2946, 3.0426], + device='cuda:1'), covar=tensor([0.0148, 0.0284, 0.0496, 0.0380, 0.0183, 0.0124, 0.0321, 0.0370], + device='cuda:1'), in_proj_covar=tensor([0.0134, 0.0131, 0.0161, 0.0154, 0.0125, 0.0110, 0.0149, 0.0151], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 11:37:37,969 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66731.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:37:40,274 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4508, 2.7391, 2.5500, 2.8311, 3.5366, 3.5400, 3.0020, 2.7787], + device='cuda:1'), covar=tensor([0.0190, 0.0257, 0.0521, 0.0385, 0.0184, 0.0126, 0.0350, 0.0411], + device='cuda:1'), in_proj_covar=tensor([0.0134, 0.0130, 0.0161, 0.0153, 0.0125, 0.0109, 0.0149, 0.0150], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 11:38:14,575 INFO [train.py:898] (1/4) Epoch 19, batch 1350, loss[loss=0.1499, simple_loss=0.234, pruned_loss=0.0329, over 18429.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.256, pruned_loss=0.03889, over 3593127.18 frames. 
], batch size: 42, lr: 5.97e-03, grad_scale: 8.0 +2023-03-09 11:38:24,649 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.905e+02 2.817e+02 3.306e+02 3.967e+02 8.245e+02, threshold=6.612e+02, percent-clipped=2.0 +2023-03-09 11:38:48,258 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66792.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:38:57,344 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66800.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:39:04,024 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-09 11:39:12,434 INFO [train.py:898] (1/4) Epoch 19, batch 1400, loss[loss=0.1748, simple_loss=0.2641, pruned_loss=0.04282, over 18473.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2562, pruned_loss=0.03908, over 3593156.56 frames. ], batch size: 59, lr: 5.97e-03, grad_scale: 8.0 +2023-03-09 11:39:16,691 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66816.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 11:39:22,136 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.16 vs. limit=5.0 +2023-03-09 11:39:32,694 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9966, 5.3764, 2.6567, 5.1943, 5.0935, 5.3893, 5.1884, 2.6029], + device='cuda:1'), covar=tensor([0.0192, 0.0081, 0.0820, 0.0075, 0.0068, 0.0071, 0.0090, 0.1013], + device='cuda:1'), in_proj_covar=tensor([0.0086, 0.0079, 0.0094, 0.0093, 0.0083, 0.0073, 0.0083, 0.0095], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 11:39:54,055 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66848.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:40:11,327 INFO [train.py:898] (1/4) Epoch 19, batch 1450, loss[loss=0.1613, simple_loss=0.2492, pruned_loss=0.03666, over 18377.00 frames. ], tot_loss[loss=0.1673, simple_loss=0.2566, pruned_loss=0.03904, over 3601359.56 frames. ], batch size: 50, lr: 5.97e-03, grad_scale: 8.0 +2023-03-09 11:40:12,681 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66864.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:40:21,465 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.935e+02 2.720e+02 3.294e+02 4.147e+02 8.956e+02, threshold=6.588e+02, percent-clipped=4.0 +2023-03-09 11:40:28,862 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66877.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 11:40:50,962 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8769, 3.8531, 5.0799, 2.8441, 4.4427, 2.6337, 3.1193, 1.8024], + device='cuda:1'), covar=tensor([0.1103, 0.0802, 0.0125, 0.0904, 0.0496, 0.2413, 0.2564, 0.2070], + device='cuda:1'), in_proj_covar=tensor([0.0218, 0.0240, 0.0178, 0.0194, 0.0253, 0.0269, 0.0320, 0.0231], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 11:41:10,286 INFO [train.py:898] (1/4) Epoch 19, batch 1500, loss[loss=0.1448, simple_loss=0.2373, pruned_loss=0.02619, over 18391.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2551, pruned_loss=0.03873, over 3601740.76 frames. 
], batch size: 48, lr: 5.96e-03, grad_scale: 8.0 +2023-03-09 11:41:11,810 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66914.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:41:57,113 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7785, 2.4066, 2.7472, 2.8905, 3.3426, 5.1688, 5.0278, 3.7494], + device='cuda:1'), covar=tensor([0.1714, 0.2468, 0.2852, 0.1734, 0.2315, 0.0181, 0.0310, 0.0798], + device='cuda:1'), in_proj_covar=tensor([0.0291, 0.0339, 0.0370, 0.0271, 0.0387, 0.0232, 0.0292, 0.0246], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 11:42:02,739 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66957.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:42:08,434 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66962.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:42:09,391 INFO [train.py:898] (1/4) Epoch 19, batch 1550, loss[loss=0.1409, simple_loss=0.2211, pruned_loss=0.03034, over 18420.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2548, pruned_loss=0.0383, over 3602672.20 frames. ], batch size: 43, lr: 5.96e-03, grad_scale: 8.0 +2023-03-09 11:42:12,410 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6433, 4.5717, 4.7418, 4.3516, 4.4650, 4.4506, 4.7306, 4.7065], + device='cuda:1'), covar=tensor([0.0090, 0.0096, 0.0079, 0.0144, 0.0101, 0.0202, 0.0100, 0.0131], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0068, 0.0073, 0.0091, 0.0075, 0.0103, 0.0086, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 11:42:18,931 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.827e+02 2.644e+02 3.102e+02 3.654e+02 6.872e+02, threshold=6.204e+02, percent-clipped=2.0 +2023-03-09 11:43:08,126 INFO [train.py:898] (1/4) Epoch 19, batch 1600, loss[loss=0.1622, simple_loss=0.2597, pruned_loss=0.03234, over 18633.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2555, pruned_loss=0.03853, over 3596727.94 frames. ], batch size: 52, lr: 5.96e-03, grad_scale: 8.0 +2023-03-09 11:43:11,032 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-09 11:43:14,654 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67018.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 11:43:44,747 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3671, 5.3409, 4.9706, 5.2903, 5.2754, 4.7395, 5.2301, 4.9935], + device='cuda:1'), covar=tensor([0.0435, 0.0475, 0.1334, 0.0823, 0.0630, 0.0410, 0.0435, 0.1022], + device='cuda:1'), in_proj_covar=tensor([0.0472, 0.0545, 0.0688, 0.0425, 0.0438, 0.0494, 0.0534, 0.0664], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 11:44:06,924 INFO [train.py:898] (1/4) Epoch 19, batch 1650, loss[loss=0.1602, simple_loss=0.2377, pruned_loss=0.04136, over 18175.00 frames. ], tot_loss[loss=0.166, simple_loss=0.255, pruned_loss=0.03853, over 3597895.62 frames. 
], batch size: 44, lr: 5.96e-03, grad_scale: 8.0 +2023-03-09 11:44:16,358 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.888e+02 2.665e+02 3.285e+02 3.842e+02 7.636e+02, threshold=6.571e+02, percent-clipped=3.0 +2023-03-09 11:44:31,184 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67083.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:44:36,654 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67087.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 11:45:05,337 INFO [train.py:898] (1/4) Epoch 19, batch 1700, loss[loss=0.168, simple_loss=0.2602, pruned_loss=0.03788, over 18509.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2565, pruned_loss=0.03899, over 3590205.51 frames. ], batch size: 51, lr: 5.95e-03, grad_scale: 8.0 +2023-03-09 11:45:43,269 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67144.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:45:45,678 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. limit=2.0 +2023-03-09 11:46:00,412 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8229, 4.9122, 4.9533, 4.6736, 4.7209, 4.6804, 5.0039, 5.0411], + device='cuda:1'), covar=tensor([0.0069, 0.0057, 0.0056, 0.0100, 0.0056, 0.0154, 0.0066, 0.0072], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0068, 0.0073, 0.0091, 0.0075, 0.0102, 0.0086, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 11:46:04,394 INFO [train.py:898] (1/4) Epoch 19, batch 1750, loss[loss=0.1783, simple_loss=0.2638, pruned_loss=0.04644, over 17028.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2563, pruned_loss=0.03898, over 3581103.52 frames. ], batch size: 78, lr: 5.95e-03, grad_scale: 8.0 +2023-03-09 11:46:13,338 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.723e+02 2.728e+02 3.267e+02 4.070e+02 1.061e+03, threshold=6.534e+02, percent-clipped=5.0 +2023-03-09 11:46:16,613 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7221, 3.1523, 3.9807, 2.7816, 3.6305, 2.5646, 2.7505, 2.2436], + device='cuda:1'), covar=tensor([0.1001, 0.0896, 0.0292, 0.0737, 0.0650, 0.2127, 0.2154, 0.1649], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0236, 0.0176, 0.0191, 0.0250, 0.0264, 0.0314, 0.0227], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 11:46:21,166 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67177.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:46:23,789 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-09 11:47:02,918 INFO [train.py:898] (1/4) Epoch 19, batch 1800, loss[loss=0.1699, simple_loss=0.2496, pruned_loss=0.04514, over 18255.00 frames. ], tot_loss[loss=0.167, simple_loss=0.256, pruned_loss=0.03903, over 3590894.46 frames. ], batch size: 45, lr: 5.95e-03, grad_scale: 8.0 +2023-03-09 11:47:17,310 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67225.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:47:37,877 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.19 vs. limit=5.0 +2023-03-09 11:48:01,509 INFO [train.py:898] (1/4) Epoch 19, batch 1850, loss[loss=0.1606, simple_loss=0.2545, pruned_loss=0.03338, over 18257.00 frames. 
], tot_loss[loss=0.1669, simple_loss=0.2559, pruned_loss=0.03896, over 3586015.29 frames. ], batch size: 47, lr: 5.95e-03, grad_scale: 8.0 +2023-03-09 11:48:10,593 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.962e+02 2.832e+02 3.331e+02 4.104e+02 8.145e+02, threshold=6.662e+02, percent-clipped=3.0 +2023-03-09 11:48:19,816 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-03-09 11:49:00,054 INFO [train.py:898] (1/4) Epoch 19, batch 1900, loss[loss=0.1478, simple_loss=0.2275, pruned_loss=0.03402, over 17772.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2562, pruned_loss=0.03932, over 3574910.35 frames. ], batch size: 39, lr: 5.95e-03, grad_scale: 4.0 +2023-03-09 11:49:00,274 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67313.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:49:06,013 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6145, 3.5431, 3.4471, 3.0402, 3.2930, 2.6852, 2.6131, 3.5360], + device='cuda:1'), covar=tensor([0.0050, 0.0090, 0.0071, 0.0139, 0.0093, 0.0204, 0.0211, 0.0067], + device='cuda:1'), in_proj_covar=tensor([0.0129, 0.0154, 0.0128, 0.0181, 0.0135, 0.0174, 0.0177, 0.0114], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 11:49:58,287 INFO [train.py:898] (1/4) Epoch 19, batch 1950, loss[loss=0.1543, simple_loss=0.2423, pruned_loss=0.03311, over 18264.00 frames. ], tot_loss[loss=0.1673, simple_loss=0.2561, pruned_loss=0.03925, over 3586896.74 frames. ], batch size: 47, lr: 5.94e-03, grad_scale: 4.0 +2023-03-09 11:50:08,471 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.842e+02 2.816e+02 3.321e+02 4.112e+02 1.785e+03, threshold=6.643e+02, percent-clipped=3.0 +2023-03-09 11:50:12,394 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8521, 3.9737, 2.4586, 3.9305, 5.1647, 2.3107, 3.6128, 3.9216], + device='cuda:1'), covar=tensor([0.0175, 0.1307, 0.1739, 0.0714, 0.0076, 0.1525, 0.0837, 0.0713], + device='cuda:1'), in_proj_covar=tensor([0.0159, 0.0267, 0.0203, 0.0196, 0.0119, 0.0182, 0.0216, 0.0225], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 11:50:14,427 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1898, 5.1636, 5.4823, 5.5948, 5.1838, 6.0769, 5.7539, 5.2119], + device='cuda:1'), covar=tensor([0.1035, 0.0652, 0.0773, 0.0697, 0.1374, 0.0725, 0.0682, 0.1719], + device='cuda:1'), in_proj_covar=tensor([0.0353, 0.0277, 0.0300, 0.0299, 0.0326, 0.0414, 0.0273, 0.0404], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:1') +2023-03-09 11:50:26,416 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67387.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:50:31,948 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.78 vs. limit=2.0 +2023-03-09 11:50:57,639 INFO [train.py:898] (1/4) Epoch 19, batch 2000, loss[loss=0.1695, simple_loss=0.2507, pruned_loss=0.04416, over 18547.00 frames. ], tot_loss[loss=0.168, simple_loss=0.257, pruned_loss=0.03951, over 3587706.67 frames. 
], batch size: 49, lr: 5.94e-03, grad_scale: 8.0 +2023-03-09 11:51:22,886 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67435.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:51:27,909 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67439.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:51:56,199 INFO [train.py:898] (1/4) Epoch 19, batch 2050, loss[loss=0.1466, simple_loss=0.232, pruned_loss=0.03057, over 18378.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.257, pruned_loss=0.03972, over 3588985.63 frames. ], batch size: 46, lr: 5.94e-03, grad_scale: 8.0 +2023-03-09 11:52:06,259 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.056e+02 2.683e+02 3.184e+02 3.899e+02 7.354e+02, threshold=6.369e+02, percent-clipped=1.0 +2023-03-09 11:52:18,973 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0019, 5.0499, 2.5766, 4.9530, 4.8653, 5.1269, 4.8205, 2.4265], + device='cuda:1'), covar=tensor([0.0180, 0.0086, 0.0817, 0.0095, 0.0074, 0.0079, 0.0125, 0.1112], + device='cuda:1'), in_proj_covar=tensor([0.0086, 0.0079, 0.0094, 0.0093, 0.0083, 0.0074, 0.0084, 0.0096], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 11:52:54,225 INFO [train.py:898] (1/4) Epoch 19, batch 2100, loss[loss=0.1468, simple_loss=0.2295, pruned_loss=0.03211, over 18384.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.2562, pruned_loss=0.03975, over 3586639.57 frames. ], batch size: 42, lr: 5.94e-03, grad_scale: 8.0 +2023-03-09 11:52:54,614 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67513.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:53:52,386 INFO [train.py:898] (1/4) Epoch 19, batch 2150, loss[loss=0.1379, simple_loss=0.2202, pruned_loss=0.02785, over 18499.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.2551, pruned_loss=0.03902, over 3595338.96 frames. ], batch size: 44, lr: 5.93e-03, grad_scale: 8.0 +2023-03-09 11:54:03,265 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.859e+02 2.589e+02 3.256e+02 4.119e+02 1.040e+03, threshold=6.512e+02, percent-clipped=4.0 +2023-03-09 11:54:05,940 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67574.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:54:41,322 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 11:54:43,478 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. limit=2.0 +2023-03-09 11:54:51,804 INFO [train.py:898] (1/4) Epoch 19, batch 2200, loss[loss=0.157, simple_loss=0.2556, pruned_loss=0.02921, over 18561.00 frames. ], tot_loss[loss=0.1664, simple_loss=0.2551, pruned_loss=0.03881, over 3588881.84 frames. ], batch size: 54, lr: 5.93e-03, grad_scale: 8.0 +2023-03-09 11:54:52,075 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67613.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:55:48,946 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67661.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:55:50,989 INFO [train.py:898] (1/4) Epoch 19, batch 2250, loss[loss=0.1785, simple_loss=0.2718, pruned_loss=0.0426, over 18585.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.2549, pruned_loss=0.03876, over 3584688.86 frames. 
], batch size: 54, lr: 5.93e-03, grad_scale: 8.0 +2023-03-09 11:56:01,708 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.067e+02 2.672e+02 3.118e+02 3.558e+02 7.247e+02, threshold=6.237e+02, percent-clipped=1.0 +2023-03-09 11:56:07,750 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6500, 2.8674, 4.2723, 3.7246, 2.6858, 4.6398, 3.8823, 2.9709], + device='cuda:1'), covar=tensor([0.0524, 0.1474, 0.0338, 0.0405, 0.1528, 0.0210, 0.0542, 0.0901], + device='cuda:1'), in_proj_covar=tensor([0.0204, 0.0233, 0.0199, 0.0157, 0.0221, 0.0203, 0.0237, 0.0194], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 11:56:09,234 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.27 vs. limit=5.0 +2023-03-09 11:56:11,194 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67680.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:56:25,080 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67692.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:56:39,491 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6938, 3.5002, 4.9659, 3.0336, 4.2477, 2.5723, 3.1126, 1.8093], + device='cuda:1'), covar=tensor([0.1149, 0.0943, 0.0125, 0.0804, 0.0530, 0.2426, 0.2484, 0.2081], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0235, 0.0174, 0.0189, 0.0249, 0.0264, 0.0312, 0.0226], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 11:56:43,320 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.2690, 2.7243, 2.4746, 2.7248, 3.4165, 3.2644, 2.9460, 2.7471], + device='cuda:1'), covar=tensor([0.0163, 0.0255, 0.0605, 0.0405, 0.0181, 0.0153, 0.0390, 0.0346], + device='cuda:1'), in_proj_covar=tensor([0.0137, 0.0131, 0.0163, 0.0156, 0.0129, 0.0113, 0.0152, 0.0153], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 11:56:50,314 INFO [train.py:898] (1/4) Epoch 19, batch 2300, loss[loss=0.1406, simple_loss=0.2227, pruned_loss=0.02925, over 18407.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2563, pruned_loss=0.03927, over 3590759.22 frames. ], batch size: 43, lr: 5.93e-03, grad_scale: 8.0 +2023-03-09 11:57:20,983 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67739.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:57:23,432 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67741.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:57:37,130 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67753.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 11:57:48,529 INFO [train.py:898] (1/4) Epoch 19, batch 2350, loss[loss=0.1923, simple_loss=0.2795, pruned_loss=0.0525, over 17762.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2561, pruned_loss=0.03915, over 3597985.10 frames. 
], batch size: 70, lr: 5.93e-03, grad_scale: 8.0 +2023-03-09 11:57:59,066 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.915e+02 2.613e+02 3.195e+02 3.840e+02 8.434e+02, threshold=6.389e+02, percent-clipped=1.0 +2023-03-09 11:58:16,651 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67787.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:58:47,097 INFO [train.py:898] (1/4) Epoch 19, batch 2400, loss[loss=0.1841, simple_loss=0.2716, pruned_loss=0.04833, over 18477.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2568, pruned_loss=0.03964, over 3590895.65 frames. ], batch size: 53, lr: 5.92e-03, grad_scale: 8.0 +2023-03-09 11:59:45,719 INFO [train.py:898] (1/4) Epoch 19, batch 2450, loss[loss=0.163, simple_loss=0.2572, pruned_loss=0.0344, over 17999.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2566, pruned_loss=0.03941, over 3581212.01 frames. ], batch size: 65, lr: 5.92e-03, grad_scale: 8.0 +2023-03-09 11:59:52,417 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9473, 5.2402, 2.9226, 5.0866, 4.9958, 5.2941, 5.1129, 2.8523], + device='cuda:1'), covar=tensor([0.0217, 0.0071, 0.0703, 0.0091, 0.0066, 0.0068, 0.0082, 0.0933], + device='cuda:1'), in_proj_covar=tensor([0.0087, 0.0079, 0.0094, 0.0094, 0.0083, 0.0074, 0.0083, 0.0096], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 11:59:54,022 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67869.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 11:59:57,187 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.025e+02 2.777e+02 3.368e+02 4.107e+02 6.941e+02, threshold=6.736e+02, percent-clipped=1.0 +2023-03-09 12:00:39,755 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.02 vs. limit=5.0 +2023-03-09 12:00:44,887 INFO [train.py:898] (1/4) Epoch 19, batch 2500, loss[loss=0.1474, simple_loss=0.2291, pruned_loss=0.03281, over 18467.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2563, pruned_loss=0.03945, over 3571967.72 frames. ], batch size: 44, lr: 5.92e-03, grad_scale: 8.0 +2023-03-09 12:01:43,837 INFO [train.py:898] (1/4) Epoch 19, batch 2550, loss[loss=0.1671, simple_loss=0.2606, pruned_loss=0.03682, over 16015.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2567, pruned_loss=0.03957, over 3576617.48 frames. ], batch size: 94, lr: 5.92e-03, grad_scale: 4.0 +2023-03-09 12:01:56,271 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.852e+02 2.690e+02 3.137e+02 3.674e+02 8.020e+02, threshold=6.273e+02, percent-clipped=2.0 +2023-03-09 12:02:05,963 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67981.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:02:48,102 INFO [train.py:898] (1/4) Epoch 19, batch 2600, loss[loss=0.1951, simple_loss=0.2813, pruned_loss=0.05448, over 18209.00 frames. ], tot_loss[loss=0.1675, simple_loss=0.256, pruned_loss=0.03944, over 3588019.67 frames. 
], batch size: 60, lr: 5.91e-03, grad_scale: 4.0 +2023-03-09 12:03:16,215 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68036.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:03:22,960 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68042.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:03:29,646 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68048.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 12:03:47,006 INFO [train.py:898] (1/4) Epoch 19, batch 2650, loss[loss=0.1981, simple_loss=0.2859, pruned_loss=0.05521, over 18040.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.2565, pruned_loss=0.03959, over 3591820.48 frames. ], batch size: 62, lr: 5.91e-03, grad_scale: 4.0 +2023-03-09 12:03:58,872 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.938e+02 2.796e+02 3.306e+02 3.948e+02 6.802e+02, threshold=6.612e+02, percent-clipped=2.0 +2023-03-09 12:04:15,997 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68087.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 12:04:44,750 INFO [train.py:898] (1/4) Epoch 19, batch 2700, loss[loss=0.1482, simple_loss=0.2325, pruned_loss=0.03193, over 18407.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2564, pruned_loss=0.03942, over 3598131.17 frames. ], batch size: 43, lr: 5.91e-03, grad_scale: 4.0 +2023-03-09 12:05:27,141 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68148.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 12:05:43,850 INFO [train.py:898] (1/4) Epoch 19, batch 2750, loss[loss=0.173, simple_loss=0.2583, pruned_loss=0.04385, over 18400.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2567, pruned_loss=0.03937, over 3603110.58 frames. ], batch size: 48, lr: 5.91e-03, grad_scale: 4.0 +2023-03-09 12:05:51,521 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68169.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:05:55,855 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.532e+02 2.565e+02 3.131e+02 3.754e+02 7.044e+02, threshold=6.262e+02, percent-clipped=1.0 +2023-03-09 12:06:42,976 INFO [train.py:898] (1/4) Epoch 19, batch 2800, loss[loss=0.1671, simple_loss=0.2482, pruned_loss=0.04303, over 18154.00 frames. ], tot_loss[loss=0.1673, simple_loss=0.2563, pruned_loss=0.03913, over 3593895.10 frames. ], batch size: 44, lr: 5.91e-03, grad_scale: 8.0 +2023-03-09 12:06:47,659 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68217.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:07:03,614 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.85 vs. limit=5.0 +2023-03-09 12:07:41,657 INFO [train.py:898] (1/4) Epoch 19, batch 2850, loss[loss=0.1582, simple_loss=0.2541, pruned_loss=0.0311, over 18362.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2561, pruned_loss=0.03884, over 3593151.53 frames. 
], batch size: 55, lr: 5.90e-03, grad_scale: 8.0 +2023-03-09 12:07:53,624 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.861e+02 2.563e+02 3.164e+02 3.682e+02 9.840e+02, threshold=6.328e+02, percent-clipped=2.0 +2023-03-09 12:08:32,557 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3872, 5.2617, 5.6065, 5.7423, 5.2811, 6.1532, 5.8256, 5.5063], + device='cuda:1'), covar=tensor([0.1065, 0.0623, 0.0745, 0.0622, 0.1534, 0.0735, 0.0650, 0.1550], + device='cuda:1'), in_proj_covar=tensor([0.0352, 0.0278, 0.0303, 0.0301, 0.0327, 0.0415, 0.0276, 0.0403], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:1') +2023-03-09 12:08:41,530 INFO [train.py:898] (1/4) Epoch 19, batch 2900, loss[loss=0.1853, simple_loss=0.2762, pruned_loss=0.04724, over 17815.00 frames. ], tot_loss[loss=0.1661, simple_loss=0.2555, pruned_loss=0.03836, over 3603507.37 frames. ], batch size: 70, lr: 5.90e-03, grad_scale: 8.0 +2023-03-09 12:09:08,782 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68336.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:09:10,281 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68337.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:09:23,456 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68348.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:09:40,267 INFO [train.py:898] (1/4) Epoch 19, batch 2950, loss[loss=0.1739, simple_loss=0.2688, pruned_loss=0.03952, over 18400.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.2555, pruned_loss=0.03844, over 3604057.86 frames. ], batch size: 52, lr: 5.90e-03, grad_scale: 8.0 +2023-03-09 12:09:46,293 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.2866, 3.2639, 2.1270, 4.0254, 2.8043, 3.7681, 2.1944, 3.4488], + device='cuda:1'), covar=tensor([0.0615, 0.0761, 0.1368, 0.0433, 0.0870, 0.0286, 0.1216, 0.0433], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0225, 0.0188, 0.0279, 0.0192, 0.0261, 0.0203, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 12:09:51,486 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.776e+02 2.615e+02 3.161e+02 3.773e+02 8.205e+02, threshold=6.323e+02, percent-clipped=2.0 +2023-03-09 12:10:04,942 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68384.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:10:19,667 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68396.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:10:23,205 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68399.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:10:39,475 INFO [train.py:898] (1/4) Epoch 19, batch 3000, loss[loss=0.1626, simple_loss=0.2391, pruned_loss=0.04308, over 18501.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2549, pruned_loss=0.03819, over 3601055.86 frames. 
], batch size: 44, lr: 5.90e-03, grad_scale: 8.0 +2023-03-09 12:10:39,475 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 12:10:45,976 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0249, 2.9843, 1.8456, 3.5396, 2.5317, 3.2554, 2.0575, 3.1442], + device='cuda:1'), covar=tensor([0.0719, 0.0946, 0.1659, 0.0580, 0.1037, 0.0307, 0.1385, 0.0410], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0225, 0.0189, 0.0281, 0.0192, 0.0262, 0.0204, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 12:10:51,588 INFO [train.py:932] (1/4) Epoch 19, validation: loss=0.1511, simple_loss=0.2509, pruned_loss=0.02564, over 944034.00 frames. +2023-03-09 12:10:51,589 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 12:11:27,563 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68443.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 12:11:47,299 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68460.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:11:50,384 INFO [train.py:898] (1/4) Epoch 19, batch 3050, loss[loss=0.1508, simple_loss=0.2346, pruned_loss=0.03352, over 18565.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2544, pruned_loss=0.0385, over 3584832.87 frames. ], batch size: 45, lr: 5.90e-03, grad_scale: 8.0 +2023-03-09 12:12:02,235 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.030e+02 2.743e+02 3.169e+02 3.811e+02 7.810e+02, threshold=6.338e+02, percent-clipped=1.0 +2023-03-09 12:12:49,517 INFO [train.py:898] (1/4) Epoch 19, batch 3100, loss[loss=0.2028, simple_loss=0.2821, pruned_loss=0.06177, over 18318.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2542, pruned_loss=0.03828, over 3588334.40 frames. ], batch size: 57, lr: 5.89e-03, grad_scale: 8.0 +2023-03-09 12:13:06,147 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1232, 4.6993, 4.7991, 3.6536, 3.9342, 3.6254, 2.9419, 2.8820], + device='cuda:1'), covar=tensor([0.0172, 0.0115, 0.0075, 0.0266, 0.0268, 0.0221, 0.0663, 0.0729], + device='cuda:1'), in_proj_covar=tensor([0.0068, 0.0058, 0.0061, 0.0067, 0.0087, 0.0066, 0.0077, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 12:13:48,205 INFO [train.py:898] (1/4) Epoch 19, batch 3150, loss[loss=0.1575, simple_loss=0.2391, pruned_loss=0.03788, over 18257.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2545, pruned_loss=0.03841, over 3581324.98 frames. 
], batch size: 45, lr: 5.89e-03, grad_scale: 8.0 +2023-03-09 12:13:59,951 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.941e+02 2.741e+02 3.192e+02 3.844e+02 7.803e+02, threshold=6.385e+02, percent-clipped=3.0 +2023-03-09 12:14:05,839 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68578.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:14:11,133 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7315, 3.3755, 4.6508, 2.9163, 4.0806, 2.5054, 2.9562, 1.9376], + device='cuda:1'), covar=tensor([0.1179, 0.1098, 0.0254, 0.0855, 0.0616, 0.2690, 0.2644, 0.2176], + device='cuda:1'), in_proj_covar=tensor([0.0218, 0.0240, 0.0178, 0.0193, 0.0254, 0.0268, 0.0318, 0.0230], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 12:14:46,963 INFO [train.py:898] (1/4) Epoch 19, batch 3200, loss[loss=0.1877, simple_loss=0.2703, pruned_loss=0.05255, over 18279.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2544, pruned_loss=0.03832, over 3581087.19 frames. ], batch size: 60, lr: 5.89e-03, grad_scale: 8.0 +2023-03-09 12:14:55,317 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8707, 4.9166, 4.9721, 4.7373, 4.7287, 4.7335, 5.0846, 5.0695], + device='cuda:1'), covar=tensor([0.0069, 0.0074, 0.0059, 0.0108, 0.0058, 0.0139, 0.0071, 0.0093], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0069, 0.0073, 0.0091, 0.0074, 0.0102, 0.0086, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 12:15:16,406 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68637.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:15:18,755 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68639.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:15:46,051 INFO [train.py:898] (1/4) Epoch 19, batch 3250, loss[loss=0.1705, simple_loss=0.2595, pruned_loss=0.04079, over 18488.00 frames. ], tot_loss[loss=0.166, simple_loss=0.255, pruned_loss=0.03853, over 3582016.76 frames. ], batch size: 59, lr: 5.89e-03, grad_scale: 8.0 +2023-03-09 12:15:57,317 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.558e+02 2.445e+02 3.074e+02 3.780e+02 1.190e+03, threshold=6.148e+02, percent-clipped=4.0 +2023-03-09 12:16:11,637 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68685.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:16:11,859 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68685.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:16:45,039 INFO [train.py:898] (1/4) Epoch 19, batch 3300, loss[loss=0.1616, simple_loss=0.2359, pruned_loss=0.0436, over 18391.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2543, pruned_loss=0.03824, over 3587372.94 frames. ], batch size: 42, lr: 5.88e-03, grad_scale: 8.0 +2023-03-09 12:17:20,693 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68743.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 12:17:23,554 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.76 vs. 
limit=5.0 +2023-03-09 12:17:24,228 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68746.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:17:34,073 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68755.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:17:43,468 INFO [train.py:898] (1/4) Epoch 19, batch 3350, loss[loss=0.194, simple_loss=0.2732, pruned_loss=0.05743, over 12795.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2556, pruned_loss=0.03855, over 3575721.23 frames. ], batch size: 129, lr: 5.88e-03, grad_scale: 8.0 +2023-03-09 12:17:49,581 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0067, 4.5584, 4.6565, 3.5961, 3.8647, 3.5610, 2.9224, 2.6785], + device='cuda:1'), covar=tensor([0.0198, 0.0134, 0.0074, 0.0288, 0.0304, 0.0232, 0.0688, 0.0791], + device='cuda:1'), in_proj_covar=tensor([0.0067, 0.0057, 0.0060, 0.0067, 0.0087, 0.0065, 0.0075, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 12:17:54,602 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.843e+02 2.741e+02 3.273e+02 4.091e+02 6.319e+02, threshold=6.545e+02, percent-clipped=1.0 +2023-03-09 12:18:05,894 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7082, 3.5281, 4.8576, 4.2520, 3.1622, 2.9101, 4.2167, 5.0930], + device='cuda:1'), covar=tensor([0.0856, 0.1624, 0.0192, 0.0434, 0.1011, 0.1218, 0.0437, 0.0348], + device='cuda:1'), in_proj_covar=tensor([0.0145, 0.0270, 0.0145, 0.0179, 0.0189, 0.0186, 0.0190, 0.0188], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 12:18:12,778 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0632, 4.3354, 2.4907, 4.3390, 5.3394, 2.8264, 3.9405, 4.1618], + device='cuda:1'), covar=tensor([0.0175, 0.0973, 0.1558, 0.0530, 0.0071, 0.1068, 0.0592, 0.0601], + device='cuda:1'), in_proj_covar=tensor([0.0160, 0.0265, 0.0202, 0.0194, 0.0121, 0.0180, 0.0215, 0.0224], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 12:18:16,630 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68791.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 12:18:42,916 INFO [train.py:898] (1/4) Epoch 19, batch 3400, loss[loss=0.1739, simple_loss=0.2693, pruned_loss=0.03928, over 17754.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2557, pruned_loss=0.0387, over 3578348.42 frames. 
], batch size: 70, lr: 5.88e-03, grad_scale: 8.0 +2023-03-09 12:19:03,742 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4494, 5.9693, 5.5166, 5.7204, 5.4771, 5.3568, 5.9853, 5.9311], + device='cuda:1'), covar=tensor([0.1307, 0.0773, 0.0437, 0.0731, 0.1597, 0.0729, 0.0608, 0.0734], + device='cuda:1'), in_proj_covar=tensor([0.0605, 0.0516, 0.0378, 0.0545, 0.0735, 0.0536, 0.0735, 0.0558], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 12:19:12,480 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9504, 3.9412, 4.0135, 3.8398, 3.8986, 3.9131, 4.0652, 4.0909], + device='cuda:1'), covar=tensor([0.0090, 0.0086, 0.0073, 0.0111, 0.0073, 0.0127, 0.0092, 0.0095], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0069, 0.0073, 0.0092, 0.0075, 0.0103, 0.0087, 0.0087], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 12:19:41,760 INFO [train.py:898] (1/4) Epoch 19, batch 3450, loss[loss=0.1989, simple_loss=0.2852, pruned_loss=0.05634, over 12367.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2559, pruned_loss=0.03928, over 3553121.24 frames. ], batch size: 130, lr: 5.88e-03, grad_scale: 8.0 +2023-03-09 12:19:53,567 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.993e+02 2.660e+02 3.004e+02 3.762e+02 7.210e+02, threshold=6.009e+02, percent-clipped=0.0 +2023-03-09 12:20:40,017 INFO [train.py:898] (1/4) Epoch 19, batch 3500, loss[loss=0.1288, simple_loss=0.2116, pruned_loss=0.02299, over 18069.00 frames. ], tot_loss[loss=0.1664, simple_loss=0.2551, pruned_loss=0.0388, over 3571885.92 frames. ], batch size: 40, lr: 5.88e-03, grad_scale: 8.0 +2023-03-09 12:21:04,398 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68934.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:21:35,932 INFO [train.py:898] (1/4) Epoch 19, batch 3550, loss[loss=0.1376, simple_loss=0.2205, pruned_loss=0.02732, over 18405.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2546, pruned_loss=0.03862, over 3581400.97 frames. ], batch size: 42, lr: 5.87e-03, grad_scale: 8.0 +2023-03-09 12:21:46,793 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.911e+02 2.828e+02 3.238e+02 3.903e+02 1.381e+03, threshold=6.476e+02, percent-clipped=5.0 +2023-03-09 12:21:57,393 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1968, 5.1066, 5.4051, 5.4324, 5.1213, 6.0164, 5.6301, 5.2675], + device='cuda:1'), covar=tensor([0.1079, 0.0688, 0.0805, 0.0721, 0.1497, 0.0758, 0.0674, 0.1917], + device='cuda:1'), in_proj_covar=tensor([0.0354, 0.0282, 0.0306, 0.0305, 0.0332, 0.0419, 0.0279, 0.0410], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:1') +2023-03-09 12:21:58,560 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9088, 4.9685, 5.0697, 4.7515, 4.7697, 4.7808, 5.1490, 5.1194], + device='cuda:1'), covar=tensor([0.0071, 0.0068, 0.0054, 0.0100, 0.0061, 0.0150, 0.0066, 0.0087], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0068, 0.0072, 0.0091, 0.0074, 0.0101, 0.0086, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 12:22:30,454 INFO [train.py:898] (1/4) Epoch 19, batch 3600, loss[loss=0.1469, simple_loss=0.2286, pruned_loss=0.03266, over 18418.00 frames. 
], tot_loss[loss=0.166, simple_loss=0.2548, pruned_loss=0.03865, over 3583757.51 frames. ], batch size: 42, lr: 5.87e-03, grad_scale: 8.0 +2023-03-09 12:22:53,164 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6563, 2.4971, 2.4150, 2.6564, 3.0245, 3.6988, 3.6725, 3.1284], + device='cuda:1'), covar=tensor([0.1616, 0.2160, 0.2921, 0.1757, 0.2019, 0.0457, 0.0571, 0.0758], + device='cuda:1'), in_proj_covar=tensor([0.0296, 0.0343, 0.0377, 0.0275, 0.0390, 0.0237, 0.0294, 0.0251], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 12:22:59,546 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69041.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:23:01,676 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69043.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:23:34,698 INFO [train.py:898] (1/4) Epoch 20, batch 0, loss[loss=0.1879, simple_loss=0.2735, pruned_loss=0.05118, over 16146.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2735, pruned_loss=0.05118, over 16146.00 frames. ], batch size: 94, lr: 5.72e-03, grad_scale: 8.0 +2023-03-09 12:23:34,699 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 12:23:39,399 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7989, 3.4510, 4.7656, 4.3130, 3.4385, 3.0369, 4.2527, 4.9561], + device='cuda:1'), covar=tensor([0.0790, 0.1509, 0.0194, 0.0373, 0.0871, 0.1198, 0.0409, 0.0220], + device='cuda:1'), in_proj_covar=tensor([0.0145, 0.0268, 0.0145, 0.0178, 0.0189, 0.0185, 0.0189, 0.0189], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 12:23:46,324 INFO [train.py:932] (1/4) Epoch 20, validation: loss=0.1509, simple_loss=0.2512, pruned_loss=0.02534, over 944034.00 frames. +2023-03-09 12:23:46,325 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 12:23:47,753 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4169, 5.3803, 5.0459, 5.2948, 5.3330, 4.7823, 5.2639, 5.0119], + device='cuda:1'), covar=tensor([0.0391, 0.0475, 0.1280, 0.0825, 0.0616, 0.0396, 0.0423, 0.1130], + device='cuda:1'), in_proj_covar=tensor([0.0466, 0.0534, 0.0677, 0.0418, 0.0435, 0.0485, 0.0529, 0.0657], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 12:23:56,094 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69055.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:24:17,097 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.770e+02 2.674e+02 3.193e+02 4.211e+02 7.931e+02, threshold=6.386e+02, percent-clipped=3.0 +2023-03-09 12:24:18,696 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5068, 3.8301, 5.0903, 4.2228, 2.7749, 2.7200, 4.3110, 5.3322], + device='cuda:1'), covar=tensor([0.0957, 0.1356, 0.0150, 0.0436, 0.1136, 0.1276, 0.0397, 0.0153], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0268, 0.0145, 0.0178, 0.0189, 0.0186, 0.0189, 0.0189], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 12:24:40,634 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. 
limit=2.0 +2023-03-09 12:24:45,726 INFO [train.py:898] (1/4) Epoch 20, batch 50, loss[loss=0.1369, simple_loss=0.2204, pruned_loss=0.02666, over 18508.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2588, pruned_loss=0.03958, over 814407.91 frames. ], batch size: 44, lr: 5.72e-03, grad_scale: 4.0 +2023-03-09 12:24:49,073 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-09 12:24:50,035 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.61 vs. limit=2.0 +2023-03-09 12:24:52,746 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69103.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:24:54,052 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69104.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:25:37,786 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.23 vs. limit=5.0 +2023-03-09 12:25:44,574 INFO [train.py:898] (1/4) Epoch 20, batch 100, loss[loss=0.175, simple_loss=0.2712, pruned_loss=0.03939, over 18482.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2575, pruned_loss=0.03977, over 1440170.67 frames. ], batch size: 53, lr: 5.72e-03, grad_scale: 4.0 +2023-03-09 12:26:01,933 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69162.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:26:07,066 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9577, 3.2927, 4.4544, 4.0116, 3.0839, 4.9009, 4.1533, 3.2840], + device='cuda:1'), covar=tensor([0.0449, 0.1260, 0.0295, 0.0377, 0.1301, 0.0179, 0.0521, 0.0867], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0238, 0.0202, 0.0157, 0.0222, 0.0207, 0.0241, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 12:26:09,349 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69168.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:26:15,826 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.993e+02 2.622e+02 3.145e+02 3.671e+02 8.108e+02, threshold=6.290e+02, percent-clipped=1.0 +2023-03-09 12:26:42,960 INFO [train.py:898] (1/4) Epoch 20, batch 150, loss[loss=0.1908, simple_loss=0.2783, pruned_loss=0.05168, over 18127.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2563, pruned_loss=0.03947, over 1913255.50 frames. 
], batch size: 62, lr: 5.71e-03, grad_scale: 4.0 +2023-03-09 12:27:14,045 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69223.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:27:16,498 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69225.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:27:21,186 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69229.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:27:26,738 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69234.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:27:34,901 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6911, 3.2367, 4.4001, 3.7850, 2.7511, 4.7649, 4.0325, 3.1729], + device='cuda:1'), covar=tensor([0.0525, 0.1232, 0.0290, 0.0408, 0.1477, 0.0164, 0.0508, 0.0849], + device='cuda:1'), in_proj_covar=tensor([0.0209, 0.0237, 0.0200, 0.0157, 0.0221, 0.0206, 0.0240, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 12:27:40,476 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69245.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 12:27:42,422 INFO [train.py:898] (1/4) Epoch 20, batch 200, loss[loss=0.1531, simple_loss=0.2397, pruned_loss=0.03328, over 18543.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2561, pruned_loss=0.03935, over 2279198.08 frames. ], batch size: 49, lr: 5.71e-03, grad_scale: 4.0 +2023-03-09 12:27:46,230 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5767, 3.5537, 4.8919, 4.3620, 3.2078, 2.9114, 4.3641, 5.1004], + device='cuda:1'), covar=tensor([0.0844, 0.1496, 0.0179, 0.0353, 0.0944, 0.1199, 0.0351, 0.0191], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0269, 0.0145, 0.0179, 0.0190, 0.0186, 0.0190, 0.0190], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 12:28:07,096 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6553, 4.7334, 4.7876, 4.4625, 4.5515, 4.5347, 4.8670, 4.8473], + device='cuda:1'), covar=tensor([0.0086, 0.0070, 0.0062, 0.0124, 0.0067, 0.0152, 0.0076, 0.0100], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0068, 0.0072, 0.0091, 0.0074, 0.0102, 0.0085, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 12:28:13,635 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69273.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:28:14,466 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.067e+02 2.703e+02 3.198e+02 3.752e+02 7.537e+02, threshold=6.397e+02, percent-clipped=4.0 +2023-03-09 12:28:23,450 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69282.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:28:28,137 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69286.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:28:41,466 INFO [train.py:898] (1/4) Epoch 20, batch 250, loss[loss=0.1521, simple_loss=0.2409, pruned_loss=0.03161, over 18381.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2556, pruned_loss=0.03894, over 2576617.25 frames. 
], batch size: 46, lr: 5.71e-03, grad_scale: 4.0 +2023-03-09 12:28:52,670 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69306.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 12:29:01,478 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2243, 5.7166, 5.3709, 5.5442, 5.3457, 5.1915, 5.8354, 5.7193], + device='cuda:1'), covar=tensor([0.1165, 0.0911, 0.0582, 0.0741, 0.1522, 0.0749, 0.0589, 0.0788], + device='cuda:1'), in_proj_covar=tensor([0.0605, 0.0517, 0.0380, 0.0543, 0.0731, 0.0536, 0.0733, 0.0558], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 12:29:08,296 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69320.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:29:24,470 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69334.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 12:29:32,157 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69341.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:29:39,827 INFO [train.py:898] (1/4) Epoch 20, batch 300, loss[loss=0.1754, simple_loss=0.2665, pruned_loss=0.04221, over 18201.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2559, pruned_loss=0.03915, over 2803374.13 frames. ], batch size: 60, lr: 5.71e-03, grad_scale: 4.0 +2023-03-09 12:30:04,278 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69368.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:30:10,767 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.810e+02 2.793e+02 3.155e+02 4.197e+02 1.182e+03, threshold=6.310e+02, percent-clipped=4.0 +2023-03-09 12:30:13,436 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9995, 5.3228, 2.6422, 5.1184, 5.0326, 5.3069, 5.1477, 2.6907], + device='cuda:1'), covar=tensor([0.0206, 0.0075, 0.0845, 0.0082, 0.0075, 0.0085, 0.0086, 0.1024], + device='cuda:1'), in_proj_covar=tensor([0.0087, 0.0081, 0.0096, 0.0096, 0.0085, 0.0075, 0.0084, 0.0098], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 12:30:19,690 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69381.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:30:28,848 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69389.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:30:34,549 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69394.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:30:37,581 INFO [train.py:898] (1/4) Epoch 20, batch 350, loss[loss=0.1517, simple_loss=0.2357, pruned_loss=0.03388, over 18493.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.2553, pruned_loss=0.03898, over 2965752.98 frames. ], batch size: 47, lr: 5.71e-03, grad_scale: 4.0 +2023-03-09 12:30:40,735 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69399.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:31:16,127 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69429.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:31:36,527 INFO [train.py:898] (1/4) Epoch 20, batch 400, loss[loss=0.1411, simple_loss=0.225, pruned_loss=0.02861, over 18234.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2555, pruned_loss=0.03903, over 3095538.07 frames. 
], batch size: 45, lr: 5.70e-03, grad_scale: 8.0 +2023-03-09 12:31:47,158 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69455.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:32:00,622 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6558, 3.6269, 2.3088, 4.5039, 3.1468, 4.4171, 2.4715, 4.0732], + device='cuda:1'), covar=tensor([0.0651, 0.0758, 0.1345, 0.0488, 0.0848, 0.0381, 0.1228, 0.0383], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0223, 0.0188, 0.0281, 0.0193, 0.0262, 0.0203, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 12:32:08,927 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.671e+02 2.636e+02 3.143e+02 3.755e+02 6.655e+02, threshold=6.287e+02, percent-clipped=1.0 +2023-03-09 12:32:35,607 INFO [train.py:898] (1/4) Epoch 20, batch 450, loss[loss=0.1685, simple_loss=0.2514, pruned_loss=0.04282, over 18291.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2547, pruned_loss=0.03856, over 3211924.88 frames. ], batch size: 49, lr: 5.70e-03, grad_scale: 8.0 +2023-03-09 12:33:00,429 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69518.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:33:07,382 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69524.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:33:33,753 INFO [train.py:898] (1/4) Epoch 20, batch 500, loss[loss=0.1603, simple_loss=0.2506, pruned_loss=0.03504, over 18308.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.255, pruned_loss=0.03833, over 3280130.78 frames. ], batch size: 54, lr: 5.70e-03, grad_scale: 8.0 +2023-03-09 12:34:05,765 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.904e+02 2.776e+02 3.162e+02 3.774e+02 8.469e+02, threshold=6.325e+02, percent-clipped=1.0 +2023-03-09 12:34:14,389 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69581.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:34:27,252 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2151, 5.5991, 2.9303, 5.4547, 5.3123, 5.6488, 5.5225, 3.0579], + device='cuda:1'), covar=tensor([0.0165, 0.0078, 0.0706, 0.0061, 0.0070, 0.0067, 0.0068, 0.0859], + device='cuda:1'), in_proj_covar=tensor([0.0086, 0.0080, 0.0094, 0.0095, 0.0084, 0.0074, 0.0082, 0.0096], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 12:34:32,474 INFO [train.py:898] (1/4) Epoch 20, batch 550, loss[loss=0.1553, simple_loss=0.2463, pruned_loss=0.03212, over 18490.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2555, pruned_loss=0.03829, over 3352672.97 frames. 
], batch size: 51, lr: 5.70e-03, grad_scale: 4.0 +2023-03-09 12:34:37,525 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69601.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 12:35:00,085 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0305, 3.8041, 5.1776, 2.9749, 4.4715, 2.7283, 3.2322, 1.8822], + device='cuda:1'), covar=tensor([0.1073, 0.0871, 0.0176, 0.0901, 0.0515, 0.2484, 0.2760, 0.2172], + device='cuda:1'), in_proj_covar=tensor([0.0217, 0.0241, 0.0179, 0.0193, 0.0253, 0.0267, 0.0318, 0.0231], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 12:35:09,950 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69629.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 12:35:31,660 INFO [train.py:898] (1/4) Epoch 20, batch 600, loss[loss=0.1727, simple_loss=0.2626, pruned_loss=0.04141, over 18629.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2553, pruned_loss=0.03816, over 3405786.29 frames. ], batch size: 52, lr: 5.69e-03, grad_scale: 4.0 +2023-03-09 12:35:37,186 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.96 vs. limit=5.0 +2023-03-09 12:35:41,742 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-09 12:36:04,120 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69674.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 12:36:04,879 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.123e+02 2.772e+02 3.241e+02 3.888e+02 6.801e+02, threshold=6.482e+02, percent-clipped=2.0 +2023-03-09 12:36:06,289 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69676.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:36:29,579 INFO [train.py:898] (1/4) Epoch 20, batch 650, loss[loss=0.1758, simple_loss=0.2576, pruned_loss=0.04701, over 12505.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2547, pruned_loss=0.03811, over 3445067.49 frames. ], batch size: 129, lr: 5.69e-03, grad_scale: 4.0 +2023-03-09 12:36:32,676 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69699.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:37:01,618 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69724.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:37:13,987 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69735.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 12:37:28,027 INFO [train.py:898] (1/4) Epoch 20, batch 700, loss[loss=0.1455, simple_loss=0.2345, pruned_loss=0.02822, over 18342.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2545, pruned_loss=0.03812, over 3476501.06 frames. ], batch size: 46, lr: 5.69e-03, grad_scale: 4.0 +2023-03-09 12:37:28,162 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69747.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:37:31,613 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69750.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:38:00,236 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.022e+02 2.614e+02 3.096e+02 3.753e+02 7.667e+02, threshold=6.192e+02, percent-clipped=2.0 +2023-03-09 12:38:26,161 INFO [train.py:898] (1/4) Epoch 20, batch 750, loss[loss=0.1597, simple_loss=0.2579, pruned_loss=0.03074, over 18504.00 frames. 
], tot_loss[loss=0.165, simple_loss=0.2542, pruned_loss=0.03788, over 3506713.73 frames. ], batch size: 53, lr: 5.69e-03, grad_scale: 4.0 +2023-03-09 12:38:50,780 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69818.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:38:58,128 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69824.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:39:09,051 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9842, 4.9513, 4.6237, 4.9445, 4.9107, 4.3238, 4.8340, 4.5561], + device='cuda:1'), covar=tensor([0.0412, 0.0491, 0.1243, 0.0654, 0.0595, 0.0432, 0.0416, 0.1086], + device='cuda:1'), in_proj_covar=tensor([0.0474, 0.0541, 0.0684, 0.0421, 0.0437, 0.0488, 0.0532, 0.0664], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 12:39:25,629 INFO [train.py:898] (1/4) Epoch 20, batch 800, loss[loss=0.1645, simple_loss=0.2452, pruned_loss=0.04188, over 18373.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2552, pruned_loss=0.03817, over 3519003.93 frames. ], batch size: 46, lr: 5.69e-03, grad_scale: 8.0 +2023-03-09 12:39:39,224 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6821, 3.5788, 2.3741, 4.5013, 3.1848, 4.4294, 2.6438, 4.1783], + device='cuda:1'), covar=tensor([0.0581, 0.0716, 0.1241, 0.0495, 0.0849, 0.0367, 0.1098, 0.0351], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0223, 0.0187, 0.0280, 0.0191, 0.0260, 0.0202, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 12:39:48,074 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69866.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:39:55,154 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69872.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:39:58,462 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.014e+02 2.750e+02 3.144e+02 3.939e+02 8.664e+02, threshold=6.288e+02, percent-clipped=4.0 +2023-03-09 12:40:06,020 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69881.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:40:23,200 INFO [train.py:898] (1/4) Epoch 20, batch 850, loss[loss=0.1579, simple_loss=0.2495, pruned_loss=0.03313, over 18534.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2548, pruned_loss=0.03811, over 3535140.40 frames. 
], batch size: 49, lr: 5.68e-03, grad_scale: 8.0 +2023-03-09 12:40:28,214 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69901.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 12:41:01,251 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69929.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:41:01,323 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69929.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 12:41:19,184 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5887, 3.4518, 2.1522, 4.3878, 3.0148, 4.3145, 2.6320, 3.9784], + device='cuda:1'), covar=tensor([0.0556, 0.0790, 0.1446, 0.0424, 0.0867, 0.0317, 0.1093, 0.0385], + device='cuda:1'), in_proj_covar=tensor([0.0211, 0.0223, 0.0188, 0.0280, 0.0192, 0.0260, 0.0202, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 12:41:21,121 INFO [train.py:898] (1/4) Epoch 20, batch 900, loss[loss=0.1598, simple_loss=0.2519, pruned_loss=0.03385, over 18354.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2542, pruned_loss=0.03792, over 3540886.97 frames. ], batch size: 56, lr: 5.68e-03, grad_scale: 8.0 +2023-03-09 12:41:23,623 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69949.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 12:41:45,217 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6978, 2.8739, 2.6527, 3.0337, 3.7467, 3.6302, 3.2877, 2.9871], + device='cuda:1'), covar=tensor([0.0177, 0.0231, 0.0534, 0.0347, 0.0153, 0.0142, 0.0338, 0.0309], + device='cuda:1'), in_proj_covar=tensor([0.0138, 0.0131, 0.0161, 0.0154, 0.0127, 0.0113, 0.0151, 0.0154], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 12:41:54,377 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.882e+02 2.754e+02 3.235e+02 3.952e+02 9.067e+02, threshold=6.470e+02, percent-clipped=4.0 +2023-03-09 12:41:55,757 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69976.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:41:56,853 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69977.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:42:19,880 INFO [train.py:898] (1/4) Epoch 20, batch 950, loss[loss=0.1735, simple_loss=0.2686, pruned_loss=0.03915, over 16968.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.2544, pruned_loss=0.03787, over 3555498.95 frames. 
], batch size: 78, lr: 5.68e-03, grad_scale: 8.0 +2023-03-09 12:42:56,936 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70024.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:42:57,097 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70024.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:43:04,206 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70030.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 12:43:19,604 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2751, 5.2724, 4.9039, 5.2165, 5.1968, 4.6320, 5.1253, 4.8260], + device='cuda:1'), covar=tensor([0.0408, 0.0392, 0.1270, 0.0699, 0.0551, 0.0374, 0.0391, 0.1006], + device='cuda:1'), in_proj_covar=tensor([0.0472, 0.0537, 0.0680, 0.0420, 0.0434, 0.0485, 0.0528, 0.0658], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 12:43:23,745 INFO [train.py:898] (1/4) Epoch 20, batch 1000, loss[loss=0.158, simple_loss=0.246, pruned_loss=0.03496, over 18284.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2548, pruned_loss=0.0379, over 3560419.54 frames. ], batch size: 49, lr: 5.68e-03, grad_scale: 8.0 +2023-03-09 12:43:27,326 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70050.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:43:52,919 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70072.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:43:56,168 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.914e+02 2.640e+02 3.091e+02 3.737e+02 6.684e+02, threshold=6.182e+02, percent-clipped=1.0 +2023-03-09 12:44:21,991 INFO [train.py:898] (1/4) Epoch 20, batch 1050, loss[loss=0.1691, simple_loss=0.2627, pruned_loss=0.03775, over 17226.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2554, pruned_loss=0.03827, over 3570728.95 frames. ], batch size: 78, lr: 5.68e-03, grad_scale: 8.0 +2023-03-09 12:44:23,278 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70098.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:44:37,214 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70110.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:45:05,291 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-03-09 12:45:20,006 INFO [train.py:898] (1/4) Epoch 20, batch 1100, loss[loss=0.1527, simple_loss=0.2343, pruned_loss=0.03554, over 18376.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.2542, pruned_loss=0.03797, over 3582923.75 frames. ], batch size: 42, lr: 5.67e-03, grad_scale: 8.0 +2023-03-09 12:45:48,188 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70171.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:45:52,310 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.767e+02 2.847e+02 3.446e+02 4.067e+02 6.954e+02, threshold=6.891e+02, percent-clipped=3.0 +2023-03-09 12:46:17,938 INFO [train.py:898] (1/4) Epoch 20, batch 1150, loss[loss=0.1409, simple_loss=0.2285, pruned_loss=0.02668, over 17682.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2546, pruned_loss=0.0383, over 3587786.17 frames. 
], batch size: 39, lr: 5.67e-03, grad_scale: 8.0 +2023-03-09 12:46:35,432 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3951, 3.2778, 3.2482, 2.9598, 3.1705, 2.6021, 2.5711, 3.3207], + device='cuda:1'), covar=tensor([0.0059, 0.0094, 0.0066, 0.0120, 0.0083, 0.0181, 0.0193, 0.0072], + device='cuda:1'), in_proj_covar=tensor([0.0135, 0.0158, 0.0130, 0.0185, 0.0141, 0.0179, 0.0181, 0.0117], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 12:47:05,770 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9465, 5.3674, 2.8136, 5.1755, 5.0984, 5.3952, 5.2001, 2.6578], + device='cuda:1'), covar=tensor([0.0193, 0.0072, 0.0749, 0.0075, 0.0071, 0.0064, 0.0084, 0.1022], + device='cuda:1'), in_proj_covar=tensor([0.0086, 0.0079, 0.0094, 0.0094, 0.0083, 0.0074, 0.0082, 0.0095], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 12:47:08,745 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8015, 3.6587, 4.9893, 2.7765, 4.2907, 2.6556, 3.0928, 1.8984], + device='cuda:1'), covar=tensor([0.1141, 0.0866, 0.0137, 0.0961, 0.0548, 0.2386, 0.2534, 0.2021], + device='cuda:1'), in_proj_covar=tensor([0.0217, 0.0241, 0.0180, 0.0193, 0.0256, 0.0268, 0.0320, 0.0231], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 12:47:16,754 INFO [train.py:898] (1/4) Epoch 20, batch 1200, loss[loss=0.1492, simple_loss=0.2292, pruned_loss=0.03464, over 18409.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2542, pruned_loss=0.03816, over 3589990.04 frames. ], batch size: 43, lr: 5.67e-03, grad_scale: 8.0 +2023-03-09 12:47:25,314 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-09 12:47:30,667 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-03-09 12:47:31,554 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70260.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:47:48,457 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6552, 2.7822, 2.6395, 2.9553, 3.6650, 3.6279, 3.1263, 3.0453], + device='cuda:1'), covar=tensor([0.0161, 0.0263, 0.0497, 0.0366, 0.0184, 0.0151, 0.0385, 0.0350], + device='cuda:1'), in_proj_covar=tensor([0.0139, 0.0133, 0.0161, 0.0156, 0.0129, 0.0114, 0.0152, 0.0154], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 12:47:49,160 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.495e+02 2.757e+02 3.193e+02 3.861e+02 7.458e+02, threshold=6.386e+02, percent-clipped=1.0 +2023-03-09 12:48:15,300 INFO [train.py:898] (1/4) Epoch 20, batch 1250, loss[loss=0.1577, simple_loss=0.2526, pruned_loss=0.03137, over 18374.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2545, pruned_loss=0.0384, over 3576582.94 frames. ], batch size: 52, lr: 5.67e-03, grad_scale: 8.0 +2023-03-09 12:48:21,506 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. 
limit=2.0 +2023-03-09 12:48:42,629 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70321.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:48:44,432 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9744, 3.7116, 4.9583, 4.3478, 3.3963, 2.9789, 4.4517, 5.1784], + device='cuda:1'), covar=tensor([0.0751, 0.1422, 0.0216, 0.0410, 0.0910, 0.1237, 0.0383, 0.0298], + device='cuda:1'), in_proj_covar=tensor([0.0147, 0.0270, 0.0146, 0.0179, 0.0189, 0.0187, 0.0191, 0.0191], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 12:48:53,762 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70330.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 12:49:13,038 INFO [train.py:898] (1/4) Epoch 20, batch 1300, loss[loss=0.1471, simple_loss=0.2324, pruned_loss=0.0309, over 18285.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2544, pruned_loss=0.03843, over 3586025.33 frames. ], batch size: 47, lr: 5.67e-03, grad_scale: 8.0 +2023-03-09 12:49:44,657 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.016e+02 2.769e+02 3.249e+02 3.833e+02 9.851e+02, threshold=6.498e+02, percent-clipped=4.0 +2023-03-09 12:49:48,779 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70378.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 12:50:05,184 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. limit=2.0 +2023-03-09 12:50:10,105 INFO [train.py:898] (1/4) Epoch 20, batch 1350, loss[loss=0.1548, simple_loss=0.2322, pruned_loss=0.03873, over 17547.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2554, pruned_loss=0.03879, over 3590087.06 frames. ], batch size: 39, lr: 5.66e-03, grad_scale: 8.0 +2023-03-09 12:51:08,591 INFO [train.py:898] (1/4) Epoch 20, batch 1400, loss[loss=0.1735, simple_loss=0.2684, pruned_loss=0.0393, over 16244.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.2556, pruned_loss=0.03843, over 3593821.97 frames. ], batch size: 94, lr: 5.66e-03, grad_scale: 8.0 +2023-03-09 12:51:31,227 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70466.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:51:41,139 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.979e+02 2.850e+02 3.137e+02 3.892e+02 8.751e+02, threshold=6.275e+02, percent-clipped=3.0 +2023-03-09 12:52:06,377 INFO [train.py:898] (1/4) Epoch 20, batch 1450, loss[loss=0.1424, simple_loss=0.2238, pruned_loss=0.03054, over 18430.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.255, pruned_loss=0.03833, over 3600685.68 frames. ], batch size: 43, lr: 5.66e-03, grad_scale: 8.0 +2023-03-09 12:52:18,692 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6373, 4.2142, 4.1716, 3.1558, 3.5143, 3.2551, 2.5307, 2.5748], + device='cuda:1'), covar=tensor([0.0210, 0.0124, 0.0094, 0.0324, 0.0328, 0.0230, 0.0709, 0.0715], + device='cuda:1'), in_proj_covar=tensor([0.0069, 0.0058, 0.0061, 0.0068, 0.0089, 0.0066, 0.0077, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 12:53:04,439 INFO [train.py:898] (1/4) Epoch 20, batch 1500, loss[loss=0.1424, simple_loss=0.2282, pruned_loss=0.02825, over 18391.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2546, pruned_loss=0.03831, over 3594835.96 frames. 
], batch size: 42, lr: 5.66e-03, grad_scale: 4.0 +2023-03-09 12:53:38,228 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.836e+02 2.609e+02 3.412e+02 4.380e+02 8.286e+02, threshold=6.824e+02, percent-clipped=3.0 +2023-03-09 12:53:42,471 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.99 vs. limit=5.0 +2023-03-09 12:54:03,312 INFO [train.py:898] (1/4) Epoch 20, batch 1550, loss[loss=0.174, simple_loss=0.2651, pruned_loss=0.04143, over 17448.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2545, pruned_loss=0.03827, over 3599270.15 frames. ], batch size: 78, lr: 5.66e-03, grad_scale: 4.0 +2023-03-09 12:54:25,706 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70616.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:54:28,158 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5396, 3.4877, 4.6573, 4.0869, 3.2681, 2.7145, 4.1482, 5.0005], + device='cuda:1'), covar=tensor([0.0899, 0.1535, 0.0259, 0.0456, 0.0931, 0.1306, 0.0420, 0.0195], + device='cuda:1'), in_proj_covar=tensor([0.0149, 0.0274, 0.0150, 0.0182, 0.0192, 0.0190, 0.0195, 0.0195], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 12:55:02,178 INFO [train.py:898] (1/4) Epoch 20, batch 1600, loss[loss=0.1435, simple_loss=0.2228, pruned_loss=0.03207, over 17648.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2545, pruned_loss=0.0384, over 3578751.98 frames. ], batch size: 39, lr: 5.65e-03, grad_scale: 8.0 +2023-03-09 12:55:35,594 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.948e+02 2.737e+02 3.118e+02 3.818e+02 6.822e+02, threshold=6.236e+02, percent-clipped=0.0 +2023-03-09 12:55:59,469 INFO [train.py:898] (1/4) Epoch 20, batch 1650, loss[loss=0.1563, simple_loss=0.2486, pruned_loss=0.03204, over 18278.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2541, pruned_loss=0.0382, over 3581372.03 frames. ], batch size: 49, lr: 5.65e-03, grad_scale: 8.0 +2023-03-09 12:56:57,257 INFO [train.py:898] (1/4) Epoch 20, batch 1700, loss[loss=0.1741, simple_loss=0.2714, pruned_loss=0.03844, over 17755.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2549, pruned_loss=0.03859, over 3573246.45 frames. ], batch size: 70, lr: 5.65e-03, grad_scale: 8.0 +2023-03-09 12:57:20,533 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70766.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:57:31,225 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.987e+02 2.775e+02 3.401e+02 4.011e+02 8.447e+02, threshold=6.802e+02, percent-clipped=3.0 +2023-03-09 12:57:38,805 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.6407, 5.6173, 5.2588, 5.5525, 5.5569, 4.9530, 5.4594, 5.2121], + device='cuda:1'), covar=tensor([0.0361, 0.0339, 0.1194, 0.0756, 0.0568, 0.0387, 0.0359, 0.0981], + device='cuda:1'), in_proj_covar=tensor([0.0477, 0.0544, 0.0691, 0.0429, 0.0440, 0.0495, 0.0536, 0.0666], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 12:57:55,548 INFO [train.py:898] (1/4) Epoch 20, batch 1750, loss[loss=0.1689, simple_loss=0.2589, pruned_loss=0.0394, over 18492.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.2539, pruned_loss=0.03813, over 3580673.14 frames. 
], batch size: 51, lr: 5.65e-03, grad_scale: 8.0 +2023-03-09 12:58:07,200 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0910, 4.2391, 2.5418, 4.0531, 5.2834, 2.7060, 3.8853, 4.0930], + device='cuda:1'), covar=tensor([0.0146, 0.1139, 0.1610, 0.0665, 0.0072, 0.1191, 0.0671, 0.0689], + device='cuda:1'), in_proj_covar=tensor([0.0160, 0.0264, 0.0201, 0.0192, 0.0122, 0.0180, 0.0211, 0.0222], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 12:58:16,241 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70814.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:58:31,645 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9773, 5.1339, 2.3310, 4.9969, 4.8425, 5.1584, 4.8522, 2.6738], + device='cuda:1'), covar=tensor([0.0186, 0.0077, 0.0898, 0.0076, 0.0065, 0.0070, 0.0101, 0.0983], + device='cuda:1'), in_proj_covar=tensor([0.0086, 0.0080, 0.0095, 0.0095, 0.0084, 0.0075, 0.0084, 0.0096], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 12:58:46,111 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-09 12:58:54,610 INFO [train.py:898] (1/4) Epoch 20, batch 1800, loss[loss=0.1536, simple_loss=0.2415, pruned_loss=0.03289, over 18263.00 frames. ], tot_loss[loss=0.1661, simple_loss=0.2551, pruned_loss=0.03858, over 3579628.04 frames. ], batch size: 47, lr: 5.65e-03, grad_scale: 8.0 +2023-03-09 12:59:28,906 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.702e+02 2.663e+02 3.014e+02 3.631e+02 8.036e+02, threshold=6.028e+02, percent-clipped=1.0 +2023-03-09 12:59:36,365 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6387, 2.2223, 2.5300, 2.6134, 3.1722, 4.8588, 4.7230, 3.4788], + device='cuda:1'), covar=tensor([0.1719, 0.2432, 0.2878, 0.1889, 0.2320, 0.0204, 0.0335, 0.0839], + device='cuda:1'), in_proj_covar=tensor([0.0295, 0.0344, 0.0377, 0.0276, 0.0385, 0.0239, 0.0293, 0.0250], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 12:59:41,854 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-09 12:59:53,193 INFO [train.py:898] (1/4) Epoch 20, batch 1850, loss[loss=0.1445, simple_loss=0.2241, pruned_loss=0.03248, over 18162.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.2556, pruned_loss=0.03884, over 3581759.13 frames. ], batch size: 44, lr: 5.64e-03, grad_scale: 8.0 +2023-03-09 13:00:16,582 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9728, 5.1036, 5.1191, 4.7851, 4.8242, 4.8528, 5.1330, 5.1925], + device='cuda:1'), covar=tensor([0.0068, 0.0057, 0.0051, 0.0104, 0.0051, 0.0140, 0.0067, 0.0084], + device='cuda:1'), in_proj_covar=tensor([0.0094, 0.0070, 0.0073, 0.0092, 0.0075, 0.0104, 0.0086, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 13:00:16,596 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70916.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:00:51,682 INFO [train.py:898] (1/4) Epoch 20, batch 1900, loss[loss=0.1956, simple_loss=0.2822, pruned_loss=0.05453, over 12324.00 frames. ], tot_loss[loss=0.1664, simple_loss=0.2554, pruned_loss=0.03874, over 3574268.91 frames. 
], batch size: 129, lr: 5.64e-03, grad_scale: 8.0 +2023-03-09 13:00:59,059 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4074, 2.6566, 2.4282, 2.7589, 3.4831, 3.3520, 2.9765, 2.7770], + device='cuda:1'), covar=tensor([0.0213, 0.0303, 0.0588, 0.0392, 0.0199, 0.0196, 0.0393, 0.0389], + device='cuda:1'), in_proj_covar=tensor([0.0139, 0.0130, 0.0159, 0.0154, 0.0127, 0.0113, 0.0149, 0.0153], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 13:01:09,032 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.30 vs. limit=5.0 +2023-03-09 13:01:11,885 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70964.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:01:26,017 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.590e+02 2.797e+02 3.325e+02 3.887e+02 8.370e+02, threshold=6.650e+02, percent-clipped=5.0 +2023-03-09 13:01:44,830 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70992.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:01:50,081 INFO [train.py:898] (1/4) Epoch 20, batch 1950, loss[loss=0.1891, simple_loss=0.2765, pruned_loss=0.05087, over 18271.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2549, pruned_loss=0.03835, over 3587723.21 frames. ], batch size: 57, lr: 5.64e-03, grad_scale: 8.0 +2023-03-09 13:02:28,836 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71030.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:02:47,806 INFO [train.py:898] (1/4) Epoch 20, batch 2000, loss[loss=0.1911, simple_loss=0.2772, pruned_loss=0.05249, over 18231.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2546, pruned_loss=0.03838, over 3586250.18 frames. ], batch size: 60, lr: 5.64e-03, grad_scale: 8.0 +2023-03-09 13:02:54,848 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71053.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:03:06,848 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3242, 5.7971, 5.3980, 5.6220, 5.3617, 5.2807, 5.8668, 5.7513], + device='cuda:1'), covar=tensor([0.1152, 0.0847, 0.0533, 0.0681, 0.1489, 0.0707, 0.0589, 0.0786], + device='cuda:1'), in_proj_covar=tensor([0.0611, 0.0530, 0.0387, 0.0550, 0.0739, 0.0540, 0.0743, 0.0564], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 13:03:21,385 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.116e+02 2.863e+02 3.373e+02 3.932e+02 6.166e+02, threshold=6.746e+02, percent-clipped=0.0 +2023-03-09 13:03:39,897 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71091.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:03:44,443 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3889, 5.3868, 5.0285, 5.3116, 5.2768, 4.6894, 5.2082, 4.9470], + device='cuda:1'), covar=tensor([0.0423, 0.0371, 0.1212, 0.0753, 0.0518, 0.0412, 0.0423, 0.1038], + device='cuda:1'), in_proj_covar=tensor([0.0484, 0.0549, 0.0701, 0.0435, 0.0445, 0.0504, 0.0539, 0.0677], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 13:03:46,421 INFO [train.py:898] (1/4) Epoch 20, batch 2050, loss[loss=0.1863, simple_loss=0.2793, pruned_loss=0.04665, over 18359.00 frames. 
], tot_loss[loss=0.1653, simple_loss=0.2545, pruned_loss=0.03805, over 3595603.60 frames. ], batch size: 56, lr: 5.64e-03, grad_scale: 8.0 +2023-03-09 13:04:00,359 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2422, 5.2398, 4.8838, 5.1754, 5.1299, 4.5616, 5.0683, 4.8091], + device='cuda:1'), covar=tensor([0.0438, 0.0430, 0.1324, 0.0763, 0.0608, 0.0433, 0.0440, 0.1171], + device='cuda:1'), in_proj_covar=tensor([0.0484, 0.0549, 0.0700, 0.0434, 0.0445, 0.0503, 0.0539, 0.0677], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 13:04:33,298 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 13:04:43,710 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0001, 3.9534, 5.2021, 4.6436, 3.4351, 3.3649, 4.7656, 5.5016], + device='cuda:1'), covar=tensor([0.0716, 0.1311, 0.0208, 0.0323, 0.0900, 0.0970, 0.0292, 0.0189], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0270, 0.0147, 0.0178, 0.0189, 0.0188, 0.0192, 0.0192], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 13:04:45,465 INFO [train.py:898] (1/4) Epoch 20, batch 2100, loss[loss=0.176, simple_loss=0.2671, pruned_loss=0.0424, over 18028.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2547, pruned_loss=0.03816, over 3586827.26 frames. ], batch size: 65, lr: 5.63e-03, grad_scale: 8.0 +2023-03-09 13:05:14,033 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9168, 5.4306, 2.6040, 5.2207, 5.0740, 5.4352, 5.2336, 2.6815], + device='cuda:1'), covar=tensor([0.0234, 0.0067, 0.0914, 0.0082, 0.0077, 0.0075, 0.0096, 0.1102], + device='cuda:1'), in_proj_covar=tensor([0.0087, 0.0080, 0.0095, 0.0095, 0.0084, 0.0075, 0.0084, 0.0097], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 13:05:19,432 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.825e+02 2.628e+02 3.039e+02 3.790e+02 1.100e+03, threshold=6.078e+02, percent-clipped=2.0 +2023-03-09 13:05:44,229 INFO [train.py:898] (1/4) Epoch 20, batch 2150, loss[loss=0.1744, simple_loss=0.2655, pruned_loss=0.04167, over 18618.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2543, pruned_loss=0.03805, over 3584673.60 frames. ], batch size: 52, lr: 5.63e-03, grad_scale: 8.0 +2023-03-09 13:06:43,285 INFO [train.py:898] (1/4) Epoch 20, batch 2200, loss[loss=0.1763, simple_loss=0.2708, pruned_loss=0.04088, over 17949.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2533, pruned_loss=0.0375, over 3585484.21 frames. 
], batch size: 65, lr: 5.63e-03, grad_scale: 8.0 +2023-03-09 13:07:04,118 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5550, 2.8746, 2.5762, 2.9615, 3.6211, 3.5002, 3.1401, 2.9255], + device='cuda:1'), covar=tensor([0.0185, 0.0262, 0.0534, 0.0350, 0.0170, 0.0168, 0.0314, 0.0382], + device='cuda:1'), in_proj_covar=tensor([0.0137, 0.0130, 0.0159, 0.0153, 0.0127, 0.0113, 0.0148, 0.0152], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 13:07:06,200 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5872, 4.5915, 4.6026, 4.3925, 4.4285, 4.4136, 4.7128, 4.7364], + device='cuda:1'), covar=tensor([0.0086, 0.0084, 0.0082, 0.0126, 0.0068, 0.0152, 0.0084, 0.0106], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0069, 0.0072, 0.0092, 0.0074, 0.0103, 0.0086, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 13:07:16,428 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.980e+02 2.743e+02 3.244e+02 3.938e+02 1.174e+03, threshold=6.489e+02, percent-clipped=4.0 +2023-03-09 13:07:41,310 INFO [train.py:898] (1/4) Epoch 20, batch 2250, loss[loss=0.1679, simple_loss=0.2562, pruned_loss=0.0398, over 18498.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2531, pruned_loss=0.03728, over 3593747.19 frames. ], batch size: 47, lr: 5.63e-03, grad_scale: 8.0 +2023-03-09 13:07:51,071 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5148, 3.0445, 4.4344, 3.7005, 2.7966, 4.6195, 3.9273, 2.7497], + device='cuda:1'), covar=tensor([0.0551, 0.1316, 0.0292, 0.0429, 0.1456, 0.0203, 0.0508, 0.1049], + device='cuda:1'), in_proj_covar=tensor([0.0208, 0.0235, 0.0205, 0.0158, 0.0223, 0.0207, 0.0241, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 13:08:40,030 INFO [train.py:898] (1/4) Epoch 20, batch 2300, loss[loss=0.1675, simple_loss=0.2576, pruned_loss=0.03871, over 15992.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2536, pruned_loss=0.03768, over 3585719.47 frames. ], batch size: 94, lr: 5.63e-03, grad_scale: 8.0 +2023-03-09 13:08:41,363 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71348.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:08:53,561 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-09 13:09:13,542 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.760e+02 2.586e+02 3.152e+02 3.675e+02 6.468e+02, threshold=6.303e+02, percent-clipped=0.0 +2023-03-09 13:09:17,749 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1069, 5.0939, 5.3322, 5.2513, 5.0869, 5.8850, 5.4821, 5.1298], + device='cuda:1'), covar=tensor([0.1105, 0.0718, 0.0774, 0.0810, 0.1370, 0.0722, 0.0837, 0.1850], + device='cuda:1'), in_proj_covar=tensor([0.0358, 0.0284, 0.0309, 0.0306, 0.0331, 0.0419, 0.0282, 0.0414], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 13:09:25,916 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71386.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:09:38,248 INFO [train.py:898] (1/4) Epoch 20, batch 2350, loss[loss=0.211, simple_loss=0.3063, pruned_loss=0.05782, over 18309.00 frames. 
], tot_loss[loss=0.1654, simple_loss=0.2547, pruned_loss=0.03805, over 3593259.41 frames. ], batch size: 57, lr: 5.62e-03, grad_scale: 8.0 +2023-03-09 13:10:37,016 INFO [train.py:898] (1/4) Epoch 20, batch 2400, loss[loss=0.1747, simple_loss=0.2728, pruned_loss=0.03826, over 18275.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.254, pruned_loss=0.03751, over 3609596.38 frames. ], batch size: 57, lr: 5.62e-03, grad_scale: 8.0 +2023-03-09 13:11:10,786 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.729e+02 2.680e+02 3.169e+02 3.609e+02 6.256e+02, threshold=6.338e+02, percent-clipped=0.0 +2023-03-09 13:11:35,514 INFO [train.py:898] (1/4) Epoch 20, batch 2450, loss[loss=0.1503, simple_loss=0.2367, pruned_loss=0.03192, over 17625.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.2544, pruned_loss=0.03764, over 3583232.30 frames. ], batch size: 39, lr: 5.62e-03, grad_scale: 8.0 +2023-03-09 13:12:33,799 INFO [train.py:898] (1/4) Epoch 20, batch 2500, loss[loss=0.1821, simple_loss=0.2743, pruned_loss=0.04496, over 17338.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2548, pruned_loss=0.03789, over 3589596.24 frames. ], batch size: 78, lr: 5.62e-03, grad_scale: 8.0 +2023-03-09 13:13:07,753 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.564e+02 2.655e+02 3.143e+02 3.839e+02 8.943e+02, threshold=6.287e+02, percent-clipped=3.0 +2023-03-09 13:13:10,399 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6148, 3.5829, 2.3620, 4.4396, 3.1338, 4.3395, 2.5683, 4.0467], + device='cuda:1'), covar=tensor([0.0619, 0.0808, 0.1409, 0.0443, 0.0806, 0.0310, 0.1189, 0.0386], + device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0226, 0.0189, 0.0283, 0.0193, 0.0264, 0.0203, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 13:13:32,181 INFO [train.py:898] (1/4) Epoch 20, batch 2550, loss[loss=0.1844, simple_loss=0.2722, pruned_loss=0.04825, over 16992.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.255, pruned_loss=0.0379, over 3595859.12 frames. ], batch size: 78, lr: 5.62e-03, grad_scale: 8.0 +2023-03-09 13:13:38,485 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8122, 3.1002, 4.4608, 3.9818, 2.8847, 4.8488, 4.1875, 3.1594], + device='cuda:1'), covar=tensor([0.0455, 0.1291, 0.0302, 0.0395, 0.1498, 0.0217, 0.0489, 0.0911], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0238, 0.0207, 0.0162, 0.0226, 0.0212, 0.0245, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 13:13:55,641 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71616.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:14:11,027 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4555, 5.2176, 5.7411, 5.7747, 5.4113, 6.2855, 5.9480, 5.5959], + device='cuda:1'), covar=tensor([0.1142, 0.0687, 0.0702, 0.0663, 0.1451, 0.0745, 0.0646, 0.1779], + device='cuda:1'), in_proj_covar=tensor([0.0359, 0.0287, 0.0311, 0.0308, 0.0331, 0.0420, 0.0283, 0.0417], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 13:14:31,308 INFO [train.py:898] (1/4) Epoch 20, batch 2600, loss[loss=0.15, simple_loss=0.2253, pruned_loss=0.03729, over 17611.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2537, pruned_loss=0.03736, over 3603901.05 frames. 
], batch size: 39, lr: 5.62e-03, grad_scale: 8.0 +2023-03-09 13:14:32,723 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71648.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:15:05,045 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.778e+02 2.596e+02 2.940e+02 3.683e+02 1.082e+03, threshold=5.881e+02, percent-clipped=3.0 +2023-03-09 13:15:06,419 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71677.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:15:16,482 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71686.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:15:19,517 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-09 13:15:27,380 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.81 vs. limit=5.0 +2023-03-09 13:15:28,097 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71696.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:15:29,060 INFO [train.py:898] (1/4) Epoch 20, batch 2650, loss[loss=0.1591, simple_loss=0.2559, pruned_loss=0.03111, over 18490.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2531, pruned_loss=0.03722, over 3606884.87 frames. ], batch size: 51, lr: 5.61e-03, grad_scale: 8.0 +2023-03-09 13:15:36,031 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-09 13:15:44,720 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3375, 5.3445, 4.9957, 5.3079, 5.2552, 4.6726, 5.1360, 4.9398], + device='cuda:1'), covar=tensor([0.0425, 0.0410, 0.1098, 0.0661, 0.0566, 0.0394, 0.0416, 0.0988], + device='cuda:1'), in_proj_covar=tensor([0.0490, 0.0549, 0.0698, 0.0431, 0.0446, 0.0498, 0.0538, 0.0674], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 13:15:58,263 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.3983, 1.9833, 2.0175, 2.1213, 2.4624, 2.3963, 2.3390, 2.1059], + device='cuda:1'), covar=tensor([0.0232, 0.0230, 0.0485, 0.0414, 0.0225, 0.0210, 0.0394, 0.0317], + device='cuda:1'), in_proj_covar=tensor([0.0140, 0.0131, 0.0161, 0.0154, 0.0128, 0.0114, 0.0150, 0.0153], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 13:16:12,124 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71734.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:16:22,088 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6890, 2.4712, 2.7509, 2.8197, 3.4187, 5.0278, 4.8686, 3.5895], + device='cuda:1'), covar=tensor([0.1782, 0.2295, 0.2910, 0.1767, 0.2095, 0.0198, 0.0356, 0.0852], + device='cuda:1'), in_proj_covar=tensor([0.0296, 0.0343, 0.0377, 0.0275, 0.0386, 0.0238, 0.0293, 0.0251], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 13:16:27,159 INFO [train.py:898] (1/4) Epoch 20, batch 2700, loss[loss=0.1396, simple_loss=0.229, pruned_loss=0.02512, over 18271.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2538, pruned_loss=0.0375, over 3602194.55 frames. 
], batch size: 45, lr: 5.61e-03, grad_scale: 8.0 +2023-03-09 13:17:00,512 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5455, 3.5344, 3.4183, 3.1135, 3.3355, 2.7918, 2.7435, 3.5929], + device='cuda:1'), covar=tensor([0.0058, 0.0083, 0.0075, 0.0126, 0.0089, 0.0174, 0.0187, 0.0058], + device='cuda:1'), in_proj_covar=tensor([0.0135, 0.0154, 0.0129, 0.0182, 0.0138, 0.0175, 0.0178, 0.0116], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 13:17:01,229 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.908e+02 2.794e+02 3.196e+02 3.940e+02 6.442e+02, threshold=6.393e+02, percent-clipped=2.0 +2023-03-09 13:17:25,386 INFO [train.py:898] (1/4) Epoch 20, batch 2750, loss[loss=0.1865, simple_loss=0.278, pruned_loss=0.04746, over 17939.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2536, pruned_loss=0.03758, over 3605970.79 frames. ], batch size: 70, lr: 5.61e-03, grad_scale: 8.0 +2023-03-09 13:18:23,264 INFO [train.py:898] (1/4) Epoch 20, batch 2800, loss[loss=0.1894, simple_loss=0.279, pruned_loss=0.04993, over 17093.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2535, pruned_loss=0.0375, over 3610788.73 frames. ], batch size: 78, lr: 5.61e-03, grad_scale: 8.0 +2023-03-09 13:18:26,237 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6461, 4.8900, 4.7653, 4.8826, 4.6279, 5.3377, 5.0158, 4.6854], + device='cuda:1'), covar=tensor([0.1050, 0.0840, 0.0805, 0.0742, 0.1344, 0.0835, 0.0712, 0.1720], + device='cuda:1'), in_proj_covar=tensor([0.0355, 0.0285, 0.0308, 0.0305, 0.0329, 0.0419, 0.0280, 0.0412], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:1') +2023-03-09 13:18:56,842 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.812e+02 2.512e+02 3.220e+02 3.992e+02 1.001e+03, threshold=6.440e+02, percent-clipped=3.0 +2023-03-09 13:19:22,131 INFO [train.py:898] (1/4) Epoch 20, batch 2850, loss[loss=0.1856, simple_loss=0.2704, pruned_loss=0.0504, over 17778.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.2544, pruned_loss=0.03772, over 3595165.06 frames. ], batch size: 70, lr: 5.61e-03, grad_scale: 8.0 +2023-03-09 13:19:23,741 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7258, 4.0057, 2.4028, 3.9616, 4.9623, 2.5673, 3.7326, 3.8049], + device='cuda:1'), covar=tensor([0.0190, 0.1143, 0.1622, 0.0576, 0.0106, 0.1261, 0.0663, 0.0780], + device='cuda:1'), in_proj_covar=tensor([0.0163, 0.0268, 0.0203, 0.0195, 0.0125, 0.0182, 0.0214, 0.0224], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 13:19:49,021 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71919.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:20:21,474 INFO [train.py:898] (1/4) Epoch 20, batch 2900, loss[loss=0.1436, simple_loss=0.2304, pruned_loss=0.02847, over 17744.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2539, pruned_loss=0.03758, over 3602555.11 frames. 
], batch size: 39, lr: 5.60e-03, grad_scale: 8.0 +2023-03-09 13:20:27,073 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8312, 4.5608, 4.6353, 3.3655, 3.7537, 3.4085, 2.6339, 2.5673], + device='cuda:1'), covar=tensor([0.0221, 0.0148, 0.0079, 0.0332, 0.0367, 0.0245, 0.0751, 0.0857], + device='cuda:1'), in_proj_covar=tensor([0.0070, 0.0059, 0.0062, 0.0068, 0.0089, 0.0066, 0.0077, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 13:20:41,452 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4274, 5.2208, 5.6228, 5.7319, 5.2934, 6.1568, 5.8159, 5.4121], + device='cuda:1'), covar=tensor([0.1020, 0.0659, 0.0693, 0.0615, 0.1322, 0.0642, 0.0639, 0.1496], + device='cuda:1'), in_proj_covar=tensor([0.0354, 0.0281, 0.0305, 0.0302, 0.0325, 0.0415, 0.0277, 0.0408], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:1') +2023-03-09 13:20:51,391 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71972.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:20:55,782 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.736e+02 2.545e+02 2.901e+02 3.371e+02 5.685e+02, threshold=5.802e+02, percent-clipped=0.0 +2023-03-09 13:21:00,701 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71980.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:21:14,220 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0207, 5.1283, 5.1379, 4.8404, 4.8461, 4.7871, 5.2306, 5.1891], + device='cuda:1'), covar=tensor([0.0061, 0.0057, 0.0063, 0.0099, 0.0061, 0.0153, 0.0066, 0.0092], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0070, 0.0073, 0.0092, 0.0075, 0.0104, 0.0086, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 13:21:20,629 INFO [train.py:898] (1/4) Epoch 20, batch 2950, loss[loss=0.1746, simple_loss=0.272, pruned_loss=0.03859, over 18584.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2534, pruned_loss=0.03721, over 3597572.56 frames. ], batch size: 54, lr: 5.60e-03, grad_scale: 8.0 +2023-03-09 13:21:23,641 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.90 vs. limit=5.0 +2023-03-09 13:21:46,450 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.57 vs. limit=2.0 +2023-03-09 13:22:21,550 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7021, 4.3977, 4.3785, 3.1911, 3.5650, 3.3298, 2.3330, 2.2725], + device='cuda:1'), covar=tensor([0.0228, 0.0151, 0.0075, 0.0350, 0.0374, 0.0249, 0.0838, 0.0918], + device='cuda:1'), in_proj_covar=tensor([0.0070, 0.0059, 0.0062, 0.0068, 0.0088, 0.0066, 0.0076, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 13:22:24,557 INFO [train.py:898] (1/4) Epoch 20, batch 3000, loss[loss=0.1782, simple_loss=0.2667, pruned_loss=0.04487, over 18281.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2532, pruned_loss=0.03696, over 3611258.46 frames. ], batch size: 57, lr: 5.60e-03, grad_scale: 8.0 +2023-03-09 13:22:24,557 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 13:22:36,472 INFO [train.py:932] (1/4) Epoch 20, validation: loss=0.1501, simple_loss=0.25, pruned_loss=0.02514, over 944034.00 frames. 
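The optim.py:369 lines in this log record an adaptive clipping rule: the five numbers after "grad-norm quartiles" are the min / 25% / median / 75% / max of recently observed gradient norms, and in every such entry here the threshold equals Clipping_scale times the median (e.g. 2.0 * 2.901e+02 = 5.802e+02 in the 13:20:55,782 entry just above). A minimal Python sketch of that arithmetic, assuming this reading of the log — it is an illustration, not the icefall optimizer source, and `clipping_report` plus its percent-clipped interpretation are hypothetical:

import torch

def clipping_report(grad_norms: torch.Tensor, clipping_scale: float = 2.0):
    """Hypothetical helper: summarises recent grad norms the way this log does."""
    # min / 25% / median / 75% / max, matching the five logged quartile values.
    q = torch.quantile(grad_norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    # Assumption (consistent with every optim.py:369 entry in this log):
    # threshold = clipping_scale * median of the recent grad norms.
    threshold = clipping_scale * q[2]
    # Assumption: percent-clipped is the share of recent norms above threshold.
    percent_clipped = 100.0 * (grad_norms > threshold).float().mean()
    return q, threshold.item(), percent_clipped.item()

# Norms taken from the 13:20:55,782 entry above:
# quartiles 1.736e+02 2.545e+02 2.901e+02 3.371e+02 5.685e+02.
norms = torch.tensor([173.6, 254.5, 290.1, 337.1, 568.5])
print(clipping_report(norms))  # threshold ~ 580.2 = 2.0 * 290.1; 0.0% clipped

Run on those logged quartiles, the sketch reproduces the logged threshold=5.802e+02 and percent-clipped=0.0, which is the consistency check behind the assumption stated above.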
+2023-03-09 13:22:36,473 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 13:22:40,899 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5836, 2.3643, 2.5409, 2.7322, 3.3618, 4.9840, 4.7834, 3.5918], + device='cuda:1'), covar=tensor([0.1892, 0.2495, 0.2916, 0.1850, 0.2208, 0.0228, 0.0376, 0.0913], + device='cuda:1'), in_proj_covar=tensor([0.0298, 0.0346, 0.0379, 0.0277, 0.0389, 0.0240, 0.0296, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 13:23:10,304 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.792e+02 2.528e+02 3.006e+02 3.462e+02 4.966e+02, threshold=6.013e+02, percent-clipped=0.0 +2023-03-09 13:23:33,868 INFO [train.py:898] (1/4) Epoch 20, batch 3050, loss[loss=0.1787, simple_loss=0.2641, pruned_loss=0.04667, over 16271.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.253, pruned_loss=0.03712, over 3598995.19 frames. ], batch size: 94, lr: 5.60e-03, grad_scale: 8.0 +2023-03-09 13:24:11,983 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72129.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:24:31,887 INFO [train.py:898] (1/4) Epoch 20, batch 3100, loss[loss=0.1391, simple_loss=0.2182, pruned_loss=0.03004, over 18391.00 frames. ], tot_loss[loss=0.1637, simple_loss=0.2531, pruned_loss=0.0372, over 3597089.09 frames. ], batch size: 42, lr: 5.60e-03, grad_scale: 8.0 +2023-03-09 13:24:45,624 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8579, 4.0388, 2.2371, 4.1053, 5.2067, 2.4677, 3.6980, 3.9817], + device='cuda:1'), covar=tensor([0.0185, 0.1190, 0.1754, 0.0588, 0.0073, 0.1276, 0.0723, 0.0704], + device='cuda:1'), in_proj_covar=tensor([0.0163, 0.0268, 0.0203, 0.0194, 0.0124, 0.0182, 0.0214, 0.0223], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 13:24:52,476 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7011, 2.5936, 2.5050, 2.7586, 3.0158, 3.7520, 3.6957, 3.0778], + device='cuda:1'), covar=tensor([0.1546, 0.2012, 0.2658, 0.1572, 0.2060, 0.0399, 0.0526, 0.0809], + device='cuda:1'), in_proj_covar=tensor([0.0295, 0.0342, 0.0376, 0.0274, 0.0384, 0.0238, 0.0292, 0.0251], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 13:25:05,467 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.922e+02 2.879e+02 3.465e+02 4.058e+02 1.741e+03, threshold=6.931e+02, percent-clipped=3.0 +2023-03-09 13:25:11,250 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9682, 5.4062, 5.4559, 5.4711, 4.8913, 5.3623, 4.6570, 5.2903], + device='cuda:1'), covar=tensor([0.0246, 0.0324, 0.0191, 0.0362, 0.0376, 0.0218, 0.1188, 0.0345], + device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0263, 0.0252, 0.0326, 0.0267, 0.0269, 0.0308, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 13:25:22,437 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72190.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:25:29,912 INFO [train.py:898] (1/4) Epoch 20, batch 3150, loss[loss=0.1475, simple_loss=0.2329, pruned_loss=0.03109, over 18422.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2525, pruned_loss=0.03732, over 3583777.50 frames. 
], batch size: 42, lr: 5.59e-03, grad_scale: 8.0 +2023-03-09 13:26:28,293 INFO [train.py:898] (1/4) Epoch 20, batch 3200, loss[loss=0.1853, simple_loss=0.2751, pruned_loss=0.0478, over 17813.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2536, pruned_loss=0.03787, over 3569769.36 frames. ], batch size: 70, lr: 5.59e-03, grad_scale: 8.0 +2023-03-09 13:26:58,293 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72272.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:27:01,426 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72275.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:27:02,249 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.897e+02 2.541e+02 3.039e+02 3.727e+02 6.894e+02, threshold=6.078e+02, percent-clipped=0.0 +2023-03-09 13:27:26,960 INFO [train.py:898] (1/4) Epoch 20, batch 3250, loss[loss=0.1424, simple_loss=0.2302, pruned_loss=0.02734, over 18249.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2535, pruned_loss=0.03779, over 3569912.95 frames. ], batch size: 45, lr: 5.59e-03, grad_scale: 8.0 +2023-03-09 13:27:54,549 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72320.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:28:01,539 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72326.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:28:26,307 INFO [train.py:898] (1/4) Epoch 20, batch 3300, loss[loss=0.1484, simple_loss=0.2324, pruned_loss=0.0322, over 18145.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2533, pruned_loss=0.03754, over 3568421.64 frames. ], batch size: 44, lr: 5.59e-03, grad_scale: 8.0 +2023-03-09 13:28:59,549 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.671e+02 2.585e+02 3.093e+02 3.745e+02 6.095e+02, threshold=6.186e+02, percent-clipped=1.0 +2023-03-09 13:29:12,943 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72387.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:29:24,275 INFO [train.py:898] (1/4) Epoch 20, batch 3350, loss[loss=0.1645, simple_loss=0.2589, pruned_loss=0.03504, over 18620.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2531, pruned_loss=0.03724, over 3576063.92 frames. ], batch size: 52, lr: 5.59e-03, grad_scale: 8.0 +2023-03-09 13:30:23,821 INFO [train.py:898] (1/4) Epoch 20, batch 3400, loss[loss=0.1845, simple_loss=0.2754, pruned_loss=0.0468, over 18482.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2533, pruned_loss=0.03725, over 3565879.67 frames. 
], batch size: 59, lr: 5.58e-03, grad_scale: 8.0 +2023-03-09 13:30:34,224 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6029, 4.1540, 4.1347, 3.1973, 3.4850, 3.2072, 2.5835, 2.3545], + device='cuda:1'), covar=tensor([0.0209, 0.0150, 0.0099, 0.0333, 0.0324, 0.0243, 0.0691, 0.0840], + device='cuda:1'), in_proj_covar=tensor([0.0069, 0.0058, 0.0061, 0.0067, 0.0087, 0.0065, 0.0076, 0.0082], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 13:30:57,199 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.915e+02 2.809e+02 3.353e+02 3.945e+02 1.222e+03, threshold=6.706e+02, percent-clipped=5.0 +2023-03-09 13:31:07,748 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72485.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:31:22,241 INFO [train.py:898] (1/4) Epoch 20, batch 3450, loss[loss=0.154, simple_loss=0.2462, pruned_loss=0.03095, over 18281.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2533, pruned_loss=0.03724, over 3581437.05 frames. ], batch size: 49, lr: 5.58e-03, grad_scale: 8.0 +2023-03-09 13:32:08,526 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3704, 2.7056, 2.4330, 2.8087, 3.5231, 3.3758, 3.0341, 2.8957], + device='cuda:1'), covar=tensor([0.0189, 0.0282, 0.0597, 0.0385, 0.0195, 0.0191, 0.0382, 0.0342], + device='cuda:1'), in_proj_covar=tensor([0.0141, 0.0131, 0.0162, 0.0156, 0.0129, 0.0115, 0.0152, 0.0155], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 13:32:20,079 INFO [train.py:898] (1/4) Epoch 20, batch 3500, loss[loss=0.1626, simple_loss=0.2543, pruned_loss=0.03541, over 18123.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.253, pruned_loss=0.03729, over 3586014.94 frames. ], batch size: 62, lr: 5.58e-03, grad_scale: 16.0 +2023-03-09 13:32:52,148 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72575.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:32:53,021 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.775e+02 2.575e+02 2.979e+02 3.509e+02 6.316e+02, threshold=5.957e+02, percent-clipped=0.0 +2023-03-09 13:33:16,608 INFO [train.py:898] (1/4) Epoch 20, batch 3550, loss[loss=0.1729, simple_loss=0.2548, pruned_loss=0.04554, over 18549.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2534, pruned_loss=0.03759, over 3588133.40 frames. ], batch size: 49, lr: 5.58e-03, grad_scale: 16.0 +2023-03-09 13:33:45,203 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72623.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:34:10,342 INFO [train.py:898] (1/4) Epoch 20, batch 3600, loss[loss=0.1701, simple_loss=0.2601, pruned_loss=0.04008, over 17990.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2536, pruned_loss=0.03754, over 3583538.85 frames. ], batch size: 65, lr: 5.58e-03, grad_scale: 8.0 +2023-03-09 13:34:26,210 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.53 vs. limit=5.0 +2023-03-09 13:34:42,422 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.959e+02 2.531e+02 3.153e+02 3.638e+02 9.354e+02, threshold=6.307e+02, percent-clipped=0.0 +2023-03-09 13:35:15,798 INFO [train.py:898] (1/4) Epoch 21, batch 0, loss[loss=0.1427, simple_loss=0.2306, pruned_loss=0.02743, over 18354.00 frames. 
], tot_loss[loss=0.1427, simple_loss=0.2306, pruned_loss=0.02743, over 18354.00 frames. ], batch size: 46, lr: 5.44e-03, grad_scale: 8.0 +2023-03-09 13:35:15,799 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 13:35:27,495 INFO [train.py:932] (1/4) Epoch 21, validation: loss=0.1511, simple_loss=0.2511, pruned_loss=0.02556, over 944034.00 frames. +2023-03-09 13:35:27,496 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 13:35:28,910 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72682.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:36:23,151 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8730, 4.0420, 2.5075, 4.1340, 5.1591, 2.5474, 3.8415, 3.9876], + device='cuda:1'), covar=tensor([0.0164, 0.1148, 0.1659, 0.0581, 0.0089, 0.1333, 0.0669, 0.0695], + device='cuda:1'), in_proj_covar=tensor([0.0164, 0.0269, 0.0202, 0.0194, 0.0125, 0.0183, 0.0215, 0.0222], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 13:36:26,172 INFO [train.py:898] (1/4) Epoch 21, batch 50, loss[loss=0.1749, simple_loss=0.2601, pruned_loss=0.04484, over 18623.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2544, pruned_loss=0.03707, over 817268.23 frames. ], batch size: 52, lr: 5.44e-03, grad_scale: 8.0 +2023-03-09 13:37:20,700 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.036e+02 2.565e+02 3.180e+02 3.639e+02 9.362e+02, threshold=6.360e+02, percent-clipped=2.0 +2023-03-09 13:37:25,029 INFO [train.py:898] (1/4) Epoch 21, batch 100, loss[loss=0.1621, simple_loss=0.2519, pruned_loss=0.03616, over 18314.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2532, pruned_loss=0.03759, over 1429111.91 frames. ], batch size: 56, lr: 5.43e-03, grad_scale: 8.0 +2023-03-09 13:37:29,814 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72785.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:37:49,313 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7924, 3.4946, 4.7966, 2.8015, 4.2727, 2.5433, 2.9873, 1.8113], + device='cuda:1'), covar=tensor([0.1178, 0.0975, 0.0207, 0.0949, 0.0516, 0.2573, 0.2590, 0.2157], + device='cuda:1'), in_proj_covar=tensor([0.0220, 0.0244, 0.0186, 0.0196, 0.0257, 0.0271, 0.0324, 0.0234], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 13:38:23,942 INFO [train.py:898] (1/4) Epoch 21, batch 150, loss[loss=0.161, simple_loss=0.2548, pruned_loss=0.03361, over 17204.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2536, pruned_loss=0.03721, over 1909641.25 frames. ], batch size: 78, lr: 5.43e-03, grad_scale: 8.0 +2023-03-09 13:38:26,429 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72833.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:39:17,916 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.843e+02 2.851e+02 3.537e+02 4.282e+02 1.325e+03, threshold=7.073e+02, percent-clipped=5.0 +2023-03-09 13:39:22,598 INFO [train.py:898] (1/4) Epoch 21, batch 200, loss[loss=0.1697, simple_loss=0.2635, pruned_loss=0.03796, over 18308.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2541, pruned_loss=0.03739, over 2276268.64 frames. 
], batch size: 54, lr: 5.43e-03, grad_scale: 8.0 +2023-03-09 13:39:37,535 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72894.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:40:04,469 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6857, 5.2424, 5.2056, 5.2020, 4.7446, 5.1466, 4.5780, 5.0938], + device='cuda:1'), covar=tensor([0.0237, 0.0273, 0.0202, 0.0461, 0.0401, 0.0238, 0.1064, 0.0344], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0261, 0.0251, 0.0326, 0.0265, 0.0268, 0.0305, 0.0257], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 13:40:20,985 INFO [train.py:898] (1/4) Epoch 21, batch 250, loss[loss=0.1676, simple_loss=0.2624, pruned_loss=0.03638, over 18350.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2542, pruned_loss=0.03743, over 2563617.87 frames. ], batch size: 55, lr: 5.43e-03, grad_scale: 8.0 +2023-03-09 13:40:46,812 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9592, 4.7841, 4.8539, 3.7431, 4.0105, 3.6724, 2.9668, 2.8333], + device='cuda:1'), covar=tensor([0.0212, 0.0126, 0.0067, 0.0256, 0.0293, 0.0217, 0.0620, 0.0763], + device='cuda:1'), in_proj_covar=tensor([0.0070, 0.0058, 0.0062, 0.0068, 0.0088, 0.0066, 0.0076, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 13:40:48,964 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72955.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:40:59,867 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72964.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 13:41:14,357 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.784e+02 2.579e+02 3.135e+02 3.848e+02 6.941e+02, threshold=6.270e+02, percent-clipped=0.0 +2023-03-09 13:41:18,869 INFO [train.py:898] (1/4) Epoch 21, batch 300, loss[loss=0.1675, simple_loss=0.2603, pruned_loss=0.03738, over 18582.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2527, pruned_loss=0.037, over 2789586.11 frames. ], batch size: 54, lr: 5.43e-03, grad_scale: 8.0 +2023-03-09 13:41:20,181 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72982.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:42:11,321 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73025.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 13:42:16,848 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73030.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:42:17,823 INFO [train.py:898] (1/4) Epoch 21, batch 350, loss[loss=0.1421, simple_loss=0.2281, pruned_loss=0.02802, over 18256.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2521, pruned_loss=0.03673, over 2969487.14 frames. ], batch size: 45, lr: 5.43e-03, grad_scale: 8.0 +2023-03-09 13:43:11,918 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.597e+02 2.497e+02 3.036e+02 3.677e+02 5.898e+02, threshold=6.073e+02, percent-clipped=0.0 +2023-03-09 13:43:16,486 INFO [train.py:898] (1/4) Epoch 21, batch 400, loss[loss=0.1672, simple_loss=0.2591, pruned_loss=0.0376, over 18407.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2516, pruned_loss=0.03625, over 3115861.92 frames. 
], batch size: 52, lr: 5.42e-03, grad_scale: 8.0 +2023-03-09 13:43:27,245 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-09 13:44:08,390 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-03-09 13:44:14,450 INFO [train.py:898] (1/4) Epoch 21, batch 450, loss[loss=0.188, simple_loss=0.2726, pruned_loss=0.05169, over 18356.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2509, pruned_loss=0.03622, over 3225956.47 frames. ], batch size: 56, lr: 5.42e-03, grad_scale: 8.0 +2023-03-09 13:44:15,017 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-03-09 13:44:47,099 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7144, 4.4435, 4.4763, 3.4267, 3.6568, 3.4737, 2.6412, 2.4431], + device='cuda:1'), covar=tensor([0.0236, 0.0165, 0.0070, 0.0309, 0.0341, 0.0225, 0.0741, 0.0861], + device='cuda:1'), in_proj_covar=tensor([0.0070, 0.0059, 0.0062, 0.0069, 0.0089, 0.0066, 0.0077, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 13:44:52,006 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5701, 2.8356, 4.1824, 3.6558, 2.6506, 4.4196, 3.8748, 2.9009], + device='cuda:1'), covar=tensor([0.0504, 0.1411, 0.0279, 0.0405, 0.1486, 0.0212, 0.0597, 0.0984], + device='cuda:1'), in_proj_covar=tensor([0.0206, 0.0235, 0.0207, 0.0161, 0.0220, 0.0208, 0.0243, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 13:45:06,943 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.937e+02 2.746e+02 3.322e+02 3.984e+02 6.045e+02, threshold=6.644e+02, percent-clipped=0.0 +2023-03-09 13:45:12,778 INFO [train.py:898] (1/4) Epoch 21, batch 500, loss[loss=0.1655, simple_loss=0.2554, pruned_loss=0.03774, over 18553.00 frames. ], tot_loss[loss=0.1613, simple_loss=0.2504, pruned_loss=0.03607, over 3320110.92 frames. ], batch size: 54, lr: 5.42e-03, grad_scale: 8.0 +2023-03-09 13:46:10,611 INFO [train.py:898] (1/4) Epoch 21, batch 550, loss[loss=0.1756, simple_loss=0.2723, pruned_loss=0.03946, over 18334.00 frames. ], tot_loss[loss=0.1621, simple_loss=0.2513, pruned_loss=0.03646, over 3374604.62 frames. ], batch size: 56, lr: 5.42e-03, grad_scale: 8.0 +2023-03-09 13:46:32,271 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73250.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:47:03,604 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.734e+02 2.558e+02 3.003e+02 3.637e+02 1.061e+03, threshold=6.005e+02, percent-clipped=3.0 +2023-03-09 13:47:08,047 INFO [train.py:898] (1/4) Epoch 21, batch 600, loss[loss=0.1468, simple_loss=0.2341, pruned_loss=0.0298, over 18371.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.2522, pruned_loss=0.03679, over 3418195.13 frames. 
], batch size: 46, lr: 5.42e-03, grad_scale: 8.0 +2023-03-09 13:47:20,254 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8275, 3.6791, 5.0125, 2.7446, 4.3014, 2.5137, 3.0205, 1.7492], + device='cuda:1'), covar=tensor([0.1134, 0.0845, 0.0149, 0.0937, 0.0508, 0.2636, 0.2704, 0.2197], + device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0244, 0.0186, 0.0196, 0.0257, 0.0272, 0.0324, 0.0233], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 13:47:22,457 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73293.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:47:53,076 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73320.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 13:48:06,377 INFO [train.py:898] (1/4) Epoch 21, batch 650, loss[loss=0.17, simple_loss=0.2626, pruned_loss=0.03865, over 18555.00 frames. ], tot_loss[loss=0.1632, simple_loss=0.2527, pruned_loss=0.03684, over 3452517.29 frames. ], batch size: 54, lr: 5.41e-03, grad_scale: 8.0 +2023-03-09 13:48:33,517 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73354.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:49:00,062 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.798e+02 2.707e+02 3.166e+02 4.000e+02 8.066e+02, threshold=6.331e+02, percent-clipped=2.0 +2023-03-09 13:49:04,586 INFO [train.py:898] (1/4) Epoch 21, batch 700, loss[loss=0.1938, simple_loss=0.289, pruned_loss=0.04929, over 18468.00 frames. ], tot_loss[loss=0.1635, simple_loss=0.2529, pruned_loss=0.03704, over 3480695.81 frames. ], batch size: 59, lr: 5.41e-03, grad_scale: 8.0 +2023-03-09 13:49:51,941 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-09 13:50:02,188 INFO [train.py:898] (1/4) Epoch 21, batch 750, loss[loss=0.1795, simple_loss=0.2641, pruned_loss=0.04745, over 12438.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2527, pruned_loss=0.03708, over 3499416.97 frames. ], batch size: 129, lr: 5.41e-03, grad_scale: 8.0 +2023-03-09 13:50:54,399 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.921e+02 2.615e+02 3.165e+02 3.785e+02 6.213e+02, threshold=6.329e+02, percent-clipped=0.0 +2023-03-09 13:50:59,647 INFO [train.py:898] (1/4) Epoch 21, batch 800, loss[loss=0.1636, simple_loss=0.2542, pruned_loss=0.03647, over 17972.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2538, pruned_loss=0.03767, over 3507149.69 frames. ], batch size: 65, lr: 5.41e-03, grad_scale: 8.0 +2023-03-09 13:51:49,109 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8729, 3.7917, 3.6143, 3.3124, 3.5458, 2.9691, 2.9690, 3.8619], + device='cuda:1'), covar=tensor([0.0060, 0.0097, 0.0082, 0.0140, 0.0104, 0.0215, 0.0210, 0.0065], + device='cuda:1'), in_proj_covar=tensor([0.0138, 0.0157, 0.0133, 0.0185, 0.0141, 0.0179, 0.0184, 0.0121], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 13:51:56,992 INFO [train.py:898] (1/4) Epoch 21, batch 850, loss[loss=0.1701, simple_loss=0.2529, pruned_loss=0.04366, over 18533.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2536, pruned_loss=0.03773, over 3524968.41 frames. 
], batch size: 49, lr: 5.41e-03, grad_scale: 8.0 +2023-03-09 13:52:20,223 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73550.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:52:30,306 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.16 vs. limit=5.0 +2023-03-09 13:52:50,041 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.633e+02 2.526e+02 3.131e+02 3.569e+02 6.821e+02, threshold=6.262e+02, percent-clipped=1.0 +2023-03-09 13:52:54,610 INFO [train.py:898] (1/4) Epoch 21, batch 900, loss[loss=0.1512, simple_loss=0.2321, pruned_loss=0.03518, over 17723.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2545, pruned_loss=0.03809, over 3540810.85 frames. ], batch size: 39, lr: 5.41e-03, grad_scale: 8.0 +2023-03-09 13:53:06,073 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73590.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:53:15,946 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73598.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:53:40,717 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73620.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 13:53:53,021 INFO [train.py:898] (1/4) Epoch 21, batch 950, loss[loss=0.1542, simple_loss=0.2423, pruned_loss=0.03303, over 18365.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2545, pruned_loss=0.03801, over 3545917.59 frames. ], batch size: 46, lr: 5.40e-03, grad_scale: 8.0 +2023-03-09 13:54:15,629 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73649.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:54:18,620 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73651.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:54:19,877 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8186, 3.7542, 5.0444, 2.9025, 4.4317, 2.6171, 3.0846, 1.8246], + device='cuda:1'), covar=tensor([0.1230, 0.0884, 0.0158, 0.0899, 0.0569, 0.2493, 0.2635, 0.2163], + device='cuda:1'), in_proj_covar=tensor([0.0220, 0.0245, 0.0188, 0.0196, 0.0259, 0.0271, 0.0322, 0.0234], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 13:54:37,481 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73668.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 13:54:47,208 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.873e+02 2.598e+02 2.980e+02 3.796e+02 7.725e+02, threshold=5.960e+02, percent-clipped=2.0 +2023-03-09 13:54:51,743 INFO [train.py:898] (1/4) Epoch 21, batch 1000, loss[loss=0.1541, simple_loss=0.2428, pruned_loss=0.03274, over 18412.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2532, pruned_loss=0.03752, over 3559776.91 frames. 
], batch size: 48, lr: 5.40e-03, grad_scale: 8.0 +2023-03-09 13:55:27,156 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9249, 5.0602, 5.0982, 4.7866, 4.8529, 4.8007, 5.1795, 5.1199], + device='cuda:1'), covar=tensor([0.0073, 0.0062, 0.0061, 0.0109, 0.0056, 0.0154, 0.0061, 0.0091], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0069, 0.0074, 0.0092, 0.0074, 0.0104, 0.0086, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 13:55:36,227 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5008, 3.7061, 4.9698, 4.2423, 3.3033, 2.8823, 4.4734, 5.1540], + device='cuda:1'), covar=tensor([0.0846, 0.1239, 0.0164, 0.0403, 0.0906, 0.1165, 0.0353, 0.0236], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0277, 0.0151, 0.0181, 0.0193, 0.0193, 0.0196, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 13:55:49,544 INFO [train.py:898] (1/4) Epoch 21, batch 1050, loss[loss=0.1622, simple_loss=0.2605, pruned_loss=0.03196, over 18269.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2533, pruned_loss=0.03746, over 3563189.62 frames. ], batch size: 57, lr: 5.40e-03, grad_scale: 8.0 +2023-03-09 13:55:50,863 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73732.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:56:43,396 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.657e+02 2.593e+02 3.039e+02 3.732e+02 7.995e+02, threshold=6.078e+02, percent-clipped=2.0 +2023-03-09 13:56:47,971 INFO [train.py:898] (1/4) Epoch 21, batch 1100, loss[loss=0.1893, simple_loss=0.2783, pruned_loss=0.05017, over 18134.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2531, pruned_loss=0.03725, over 3561487.27 frames. ], batch size: 62, lr: 5.40e-03, grad_scale: 8.0 +2023-03-09 13:57:01,690 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73793.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 13:57:41,782 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-09 13:57:46,777 INFO [train.py:898] (1/4) Epoch 21, batch 1150, loss[loss=0.1602, simple_loss=0.2508, pruned_loss=0.03481, over 16983.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2537, pruned_loss=0.03717, over 3561944.39 frames. ], batch size: 78, lr: 5.40e-03, grad_scale: 8.0 +2023-03-09 13:58:40,425 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.636e+02 2.572e+02 3.032e+02 3.666e+02 7.271e+02, threshold=6.063e+02, percent-clipped=3.0 +2023-03-09 13:58:43,312 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.10 vs. limit=5.0 +2023-03-09 13:58:44,859 INFO [train.py:898] (1/4) Epoch 21, batch 1200, loss[loss=0.1636, simple_loss=0.2503, pruned_loss=0.03842, over 18279.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.2545, pruned_loss=0.03758, over 3565128.58 frames. 
], batch size: 49, lr: 5.39e-03, grad_scale: 8.0 +2023-03-09 13:59:20,200 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6706, 3.5968, 4.8996, 4.2779, 3.3333, 2.9209, 4.4236, 5.1727], + device='cuda:1'), covar=tensor([0.0846, 0.1586, 0.0212, 0.0414, 0.0896, 0.1259, 0.0384, 0.0184], + device='cuda:1'), in_proj_covar=tensor([0.0148, 0.0273, 0.0150, 0.0179, 0.0191, 0.0190, 0.0194, 0.0194], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 13:59:35,426 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7632, 3.6740, 3.5478, 3.2438, 3.5454, 2.8068, 2.9003, 3.7003], + device='cuda:1'), covar=tensor([0.0057, 0.0087, 0.0081, 0.0117, 0.0090, 0.0188, 0.0193, 0.0073], + device='cuda:1'), in_proj_covar=tensor([0.0138, 0.0158, 0.0134, 0.0186, 0.0141, 0.0179, 0.0184, 0.0122], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 13:59:42,879 INFO [train.py:898] (1/4) Epoch 21, batch 1250, loss[loss=0.1994, simple_loss=0.2801, pruned_loss=0.05937, over 12550.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.2545, pruned_loss=0.03766, over 3559291.85 frames. ], batch size: 131, lr: 5.39e-03, grad_scale: 8.0 +2023-03-09 14:00:00,223 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73946.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:00:03,731 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73949.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:00:14,563 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5108, 3.1845, 4.3808, 3.6788, 2.8961, 4.6918, 4.0488, 2.9581], + device='cuda:1'), covar=tensor([0.0569, 0.1259, 0.0249, 0.0456, 0.1396, 0.0178, 0.0491, 0.0934], + device='cuda:1'), in_proj_covar=tensor([0.0208, 0.0236, 0.0208, 0.0161, 0.0222, 0.0211, 0.0244, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 14:00:34,605 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.63 vs. limit=5.0 +2023-03-09 14:00:37,351 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.681e+02 2.538e+02 2.943e+02 3.615e+02 7.778e+02, threshold=5.886e+02, percent-clipped=1.0 +2023-03-09 14:00:41,921 INFO [train.py:898] (1/4) Epoch 21, batch 1300, loss[loss=0.1701, simple_loss=0.2579, pruned_loss=0.04112, over 17948.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.2542, pruned_loss=0.0377, over 3567053.69 frames. ], batch size: 65, lr: 5.39e-03, grad_scale: 8.0 +2023-03-09 14:00:50,369 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.40 vs. 
limit=2.0 +2023-03-09 14:01:00,267 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73997.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:01:18,613 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7963, 3.6149, 4.9658, 2.6570, 4.3430, 2.5970, 3.0351, 1.7443], + device='cuda:1'), covar=tensor([0.1216, 0.0975, 0.0162, 0.1078, 0.0527, 0.2658, 0.2656, 0.2227], + device='cuda:1'), in_proj_covar=tensor([0.0222, 0.0247, 0.0190, 0.0199, 0.0261, 0.0274, 0.0325, 0.0236], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 14:01:45,716 INFO [train.py:898] (1/4) Epoch 21, batch 1350, loss[loss=0.1757, simple_loss=0.2661, pruned_loss=0.04269, over 18110.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.254, pruned_loss=0.03757, over 3568619.10 frames. ], batch size: 62, lr: 5.39e-03, grad_scale: 8.0 +2023-03-09 14:02:05,040 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9490, 3.7522, 5.0042, 4.5641, 3.3383, 3.1835, 4.5646, 5.3784], + device='cuda:1'), covar=tensor([0.0756, 0.1503, 0.0213, 0.0338, 0.0905, 0.1094, 0.0364, 0.0240], + device='cuda:1'), in_proj_covar=tensor([0.0148, 0.0273, 0.0150, 0.0181, 0.0191, 0.0190, 0.0194, 0.0194], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 14:02:19,819 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4749, 2.6057, 2.5206, 2.8710, 3.6144, 3.4886, 2.9800, 2.8280], + device='cuda:1'), covar=tensor([0.0211, 0.0298, 0.0530, 0.0388, 0.0157, 0.0152, 0.0429, 0.0402], + device='cuda:1'), in_proj_covar=tensor([0.0139, 0.0132, 0.0160, 0.0155, 0.0127, 0.0115, 0.0151, 0.0154], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 14:02:22,963 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74064.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:02:39,203 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.953e+02 2.917e+02 3.282e+02 4.135e+02 8.979e+02, threshold=6.564e+02, percent-clipped=10.0 +2023-03-09 14:02:43,779 INFO [train.py:898] (1/4) Epoch 21, batch 1400, loss[loss=0.1637, simple_loss=0.2572, pruned_loss=0.03509, over 18197.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2528, pruned_loss=0.0372, over 3582800.29 frames. ], batch size: 60, lr: 5.39e-03, grad_scale: 8.0 +2023-03-09 14:02:51,613 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74088.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:03:11,148 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-03-09 14:03:28,628 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.68 vs. limit=2.0 +2023-03-09 14:03:35,074 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74125.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:03:41,405 INFO [train.py:898] (1/4) Epoch 21, batch 1450, loss[loss=0.1776, simple_loss=0.2618, pruned_loss=0.04677, over 16011.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2524, pruned_loss=0.03692, over 3584053.84 frames. 
], batch size: 94, lr: 5.39e-03, grad_scale: 8.0 +2023-03-09 14:04:33,933 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7031, 3.1651, 4.4553, 3.9685, 2.9001, 4.7780, 4.1128, 3.1565], + device='cuda:1'), covar=tensor([0.0496, 0.1339, 0.0273, 0.0393, 0.1417, 0.0231, 0.0510, 0.0893], + device='cuda:1'), in_proj_covar=tensor([0.0206, 0.0235, 0.0206, 0.0159, 0.0220, 0.0209, 0.0242, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 14:04:35,771 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.991e+02 2.519e+02 2.943e+02 3.724e+02 1.372e+03, threshold=5.886e+02, percent-clipped=4.0 +2023-03-09 14:04:37,321 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7921, 3.7924, 3.6123, 3.2025, 3.5220, 2.6982, 2.6392, 3.7958], + device='cuda:1'), covar=tensor([0.0067, 0.0084, 0.0094, 0.0146, 0.0105, 0.0243, 0.0319, 0.0067], + device='cuda:1'), in_proj_covar=tensor([0.0137, 0.0155, 0.0133, 0.0184, 0.0139, 0.0176, 0.0181, 0.0120], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 14:04:39,232 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-03-09 14:04:40,826 INFO [train.py:898] (1/4) Epoch 21, batch 1500, loss[loss=0.1567, simple_loss=0.2422, pruned_loss=0.03556, over 18275.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2518, pruned_loss=0.03684, over 3586807.71 frames. ], batch size: 47, lr: 5.38e-03, grad_scale: 8.0 +2023-03-09 14:04:43,553 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9448, 3.7401, 5.0354, 4.5975, 3.2885, 3.1681, 4.5124, 5.2857], + device='cuda:1'), covar=tensor([0.0779, 0.1475, 0.0180, 0.0301, 0.0928, 0.1079, 0.0354, 0.0205], + device='cuda:1'), in_proj_covar=tensor([0.0148, 0.0273, 0.0151, 0.0181, 0.0192, 0.0190, 0.0194, 0.0195], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 14:05:13,091 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1557, 4.2743, 2.6184, 4.2991, 5.4458, 2.8104, 3.8630, 4.1561], + device='cuda:1'), covar=tensor([0.0139, 0.1138, 0.1554, 0.0562, 0.0063, 0.1125, 0.0651, 0.0638], + device='cuda:1'), in_proj_covar=tensor([0.0169, 0.0273, 0.0206, 0.0198, 0.0127, 0.0185, 0.0216, 0.0227], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 14:05:39,324 INFO [train.py:898] (1/4) Epoch 21, batch 1550, loss[loss=0.1712, simple_loss=0.2701, pruned_loss=0.03611, over 18389.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2517, pruned_loss=0.03642, over 3596029.24 frames. 
], batch size: 52, lr: 5.38e-03, grad_scale: 8.0 +2023-03-09 14:05:50,295 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74240.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:05:56,939 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74246.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:06:16,485 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0717, 3.8061, 5.1853, 2.7956, 4.5274, 2.7377, 3.1542, 1.8670], + device='cuda:1'), covar=tensor([0.1048, 0.0871, 0.0168, 0.1010, 0.0486, 0.2564, 0.2745, 0.2128], + device='cuda:1'), in_proj_covar=tensor([0.0221, 0.0246, 0.0189, 0.0198, 0.0261, 0.0272, 0.0323, 0.0235], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 14:06:32,520 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.761e+02 2.609e+02 3.003e+02 3.447e+02 5.872e+02, threshold=6.005e+02, percent-clipped=0.0 +2023-03-09 14:06:37,155 INFO [train.py:898] (1/4) Epoch 21, batch 1600, loss[loss=0.1485, simple_loss=0.2436, pruned_loss=0.02668, over 18394.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2512, pruned_loss=0.03635, over 3598090.75 frames. ], batch size: 52, lr: 5.38e-03, grad_scale: 8.0 +2023-03-09 14:06:53,630 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74294.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:06:53,741 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.6555, 6.1118, 5.6658, 5.9677, 5.7610, 5.6590, 6.2435, 6.1631], + device='cuda:1'), covar=tensor([0.1190, 0.0873, 0.0424, 0.0736, 0.1371, 0.0670, 0.0579, 0.0720], + device='cuda:1'), in_proj_covar=tensor([0.0612, 0.0530, 0.0381, 0.0549, 0.0742, 0.0546, 0.0745, 0.0570], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 14:07:01,940 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74301.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:07:36,396 INFO [train.py:898] (1/4) Epoch 21, batch 1650, loss[loss=0.1679, simple_loss=0.266, pruned_loss=0.03488, over 18634.00 frames. ], tot_loss[loss=0.1616, simple_loss=0.2512, pruned_loss=0.03606, over 3610702.08 frames. ], batch size: 52, lr: 5.38e-03, grad_scale: 8.0 +2023-03-09 14:08:18,887 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6649, 2.5535, 4.4359, 4.1173, 2.5977, 4.6961, 4.0159, 2.9594], + device='cuda:1'), covar=tensor([0.0506, 0.2019, 0.0295, 0.0332, 0.1926, 0.0257, 0.0515, 0.1248], + device='cuda:1'), in_proj_covar=tensor([0.0205, 0.0235, 0.0206, 0.0158, 0.0219, 0.0208, 0.0241, 0.0195], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 14:08:30,594 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.874e+02 2.715e+02 3.292e+02 3.947e+02 6.741e+02, threshold=6.584e+02, percent-clipped=1.0 +2023-03-09 14:08:35,088 INFO [train.py:898] (1/4) Epoch 21, batch 1700, loss[loss=0.1667, simple_loss=0.258, pruned_loss=0.03769, over 18318.00 frames. ], tot_loss[loss=0.1621, simple_loss=0.2516, pruned_loss=0.03629, over 3602355.30 frames. 
], batch size: 54, lr: 5.38e-03, grad_scale: 8.0 +2023-03-09 14:08:44,099 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74388.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:09:09,675 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.67 vs. limit=2.0 +2023-03-09 14:09:10,489 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0772, 4.3015, 2.6707, 4.3363, 5.4067, 2.8631, 3.8638, 4.1392], + device='cuda:1'), covar=tensor([0.0170, 0.1085, 0.1503, 0.0541, 0.0072, 0.1135, 0.0650, 0.0691], + device='cuda:1'), in_proj_covar=tensor([0.0168, 0.0270, 0.0203, 0.0196, 0.0126, 0.0183, 0.0215, 0.0224], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 14:09:20,313 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74420.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:09:33,169 INFO [train.py:898] (1/4) Epoch 21, batch 1750, loss[loss=0.1691, simple_loss=0.2595, pruned_loss=0.03935, over 17773.00 frames. ], tot_loss[loss=0.1621, simple_loss=0.2516, pruned_loss=0.03628, over 3607199.94 frames. ], batch size: 70, lr: 5.37e-03, grad_scale: 8.0 +2023-03-09 14:09:38,727 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74436.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:10:25,798 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.991e+02 2.679e+02 3.149e+02 3.785e+02 6.522e+02, threshold=6.298e+02, percent-clipped=0.0 +2023-03-09 14:10:30,658 INFO [train.py:898] (1/4) Epoch 21, batch 1800, loss[loss=0.1768, simple_loss=0.2658, pruned_loss=0.04388, over 18349.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2521, pruned_loss=0.03656, over 3609903.71 frames. ], batch size: 56, lr: 5.37e-03, grad_scale: 8.0 +2023-03-09 14:11:28,024 INFO [train.py:898] (1/4) Epoch 21, batch 1850, loss[loss=0.1627, simple_loss=0.2537, pruned_loss=0.03583, over 18284.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.2526, pruned_loss=0.0366, over 3606571.10 frames. ], batch size: 57, lr: 5.37e-03, grad_scale: 8.0 +2023-03-09 14:11:32,623 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9106, 3.7384, 5.1736, 2.8704, 4.5873, 2.9151, 3.2162, 1.9501], + device='cuda:1'), covar=tensor([0.1108, 0.0892, 0.0139, 0.0919, 0.0473, 0.2237, 0.2379, 0.2037], + device='cuda:1'), in_proj_covar=tensor([0.0221, 0.0246, 0.0190, 0.0198, 0.0260, 0.0273, 0.0323, 0.0235], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 14:11:41,522 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1542, 5.5021, 2.8376, 5.3172, 5.2080, 5.5098, 5.3387, 2.8479], + device='cuda:1'), covar=tensor([0.0182, 0.0050, 0.0729, 0.0065, 0.0065, 0.0056, 0.0070, 0.0962], + device='cuda:1'), in_proj_covar=tensor([0.0088, 0.0080, 0.0095, 0.0095, 0.0084, 0.0075, 0.0084, 0.0097], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 14:11:44,088 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.67 vs. 
limit=2.0 +2023-03-09 14:12:21,972 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.780e+02 2.538e+02 3.055e+02 3.545e+02 5.986e+02, threshold=6.111e+02, percent-clipped=0.0 +2023-03-09 14:12:26,475 INFO [train.py:898] (1/4) Epoch 21, batch 1900, loss[loss=0.1856, simple_loss=0.2751, pruned_loss=0.048, over 17196.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2524, pruned_loss=0.03654, over 3608667.56 frames. ], batch size: 78, lr: 5.37e-03, grad_scale: 8.0 +2023-03-09 14:12:43,201 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74595.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:12:44,254 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74596.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:13:24,948 INFO [train.py:898] (1/4) Epoch 21, batch 1950, loss[loss=0.1721, simple_loss=0.275, pruned_loss=0.03462, over 18481.00 frames. ], tot_loss[loss=0.1625, simple_loss=0.2524, pruned_loss=0.03628, over 3611042.94 frames. ], batch size: 53, lr: 5.37e-03, grad_scale: 16.0 +2023-03-09 14:13:40,506 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1629, 5.1856, 5.2644, 4.9313, 5.0647, 4.9662, 5.3548, 5.2606], + device='cuda:1'), covar=tensor([0.0055, 0.0064, 0.0052, 0.0108, 0.0056, 0.0151, 0.0076, 0.0098], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0069, 0.0074, 0.0092, 0.0075, 0.0104, 0.0086, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 14:13:56,097 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74656.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:14:07,424 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5849, 2.9875, 4.4152, 3.8209, 2.7156, 4.6398, 4.0257, 2.9089], + device='cuda:1'), covar=tensor([0.0526, 0.1374, 0.0268, 0.0412, 0.1464, 0.0212, 0.0450, 0.0926], + device='cuda:1'), in_proj_covar=tensor([0.0208, 0.0238, 0.0208, 0.0160, 0.0220, 0.0210, 0.0242, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 14:14:19,404 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.749e+02 2.532e+02 2.978e+02 3.558e+02 7.453e+02, threshold=5.957e+02, percent-clipped=1.0 +2023-03-09 14:14:23,892 INFO [train.py:898] (1/4) Epoch 21, batch 2000, loss[loss=0.1662, simple_loss=0.2546, pruned_loss=0.03885, over 18196.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2525, pruned_loss=0.0365, over 3591569.32 frames. ], batch size: 60, lr: 5.37e-03, grad_scale: 16.0 +2023-03-09 14:15:09,497 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74720.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:15:21,706 INFO [train.py:898] (1/4) Epoch 21, batch 2050, loss[loss=0.1663, simple_loss=0.2641, pruned_loss=0.03421, over 18630.00 frames. ], tot_loss[loss=0.163, simple_loss=0.2528, pruned_loss=0.03666, over 3588854.03 frames. 
], batch size: 52, lr: 5.36e-03, grad_scale: 16.0 +2023-03-09 14:15:57,144 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7389, 2.4685, 2.6651, 2.7857, 3.4817, 5.0653, 4.9884, 3.3889], + device='cuda:1'), covar=tensor([0.1856, 0.2423, 0.3243, 0.1875, 0.2260, 0.0213, 0.0316, 0.0999], + device='cuda:1'), in_proj_covar=tensor([0.0299, 0.0347, 0.0383, 0.0277, 0.0387, 0.0241, 0.0295, 0.0255], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 14:16:05,320 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74768.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:16:15,321 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.051e+02 2.659e+02 3.081e+02 3.760e+02 7.989e+02, threshold=6.163e+02, percent-clipped=4.0 +2023-03-09 14:16:19,623 INFO [train.py:898] (1/4) Epoch 21, batch 2100, loss[loss=0.1446, simple_loss=0.2395, pruned_loss=0.02489, over 18370.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.253, pruned_loss=0.03676, over 3601485.25 frames. ], batch size: 50, lr: 5.36e-03, grad_scale: 16.0 +2023-03-09 14:16:29,008 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9131, 4.9522, 4.9787, 4.7178, 4.7366, 4.7597, 5.0789, 5.0475], + device='cuda:1'), covar=tensor([0.0076, 0.0076, 0.0068, 0.0122, 0.0070, 0.0148, 0.0100, 0.0108], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0069, 0.0074, 0.0092, 0.0075, 0.0103, 0.0086, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 14:17:04,359 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 14:17:17,969 INFO [train.py:898] (1/4) Epoch 21, batch 2150, loss[loss=0.1623, simple_loss=0.2525, pruned_loss=0.03601, over 18491.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.254, pruned_loss=0.03718, over 3574775.50 frames. ], batch size: 53, lr: 5.36e-03, grad_scale: 16.0 +2023-03-09 14:17:45,563 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0137, 3.7818, 5.1413, 3.0441, 4.4968, 2.7692, 3.2600, 1.9578], + device='cuda:1'), covar=tensor([0.1078, 0.0886, 0.0146, 0.0898, 0.0542, 0.2617, 0.2382, 0.2093], + device='cuda:1'), in_proj_covar=tensor([0.0220, 0.0245, 0.0191, 0.0197, 0.0260, 0.0273, 0.0323, 0.0235], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 14:18:11,060 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.902e+02 2.637e+02 3.175e+02 3.840e+02 7.861e+02, threshold=6.351e+02, percent-clipped=2.0 +2023-03-09 14:18:15,486 INFO [train.py:898] (1/4) Epoch 21, batch 2200, loss[loss=0.172, simple_loss=0.256, pruned_loss=0.04401, over 18278.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2539, pruned_loss=0.03737, over 3571442.33 frames. 
], batch size: 47, lr: 5.36e-03, grad_scale: 16.0 +2023-03-09 14:18:19,233 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9615, 5.0221, 5.1044, 4.8178, 4.9227, 4.8476, 5.1664, 5.1699], + device='cuda:1'), covar=tensor([0.0069, 0.0065, 0.0055, 0.0101, 0.0049, 0.0119, 0.0079, 0.0083], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0068, 0.0073, 0.0091, 0.0074, 0.0102, 0.0085, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 14:18:25,916 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7723, 5.2360, 5.2192, 5.2624, 4.7830, 5.1211, 4.5732, 5.1343], + device='cuda:1'), covar=tensor([0.0237, 0.0261, 0.0179, 0.0367, 0.0403, 0.0217, 0.1042, 0.0293], + device='cuda:1'), in_proj_covar=tensor([0.0218, 0.0263, 0.0255, 0.0329, 0.0270, 0.0270, 0.0311, 0.0261], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 14:18:32,668 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74896.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:18:47,807 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74909.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:19:13,475 INFO [train.py:898] (1/4) Epoch 21, batch 2250, loss[loss=0.1505, simple_loss=0.2407, pruned_loss=0.03011, over 18265.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2532, pruned_loss=0.03702, over 3578067.63 frames. ], batch size: 47, lr: 5.36e-03, grad_scale: 16.0 +2023-03-09 14:19:28,459 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74944.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:19:36,425 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74951.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:19:46,658 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.92 vs. limit=5.0 +2023-03-09 14:19:51,880 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74964.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:19:52,025 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6498, 3.5356, 4.8109, 4.1710, 3.1341, 2.9890, 4.2020, 5.0249], + device='cuda:1'), covar=tensor([0.0848, 0.1362, 0.0201, 0.0435, 0.1050, 0.1265, 0.0450, 0.0265], + device='cuda:1'), in_proj_covar=tensor([0.0147, 0.0273, 0.0151, 0.0181, 0.0191, 0.0190, 0.0194, 0.0195], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 14:19:59,312 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74970.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:20:08,788 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.078e+02 2.763e+02 3.199e+02 3.835e+02 8.109e+02, threshold=6.398e+02, percent-clipped=3.0 +2023-03-09 14:20:12,092 INFO [train.py:898] (1/4) Epoch 21, batch 2300, loss[loss=0.213, simple_loss=0.294, pruned_loss=0.06605, over 12848.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2533, pruned_loss=0.03716, over 3581456.93 frames. ], batch size: 129, lr: 5.35e-03, grad_scale: 8.0 +2023-03-09 14:20:12,728 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. 
limit=2.0 +2023-03-09 14:20:57,747 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4560, 3.2023, 2.2203, 4.2352, 2.9718, 4.1219, 2.3364, 3.7972], + device='cuda:1'), covar=tensor([0.0700, 0.1006, 0.1527, 0.0525, 0.0950, 0.0253, 0.1317, 0.0417], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0226, 0.0190, 0.0284, 0.0192, 0.0263, 0.0200, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 14:21:03,786 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75025.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:21:10,837 INFO [train.py:898] (1/4) Epoch 21, batch 2350, loss[loss=0.1648, simple_loss=0.2531, pruned_loss=0.03828, over 18274.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2528, pruned_loss=0.03694, over 3584476.41 frames. ], batch size: 57, lr: 5.35e-03, grad_scale: 8.0 +2023-03-09 14:21:47,565 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0051, 3.8045, 5.2079, 2.9553, 4.6566, 2.7759, 3.0693, 1.8608], + device='cuda:1'), covar=tensor([0.1096, 0.0875, 0.0118, 0.0898, 0.0435, 0.2446, 0.2718, 0.2161], + device='cuda:1'), in_proj_covar=tensor([0.0220, 0.0246, 0.0190, 0.0197, 0.0259, 0.0272, 0.0322, 0.0235], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 14:22:04,856 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.756e+02 2.674e+02 3.137e+02 3.722e+02 6.361e+02, threshold=6.274e+02, percent-clipped=0.0 +2023-03-09 14:22:08,959 INFO [train.py:898] (1/4) Epoch 21, batch 2400, loss[loss=0.1535, simple_loss=0.2395, pruned_loss=0.03377, over 18534.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2523, pruned_loss=0.03668, over 3594168.99 frames. ], batch size: 49, lr: 5.35e-03, grad_scale: 8.0 +2023-03-09 14:23:06,749 INFO [train.py:898] (1/4) Epoch 21, batch 2450, loss[loss=0.179, simple_loss=0.2715, pruned_loss=0.0432, over 18026.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2523, pruned_loss=0.03667, over 3595042.54 frames. ], batch size: 65, lr: 5.35e-03, grad_scale: 8.0 +2023-03-09 14:23:14,282 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5498, 4.1266, 4.0854, 3.2357, 3.4368, 3.2495, 2.4638, 2.2507], + device='cuda:1'), covar=tensor([0.0230, 0.0164, 0.0094, 0.0319, 0.0363, 0.0238, 0.0757, 0.0877], + device='cuda:1'), in_proj_covar=tensor([0.0069, 0.0059, 0.0062, 0.0067, 0.0089, 0.0066, 0.0077, 0.0083], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 14:23:52,903 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-03-09 14:24:00,721 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.788e+02 2.611e+02 3.062e+02 3.854e+02 9.257e+02, threshold=6.124e+02, percent-clipped=4.0 +2023-03-09 14:24:03,969 INFO [train.py:898] (1/4) Epoch 21, batch 2500, loss[loss=0.154, simple_loss=0.2497, pruned_loss=0.02917, over 18628.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2534, pruned_loss=0.03707, over 3588904.06 frames. 
], batch size: 52, lr: 5.35e-03, grad_scale: 8.0 +2023-03-09 14:24:22,936 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9062, 3.6067, 4.9811, 2.7906, 4.3557, 2.5270, 3.0380, 1.7573], + device='cuda:1'), covar=tensor([0.1141, 0.0979, 0.0166, 0.0941, 0.0540, 0.2715, 0.2734, 0.2217], + device='cuda:1'), in_proj_covar=tensor([0.0218, 0.0244, 0.0189, 0.0195, 0.0256, 0.0269, 0.0319, 0.0232], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 14:24:46,830 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-09 14:24:59,559 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75228.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:25:02,699 INFO [train.py:898] (1/4) Epoch 21, batch 2550, loss[loss=0.1638, simple_loss=0.2608, pruned_loss=0.03339, over 18473.00 frames. ], tot_loss[loss=0.1635, simple_loss=0.2532, pruned_loss=0.03693, over 3588315.16 frames. ], batch size: 51, lr: 5.35e-03, grad_scale: 8.0 +2023-03-09 14:25:26,233 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75251.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:25:33,760 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.34 vs. limit=5.0 +2023-03-09 14:25:42,167 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75265.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:25:48,520 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75270.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:25:57,522 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.566e+02 2.536e+02 3.166e+02 3.977e+02 7.777e+02, threshold=6.331e+02, percent-clipped=2.0 +2023-03-09 14:26:01,083 INFO [train.py:898] (1/4) Epoch 21, batch 2600, loss[loss=0.1551, simple_loss=0.2414, pruned_loss=0.03439, over 18413.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2519, pruned_loss=0.03652, over 3591576.02 frames. ], batch size: 48, lr: 5.34e-03, grad_scale: 8.0 +2023-03-09 14:26:11,277 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75289.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:26:16,978 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8468, 3.7314, 2.4618, 4.5745, 3.3056, 4.4704, 2.8388, 4.3438], + device='cuda:1'), covar=tensor([0.0527, 0.0770, 0.1407, 0.0450, 0.0756, 0.0286, 0.1069, 0.0328], + device='cuda:1'), in_proj_covar=tensor([0.0216, 0.0228, 0.0192, 0.0287, 0.0193, 0.0265, 0.0202, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 14:26:22,513 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75299.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:26:45,873 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75320.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:26:59,264 INFO [train.py:898] (1/4) Epoch 21, batch 2650, loss[loss=0.1716, simple_loss=0.2587, pruned_loss=0.04219, over 18089.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.252, pruned_loss=0.03667, over 3589551.37 frames. 
], batch size: 62, lr: 5.34e-03, grad_scale: 8.0 +2023-03-09 14:26:59,621 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75331.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 14:27:53,150 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.804e+02 2.756e+02 3.274e+02 3.890e+02 7.803e+02, threshold=6.547e+02, percent-clipped=3.0 +2023-03-09 14:27:57,176 INFO [train.py:898] (1/4) Epoch 21, batch 2700, loss[loss=0.1529, simple_loss=0.2487, pruned_loss=0.02855, over 18401.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2524, pruned_loss=0.03657, over 3597195.17 frames. ], batch size: 48, lr: 5.34e-03, grad_scale: 8.0 +2023-03-09 14:28:06,518 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9548, 4.9400, 5.0175, 4.7500, 4.8004, 4.7828, 5.1251, 5.0717], + device='cuda:1'), covar=tensor([0.0078, 0.0069, 0.0062, 0.0116, 0.0064, 0.0138, 0.0070, 0.0089], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0069, 0.0074, 0.0092, 0.0074, 0.0103, 0.0085, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 14:28:55,176 INFO [train.py:898] (1/4) Epoch 21, batch 2750, loss[loss=0.1466, simple_loss=0.2232, pruned_loss=0.03504, over 18443.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2524, pruned_loss=0.03687, over 3596869.54 frames. ], batch size: 43, lr: 5.34e-03, grad_scale: 4.0 +2023-03-09 14:29:19,175 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.13 vs. limit=5.0 +2023-03-09 14:29:40,858 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.58 vs. limit=2.0 +2023-03-09 14:29:50,239 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.664e+02 2.526e+02 3.249e+02 3.895e+02 6.224e+02, threshold=6.498e+02, percent-clipped=0.0 +2023-03-09 14:29:52,457 INFO [train.py:898] (1/4) Epoch 21, batch 2800, loss[loss=0.1431, simple_loss=0.2265, pruned_loss=0.02984, over 18525.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2516, pruned_loss=0.03648, over 3603274.14 frames. ], batch size: 44, lr: 5.34e-03, grad_scale: 8.0 +2023-03-09 14:30:50,509 INFO [train.py:898] (1/4) Epoch 21, batch 2850, loss[loss=0.153, simple_loss=0.2318, pruned_loss=0.03713, over 17648.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2522, pruned_loss=0.03673, over 3594380.57 frames. ], batch size: 39, lr: 5.34e-03, grad_scale: 8.0 +2023-03-09 14:31:30,953 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75565.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:31:46,371 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.918e+02 2.851e+02 3.414e+02 4.346e+02 8.993e+02, threshold=6.828e+02, percent-clipped=4.0 +2023-03-09 14:31:48,601 INFO [train.py:898] (1/4) Epoch 21, batch 2900, loss[loss=0.1715, simple_loss=0.2699, pruned_loss=0.03656, over 18488.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2532, pruned_loss=0.03683, over 3593215.70 frames. 
], batch size: 53, lr: 5.33e-03, grad_scale: 8.0 +2023-03-09 14:31:50,043 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9915, 3.7260, 5.0557, 4.4786, 3.2389, 3.0711, 4.5410, 5.2471], + device='cuda:1'), covar=tensor([0.0764, 0.1541, 0.0234, 0.0418, 0.1027, 0.1223, 0.0438, 0.0277], + device='cuda:1'), in_proj_covar=tensor([0.0148, 0.0275, 0.0152, 0.0181, 0.0191, 0.0190, 0.0195, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 14:31:52,549 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75584.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:32:27,074 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75613.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:32:34,980 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75620.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:32:41,938 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75626.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 14:32:47,479 INFO [train.py:898] (1/4) Epoch 21, batch 2950, loss[loss=0.1802, simple_loss=0.2698, pruned_loss=0.04525, over 17944.00 frames. ], tot_loss[loss=0.1635, simple_loss=0.2535, pruned_loss=0.03671, over 3599622.55 frames. ], batch size: 65, lr: 5.33e-03, grad_scale: 8.0 +2023-03-09 14:33:12,637 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.3701, 3.7817, 5.0067, 3.9699, 2.7338, 2.5978, 4.2144, 5.1450], + device='cuda:1'), covar=tensor([0.0930, 0.1306, 0.0135, 0.0517, 0.1140, 0.1324, 0.0429, 0.0203], + device='cuda:1'), in_proj_covar=tensor([0.0147, 0.0274, 0.0152, 0.0181, 0.0190, 0.0190, 0.0195, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 14:33:31,425 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75668.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:33:43,525 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.752e+02 2.513e+02 3.131e+02 3.690e+02 6.570e+02, threshold=6.263e+02, percent-clipped=0.0 +2023-03-09 14:33:45,503 INFO [train.py:898] (1/4) Epoch 21, batch 3000, loss[loss=0.1877, simple_loss=0.2751, pruned_loss=0.05017, over 18488.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2535, pruned_loss=0.03687, over 3598084.54 frames. ], batch size: 59, lr: 5.33e-03, grad_scale: 8.0 +2023-03-09 14:33:45,504 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 14:33:57,965 INFO [train.py:932] (1/4) Epoch 21, validation: loss=0.1498, simple_loss=0.2495, pruned_loss=0.02501, over 944034.00 frames. +2023-03-09 14:33:57,966 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 14:34:48,799 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8571, 2.9596, 2.9003, 3.1440, 3.9152, 3.7851, 3.3795, 3.1438], + device='cuda:1'), covar=tensor([0.0166, 0.0290, 0.0413, 0.0296, 0.0159, 0.0145, 0.0282, 0.0319], + device='cuda:1'), in_proj_covar=tensor([0.0138, 0.0132, 0.0161, 0.0156, 0.0128, 0.0116, 0.0152, 0.0152], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 14:34:55,551 INFO [train.py:898] (1/4) Epoch 21, batch 3050, loss[loss=0.1588, simple_loss=0.2442, pruned_loss=0.03674, over 18405.00 frames. 
], tot_loss[loss=0.1631, simple_loss=0.2527, pruned_loss=0.03673, over 3600959.57 frames. ], batch size: 48, lr: 5.33e-03, grad_scale: 8.0 +2023-03-09 14:35:12,396 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1453, 5.2092, 5.2765, 5.0054, 4.9872, 5.0720, 5.3686, 5.3526], + device='cuda:1'), covar=tensor([0.0057, 0.0048, 0.0044, 0.0087, 0.0057, 0.0143, 0.0049, 0.0068], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0070, 0.0074, 0.0092, 0.0075, 0.0104, 0.0086, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 14:35:51,758 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.562e+02 2.753e+02 3.195e+02 3.749e+02 6.792e+02, threshold=6.390e+02, percent-clipped=2.0 +2023-03-09 14:35:54,446 INFO [train.py:898] (1/4) Epoch 21, batch 3100, loss[loss=0.1613, simple_loss=0.2375, pruned_loss=0.04256, over 17592.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2524, pruned_loss=0.03652, over 3602049.60 frames. ], batch size: 39, lr: 5.33e-03, grad_scale: 8.0 +2023-03-09 14:36:04,431 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.44 vs. limit=2.0 +2023-03-09 14:36:46,442 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9386, 5.4068, 5.3372, 5.3839, 4.8723, 5.2977, 4.7045, 5.2380], + device='cuda:1'), covar=tensor([0.0199, 0.0252, 0.0181, 0.0385, 0.0398, 0.0201, 0.0970, 0.0298], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0258, 0.0253, 0.0325, 0.0267, 0.0266, 0.0308, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 14:36:52,846 INFO [train.py:898] (1/4) Epoch 21, batch 3150, loss[loss=0.1584, simple_loss=0.2385, pruned_loss=0.03913, over 18171.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2524, pruned_loss=0.03666, over 3601221.82 frames. ], batch size: 44, lr: 5.32e-03, grad_scale: 8.0 +2023-03-09 14:36:59,371 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0599, 5.3564, 2.6928, 5.2108, 5.0752, 5.3459, 5.1678, 2.7089], + device='cuda:1'), covar=tensor([0.0204, 0.0075, 0.0807, 0.0094, 0.0067, 0.0067, 0.0089, 0.1048], + device='cuda:1'), in_proj_covar=tensor([0.0088, 0.0081, 0.0095, 0.0095, 0.0085, 0.0076, 0.0085, 0.0097], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 14:37:44,044 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5922, 2.8968, 4.4068, 3.7929, 2.6653, 4.6419, 3.8775, 2.9968], + device='cuda:1'), covar=tensor([0.0503, 0.1431, 0.0249, 0.0422, 0.1525, 0.0201, 0.0562, 0.0956], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0239, 0.0212, 0.0163, 0.0222, 0.0212, 0.0245, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 14:37:49,361 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.108e+02 2.640e+02 3.076e+02 3.933e+02 6.282e+02, threshold=6.152e+02, percent-clipped=0.0 +2023-03-09 14:37:51,672 INFO [train.py:898] (1/4) Epoch 21, batch 3200, loss[loss=0.1776, simple_loss=0.2686, pruned_loss=0.04335, over 16111.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2527, pruned_loss=0.03651, over 3599422.20 frames. 
], batch size: 94, lr: 5.32e-03, grad_scale: 8.0 +2023-03-09 14:37:55,400 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75884.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:38:28,188 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.71 vs. limit=2.0 +2023-03-09 14:38:45,222 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75926.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 14:38:50,629 INFO [train.py:898] (1/4) Epoch 21, batch 3250, loss[loss=0.1776, simple_loss=0.2713, pruned_loss=0.04199, over 18376.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2522, pruned_loss=0.03631, over 3608735.73 frames. ], batch size: 55, lr: 5.32e-03, grad_scale: 8.0 +2023-03-09 14:38:51,962 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75932.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:39:25,484 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 14:39:41,429 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75974.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:39:46,931 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.093e+02 2.622e+02 3.085e+02 3.781e+02 1.173e+03, threshold=6.171e+02, percent-clipped=7.0 +2023-03-09 14:39:49,195 INFO [train.py:898] (1/4) Epoch 21, batch 3300, loss[loss=0.1725, simple_loss=0.2645, pruned_loss=0.04027, over 18541.00 frames. ], tot_loss[loss=0.1625, simple_loss=0.2524, pruned_loss=0.03633, over 3594213.06 frames. ], batch size: 54, lr: 5.32e-03, grad_scale: 8.0 +2023-03-09 14:39:52,203 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.14 vs. limit=5.0 +2023-03-09 14:40:52,535 INFO [train.py:898] (1/4) Epoch 21, batch 3350, loss[loss=0.1444, simple_loss=0.2272, pruned_loss=0.03075, over 18423.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2531, pruned_loss=0.03688, over 3586720.89 frames. ], batch size: 42, lr: 5.32e-03, grad_scale: 8.0 +2023-03-09 14:41:48,799 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.784e+02 2.702e+02 3.276e+02 4.045e+02 1.355e+03, threshold=6.552e+02, percent-clipped=6.0 +2023-03-09 14:41:51,073 INFO [train.py:898] (1/4) Epoch 21, batch 3400, loss[loss=0.1514, simple_loss=0.2426, pruned_loss=0.03012, over 18257.00 frames. ], tot_loss[loss=0.163, simple_loss=0.2524, pruned_loss=0.03681, over 3579624.43 frames. 
], batch size: 47, lr: 5.32e-03, grad_scale: 8.0 +2023-03-09 14:42:06,078 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0304, 5.4502, 2.7492, 5.2470, 5.1109, 5.4342, 5.2666, 2.7888], + device='cuda:1'), covar=tensor([0.0200, 0.0052, 0.0770, 0.0071, 0.0069, 0.0054, 0.0068, 0.0962], + device='cuda:1'), in_proj_covar=tensor([0.0089, 0.0081, 0.0097, 0.0096, 0.0086, 0.0076, 0.0086, 0.0098], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 14:42:35,535 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3431, 5.3073, 4.9500, 5.2828, 5.2607, 4.6851, 5.1892, 4.9315], + device='cuda:1'), covar=tensor([0.0481, 0.0449, 0.1389, 0.0776, 0.0626, 0.0435, 0.0421, 0.1051], + device='cuda:1'), in_proj_covar=tensor([0.0502, 0.0564, 0.0707, 0.0440, 0.0454, 0.0511, 0.0545, 0.0685], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 14:42:49,331 INFO [train.py:898] (1/4) Epoch 21, batch 3450, loss[loss=0.1739, simple_loss=0.2639, pruned_loss=0.04194, over 18379.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2527, pruned_loss=0.03701, over 3585507.20 frames. ], batch size: 48, lr: 5.31e-03, grad_scale: 8.0 +2023-03-09 14:42:58,912 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.85 vs. limit=5.0 +2023-03-09 14:43:45,619 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.955e+02 2.600e+02 3.134e+02 3.743e+02 8.321e+02, threshold=6.269e+02, percent-clipped=3.0 +2023-03-09 14:43:47,842 INFO [train.py:898] (1/4) Epoch 21, batch 3500, loss[loss=0.1727, simple_loss=0.2664, pruned_loss=0.03946, over 18573.00 frames. ], tot_loss[loss=0.1625, simple_loss=0.2518, pruned_loss=0.03654, over 3599601.09 frames. ], batch size: 54, lr: 5.31e-03, grad_scale: 8.0 +2023-03-09 14:44:43,826 INFO [train.py:898] (1/4) Epoch 21, batch 3550, loss[loss=0.1616, simple_loss=0.2468, pruned_loss=0.03819, over 18500.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2519, pruned_loss=0.03678, over 3590773.12 frames. 
], batch size: 47, lr: 5.31e-03, grad_scale: 4.0 +2023-03-09 14:45:20,238 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8077, 4.9140, 4.9410, 4.7152, 4.7267, 4.7073, 5.0466, 5.0323], + device='cuda:1'), covar=tensor([0.0085, 0.0069, 0.0076, 0.0106, 0.0061, 0.0163, 0.0075, 0.0104], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0069, 0.0074, 0.0092, 0.0074, 0.0104, 0.0086, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 14:45:24,524 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76269.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 14:45:36,125 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.098e+02 2.775e+02 3.080e+02 3.770e+02 8.534e+02, threshold=6.159e+02, percent-clipped=2.0 +2023-03-09 14:45:36,624 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6189, 2.4364, 2.6755, 2.7675, 3.2889, 4.8691, 4.8624, 3.3470], + device='cuda:1'), covar=tensor([0.1929, 0.2546, 0.3005, 0.1867, 0.2403, 0.0251, 0.0343, 0.1010], + device='cuda:1'), in_proj_covar=tensor([0.0302, 0.0348, 0.0384, 0.0278, 0.0390, 0.0243, 0.0297, 0.0256], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 14:45:37,247 INFO [train.py:898] (1/4) Epoch 21, batch 3600, loss[loss=0.1531, simple_loss=0.2408, pruned_loss=0.03268, over 18365.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2523, pruned_loss=0.03667, over 3601706.73 frames. ], batch size: 46, lr: 5.31e-03, grad_scale: 8.0 +2023-03-09 14:45:41,972 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6751, 3.5703, 4.9090, 4.2730, 3.3033, 2.9096, 4.3976, 5.1514], + device='cuda:1'), covar=tensor([0.0763, 0.1555, 0.0202, 0.0438, 0.0933, 0.1241, 0.0387, 0.0206], + device='cuda:1'), in_proj_covar=tensor([0.0147, 0.0275, 0.0154, 0.0182, 0.0191, 0.0190, 0.0195, 0.0196], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 14:45:51,518 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76294.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:46:42,826 INFO [train.py:898] (1/4) Epoch 22, batch 0, loss[loss=0.1637, simple_loss=0.2583, pruned_loss=0.03454, over 18473.00 frames. ], tot_loss[loss=0.1637, simple_loss=0.2583, pruned_loss=0.03454, over 18473.00 frames. ], batch size: 53, lr: 5.18e-03, grad_scale: 8.0 +2023-03-09 14:46:42,826 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 14:46:54,609 INFO [train.py:932] (1/4) Epoch 22, validation: loss=0.1504, simple_loss=0.25, pruned_loss=0.02541, over 944034.00 frames. +2023-03-09 14:46:54,610 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 14:46:57,493 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-09 14:47:04,743 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76323.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 14:47:13,364 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76330.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 14:47:39,559 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.45 vs. 
limit=5.0 +2023-03-09 14:47:42,735 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76355.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:47:53,546 INFO [train.py:898] (1/4) Epoch 22, batch 50, loss[loss=0.1659, simple_loss=0.258, pruned_loss=0.03689, over 17895.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2516, pruned_loss=0.03575, over 813912.57 frames. ], batch size: 65, lr: 5.18e-03, grad_scale: 8.0 +2023-03-09 14:48:11,539 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.962e+02 2.572e+02 3.075e+02 3.753e+02 8.462e+02, threshold=6.150e+02, percent-clipped=5.0 +2023-03-09 14:48:16,367 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76384.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 14:48:52,272 INFO [train.py:898] (1/4) Epoch 22, batch 100, loss[loss=0.1593, simple_loss=0.2497, pruned_loss=0.03447, over 18500.00 frames. ], tot_loss[loss=0.1616, simple_loss=0.2515, pruned_loss=0.03583, over 1429280.44 frames. ], batch size: 51, lr: 5.18e-03, grad_scale: 8.0 +2023-03-09 14:49:26,591 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8288, 4.9894, 4.9774, 4.6805, 4.6676, 4.7294, 5.0519, 5.0007], + device='cuda:1'), covar=tensor([0.0086, 0.0078, 0.0070, 0.0129, 0.0070, 0.0150, 0.0106, 0.0129], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0068, 0.0073, 0.0091, 0.0073, 0.0102, 0.0085, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 14:49:49,357 INFO [train.py:898] (1/4) Epoch 22, batch 150, loss[loss=0.1793, simple_loss=0.27, pruned_loss=0.0443, over 18258.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2531, pruned_loss=0.03629, over 1915670.54 frames. ], batch size: 57, lr: 5.18e-03, grad_scale: 8.0 +2023-03-09 14:50:05,945 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.617e+02 2.647e+02 3.251e+02 3.870e+02 7.271e+02, threshold=6.503e+02, percent-clipped=3.0 +2023-03-09 14:50:46,569 INFO [train.py:898] (1/4) Epoch 22, batch 200, loss[loss=0.147, simple_loss=0.2339, pruned_loss=0.03003, over 18430.00 frames. ], tot_loss[loss=0.1622, simple_loss=0.2524, pruned_loss=0.03604, over 2301277.83 frames. ], batch size: 43, lr: 5.18e-03, grad_scale: 8.0 +2023-03-09 14:51:45,316 INFO [train.py:898] (1/4) Epoch 22, batch 250, loss[loss=0.1682, simple_loss=0.262, pruned_loss=0.03721, over 16082.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2523, pruned_loss=0.0361, over 2580068.85 frames. ], batch size: 94, lr: 5.18e-03, grad_scale: 8.0 +2023-03-09 14:52:02,089 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.820e+02 2.535e+02 2.870e+02 3.429e+02 6.205e+02, threshold=5.739e+02, percent-clipped=0.0 +2023-03-09 14:52:44,277 INFO [train.py:898] (1/4) Epoch 22, batch 300, loss[loss=0.1639, simple_loss=0.2617, pruned_loss=0.03308, over 18555.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2528, pruned_loss=0.036, over 2814002.28 frames. ], batch size: 54, lr: 5.17e-03, grad_scale: 8.0 +2023-03-09 14:52:55,644 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76625.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 14:53:25,036 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76650.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:53:42,869 INFO [train.py:898] (1/4) Epoch 22, batch 350, loss[loss=0.1835, simple_loss=0.2743, pruned_loss=0.04634, over 16037.00 frames. 
], tot_loss[loss=0.1622, simple_loss=0.2526, pruned_loss=0.03592, over 2991791.31 frames. ], batch size: 94, lr: 5.17e-03, grad_scale: 8.0 +2023-03-09 14:53:58,884 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76679.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 14:53:59,705 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.678e+02 2.562e+02 3.001e+02 3.817e+02 1.277e+03, threshold=6.002e+02, percent-clipped=2.0 +2023-03-09 14:54:41,498 INFO [train.py:898] (1/4) Epoch 22, batch 400, loss[loss=0.1479, simple_loss=0.238, pruned_loss=0.02893, over 18245.00 frames. ], tot_loss[loss=0.1612, simple_loss=0.2514, pruned_loss=0.03553, over 3129879.24 frames. ], batch size: 45, lr: 5.17e-03, grad_scale: 8.0 +2023-03-09 14:55:28,848 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-09 14:55:30,666 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-09 14:55:40,008 INFO [train.py:898] (1/4) Epoch 22, batch 450, loss[loss=0.148, simple_loss=0.2332, pruned_loss=0.03145, over 18368.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2508, pruned_loss=0.0355, over 3228740.02 frames. ], batch size: 46, lr: 5.17e-03, grad_scale: 8.0 +2023-03-09 14:55:41,903 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-09 14:55:57,343 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.685e+02 2.597e+02 2.928e+02 3.376e+02 5.957e+02, threshold=5.857e+02, percent-clipped=0.0 +2023-03-09 14:56:28,268 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9199, 5.4556, 5.4094, 5.4478, 4.9153, 5.3525, 4.7158, 5.3258], + device='cuda:1'), covar=tensor([0.0242, 0.0245, 0.0192, 0.0422, 0.0409, 0.0228, 0.1091, 0.0354], + device='cuda:1'), in_proj_covar=tensor([0.0218, 0.0263, 0.0258, 0.0333, 0.0273, 0.0270, 0.0314, 0.0264], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 14:56:38,194 INFO [train.py:898] (1/4) Epoch 22, batch 500, loss[loss=0.1687, simple_loss=0.2682, pruned_loss=0.03457, over 18492.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2512, pruned_loss=0.03555, over 3303494.39 frames. 
], batch size: 59, lr: 5.17e-03, grad_scale: 8.0 +2023-03-09 14:56:42,615 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8365, 3.6464, 4.9549, 2.7435, 4.4060, 2.5553, 2.9840, 1.7575], + device='cuda:1'), covar=tensor([0.1142, 0.0915, 0.0172, 0.1009, 0.0521, 0.2720, 0.2843, 0.2227], + device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0246, 0.0192, 0.0198, 0.0258, 0.0272, 0.0322, 0.0235], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 14:57:06,174 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76839.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:57:12,986 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8826, 3.5683, 5.0962, 3.0392, 4.5519, 2.5609, 3.0372, 1.8175], + device='cuda:1'), covar=tensor([0.1095, 0.0875, 0.0137, 0.0808, 0.0448, 0.2447, 0.2505, 0.2038], + device='cuda:1'), in_proj_covar=tensor([0.0218, 0.0245, 0.0191, 0.0197, 0.0256, 0.0270, 0.0319, 0.0233], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 14:57:36,633 INFO [train.py:898] (1/4) Epoch 22, batch 550, loss[loss=0.1688, simple_loss=0.2551, pruned_loss=0.04121, over 18313.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2517, pruned_loss=0.03594, over 3362238.22 frames. ], batch size: 57, lr: 5.17e-03, grad_scale: 8.0 +2023-03-09 14:57:53,065 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0715, 5.5297, 5.2427, 5.3504, 5.1378, 5.0425, 5.6060, 5.5167], + device='cuda:1'), covar=tensor([0.1050, 0.0803, 0.0663, 0.0702, 0.1469, 0.0654, 0.0618, 0.0783], + device='cuda:1'), in_proj_covar=tensor([0.0604, 0.0527, 0.0380, 0.0546, 0.0733, 0.0545, 0.0746, 0.0565], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:1') +2023-03-09 14:57:53,941 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.970e+02 2.654e+02 3.120e+02 3.965e+02 8.304e+02, threshold=6.239e+02, percent-clipped=1.0 +2023-03-09 14:58:17,066 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76900.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:58:34,771 INFO [train.py:898] (1/4) Epoch 22, batch 600, loss[loss=0.1773, simple_loss=0.2711, pruned_loss=0.04174, over 18280.00 frames. ], tot_loss[loss=0.1619, simple_loss=0.2522, pruned_loss=0.03583, over 3415955.90 frames. ], batch size: 57, lr: 5.16e-03, grad_scale: 8.0 +2023-03-09 14:58:47,325 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76925.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 14:59:14,888 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76950.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:59:32,244 INFO [train.py:898] (1/4) Epoch 22, batch 650, loss[loss=0.1393, simple_loss=0.2177, pruned_loss=0.03045, over 17683.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2517, pruned_loss=0.03595, over 3466906.23 frames. 
], batch size: 39, lr: 5.16e-03, grad_scale: 8.0 +2023-03-09 14:59:42,055 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76973.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 14:59:49,470 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76979.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 14:59:50,127 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.663e+02 2.460e+02 2.912e+02 3.515e+02 5.898e+02, threshold=5.824e+02, percent-clipped=0.0 +2023-03-09 15:00:10,803 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76998.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:00:30,420 INFO [train.py:898] (1/4) Epoch 22, batch 700, loss[loss=0.169, simple_loss=0.2559, pruned_loss=0.041, over 17699.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2516, pruned_loss=0.03597, over 3495439.05 frames. ], batch size: 70, lr: 5.16e-03, grad_scale: 8.0 +2023-03-09 15:00:31,853 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3499, 5.3250, 4.9360, 5.2437, 5.2887, 4.6241, 5.1453, 4.9422], + device='cuda:1'), covar=tensor([0.0457, 0.0445, 0.1452, 0.0817, 0.0565, 0.0468, 0.0496, 0.1022], + device='cuda:1'), in_proj_covar=tensor([0.0498, 0.0559, 0.0700, 0.0436, 0.0448, 0.0508, 0.0542, 0.0672], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 15:00:42,807 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7362, 2.4707, 2.6562, 2.7980, 3.3314, 5.0222, 4.9173, 3.4828], + device='cuda:1'), covar=tensor([0.1880, 0.2472, 0.3071, 0.1851, 0.2380, 0.0217, 0.0360, 0.0982], + device='cuda:1'), in_proj_covar=tensor([0.0305, 0.0349, 0.0387, 0.0281, 0.0393, 0.0245, 0.0298, 0.0257], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 15:00:43,766 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0453, 5.1774, 5.1424, 4.8700, 4.8683, 4.8942, 5.2417, 5.2580], + device='cuda:1'), covar=tensor([0.0079, 0.0052, 0.0064, 0.0106, 0.0060, 0.0163, 0.0064, 0.0082], + device='cuda:1'), in_proj_covar=tensor([0.0094, 0.0069, 0.0074, 0.0093, 0.0075, 0.0104, 0.0086, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 15:00:45,313 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77027.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 15:01:29,146 INFO [train.py:898] (1/4) Epoch 22, batch 750, loss[loss=0.1689, simple_loss=0.2652, pruned_loss=0.03627, over 17827.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2525, pruned_loss=0.0364, over 3510995.13 frames. ], batch size: 70, lr: 5.16e-03, grad_scale: 8.0 +2023-03-09 15:01:47,410 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.651e+02 2.728e+02 3.361e+02 4.039e+02 1.393e+03, threshold=6.722e+02, percent-clipped=6.0 +2023-03-09 15:02:05,338 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77095.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:02:27,379 INFO [train.py:898] (1/4) Epoch 22, batch 800, loss[loss=0.199, simple_loss=0.2909, pruned_loss=0.05354, over 18374.00 frames. ], tot_loss[loss=0.163, simple_loss=0.253, pruned_loss=0.03647, over 3522802.17 frames. 
+2023-03-09 15:02:46,089 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1467, 2.5887, 2.3813, 2.6164, 3.2808, 3.0614, 2.8313, 2.7157],
+ device='cuda:1'), covar=tensor([0.0256, 0.0249, 0.0572, 0.0408, 0.0203, 0.0210, 0.0393, 0.0360],
+ device='cuda:1'), in_proj_covar=tensor([0.0141, 0.0135, 0.0166, 0.0160, 0.0131, 0.0119, 0.0155, 0.0157],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 15:02:51,240 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5025, 3.8562, 2.2785, 3.6572, 4.8167, 2.3708, 3.4770, 3.7033],
+ device='cuda:1'), covar=tensor([0.0233, 0.1124, 0.1783, 0.0719, 0.0100, 0.1371, 0.0776, 0.0755],
+ device='cuda:1'), in_proj_covar=tensor([0.0169, 0.0274, 0.0206, 0.0196, 0.0127, 0.0184, 0.0216, 0.0223],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 15:03:16,274 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77156.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 15:03:25,832 INFO [train.py:898] (1/4) Epoch 22, batch 850, loss[loss=0.1578, simple_loss=0.2526, pruned_loss=0.03153, over 18490.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2529, pruned_loss=0.03641, over 3546140.75 frames. ], batch size: 53, lr: 5.16e-03, grad_scale: 8.0
+2023-03-09 15:03:43,940 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.868e+02 2.628e+02 3.238e+02 3.999e+02 6.972e+02, threshold=6.476e+02, percent-clipped=1.0
+2023-03-09 15:04:02,039 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77195.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:04:19,479 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8605, 4.5057, 4.5300, 3.4839, 3.7182, 3.4289, 2.6885, 2.4630],
+ device='cuda:1'), covar=tensor([0.0188, 0.0144, 0.0070, 0.0279, 0.0320, 0.0214, 0.0682, 0.0844],
+ device='cuda:1'), in_proj_covar=tensor([0.0070, 0.0059, 0.0062, 0.0068, 0.0088, 0.0066, 0.0077, 0.0083],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006],
+ device='cuda:1')
+2023-03-09 15:04:20,613 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77211.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:04:24,976 INFO [train.py:898] (1/4) Epoch 22, batch 900, loss[loss=0.153, simple_loss=0.2465, pruned_loss=0.02971, over 16130.00 frames. ], tot_loss[loss=0.163, simple_loss=0.253, pruned_loss=0.03645, over 3552008.42 frames. ], batch size: 94, lr: 5.15e-03, grad_scale: 8.0
+2023-03-09 15:05:20,894 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5452, 3.2964, 4.3530, 3.9891, 3.1027, 2.9296, 3.9043, 4.5065],
+ device='cuda:1'), covar=tensor([0.0846, 0.1416, 0.0268, 0.0419, 0.0988, 0.1165, 0.0474, 0.0316],
+ device='cuda:1'), in_proj_covar=tensor([0.0148, 0.0274, 0.0154, 0.0181, 0.0191, 0.0190, 0.0196, 0.0198],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 15:05:23,899 INFO [train.py:898] (1/4) Epoch 22, batch 950, loss[loss=0.1452, simple_loss=0.2317, pruned_loss=0.02933, over 18275.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2531, pruned_loss=0.03659, over 3543444.44 frames. ], batch size: 47, lr: 5.15e-03, grad_scale: 8.0
+2023-03-09 15:05:32,094 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77272.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:05:33,054 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77273.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:05:37,425 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77277.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:05:41,082 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.729e+02 2.727e+02 3.251e+02 3.771e+02 1.514e+03, threshold=6.501e+02, percent-clipped=4.0
+2023-03-09 15:06:22,532 INFO [train.py:898] (1/4) Epoch 22, batch 1000, loss[loss=0.1571, simple_loss=0.2388, pruned_loss=0.03763, over 18106.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2525, pruned_loss=0.03655, over 3556816.76 frames. ], batch size: 40, lr: 5.15e-03, grad_scale: 8.0
+2023-03-09 15:06:45,397 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77334.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:06:50,075 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77338.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:06:55,178 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5097, 2.7257, 4.1576, 3.5475, 2.7760, 4.3906, 3.7539, 2.7798],
+ device='cuda:1'), covar=tensor([0.0523, 0.1497, 0.0279, 0.0461, 0.1390, 0.0219, 0.0619, 0.1021],
+ device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0240, 0.0214, 0.0164, 0.0224, 0.0214, 0.0249, 0.0199],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-09 15:07:21,344 INFO [train.py:898] (1/4) Epoch 22, batch 1050, loss[loss=0.1669, simple_loss=0.2585, pruned_loss=0.03767, over 18537.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2522, pruned_loss=0.03659, over 3549077.24 frames. ], batch size: 49, lr: 5.15e-03, grad_scale: 8.0
+2023-03-09 15:07:38,060 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.901e+02 2.608e+02 3.171e+02 4.041e+02 8.045e+02, threshold=6.343e+02, percent-clipped=4.0
+2023-03-09 15:07:47,872 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0
+2023-03-09 15:08:19,734 INFO [train.py:898] (1/4) Epoch 22, batch 1100, loss[loss=0.1376, simple_loss=0.2236, pruned_loss=0.02578, over 18583.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2523, pruned_loss=0.03642, over 3559672.92 frames. ], batch size: 45, lr: 5.15e-03, grad_scale: 8.0
+2023-03-09 15:08:51,998 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7281, 4.0207, 2.3442, 4.0202, 4.9953, 2.6039, 3.6061, 3.7774],
+ device='cuda:1'), covar=tensor([0.0186, 0.1088, 0.1631, 0.0580, 0.0085, 0.1162, 0.0721, 0.0788],
+ device='cuda:1'), in_proj_covar=tensor([0.0168, 0.0273, 0.0204, 0.0196, 0.0128, 0.0184, 0.0215, 0.0224],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 15:09:02,443 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77451.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 15:09:18,512 INFO [train.py:898] (1/4) Epoch 22, batch 1150, loss[loss=0.1581, simple_loss=0.2486, pruned_loss=0.03378, over 18299.00 frames. ], tot_loss[loss=0.1632, simple_loss=0.2528, pruned_loss=0.0368, over 3558824.60 frames. ], batch size: 54, lr: 5.15e-03, grad_scale: 8.0
+2023-03-09 15:09:35,428 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.520e+02 2.405e+02 2.730e+02 3.168e+02 5.091e+02, threshold=5.460e+02, percent-clipped=0.0
+2023-03-09 15:09:52,926 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77495.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:10:16,605 INFO [train.py:898] (1/4) Epoch 22, batch 1200, loss[loss=0.136, simple_loss=0.2223, pruned_loss=0.02489, over 17613.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2523, pruned_loss=0.03643, over 3574025.05 frames. ], batch size: 39, lr: 5.14e-03, grad_scale: 8.0
+2023-03-09 15:10:46,546 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6693, 3.7055, 3.5380, 3.2219, 3.4140, 2.7967, 2.8006, 3.7431],
+ device='cuda:1'), covar=tensor([0.0061, 0.0079, 0.0078, 0.0125, 0.0089, 0.0178, 0.0201, 0.0057],
+ device='cuda:1'), in_proj_covar=tensor([0.0140, 0.0159, 0.0134, 0.0186, 0.0142, 0.0179, 0.0181, 0.0121],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-09 15:10:47,087 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0
+2023-03-09 15:10:48,602 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77543.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:10:58,126 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0439, 5.1329, 5.1094, 4.8264, 4.9216, 4.9042, 5.1716, 5.2025],
+ device='cuda:1'), covar=tensor([0.0070, 0.0055, 0.0063, 0.0105, 0.0059, 0.0129, 0.0091, 0.0108],
+ device='cuda:1'), in_proj_covar=tensor([0.0094, 0.0069, 0.0074, 0.0092, 0.0075, 0.0103, 0.0086, 0.0085],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:1')
+2023-03-09 15:11:15,449 INFO [train.py:898] (1/4) Epoch 22, batch 1250, loss[loss=0.1808, simple_loss=0.2616, pruned_loss=0.05, over 18294.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2525, pruned_loss=0.0368, over 3564863.79 frames. ], batch size: 57, lr: 5.14e-03, grad_scale: 8.0
+2023-03-09 15:11:18,454 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77567.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:11:32,784 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.900e+02 2.671e+02 3.249e+02 3.869e+02 7.848e+02, threshold=6.498e+02, percent-clipped=7.0
+2023-03-09 15:11:43,183 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7094, 2.2841, 2.6860, 2.6561, 3.1154, 4.8343, 4.6849, 3.3476],
+ device='cuda:1'), covar=tensor([0.1721, 0.2512, 0.2903, 0.1906, 0.2462, 0.0218, 0.0388, 0.0970],
+ device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0350, 0.0387, 0.0281, 0.0394, 0.0245, 0.0297, 0.0257],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001],
+ device='cuda:1')
+2023-03-09 15:12:13,381 INFO [train.py:898] (1/4) Epoch 22, batch 1300, loss[loss=0.1521, simple_loss=0.2468, pruned_loss=0.02872, over 18405.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2523, pruned_loss=0.03663, over 3568030.57 frames. ], batch size: 52, lr: 5.14e-03, grad_scale: 8.0
+2023-03-09 15:12:14,905 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77616.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:12:30,216 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77629.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:12:34,905 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77633.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:13:12,279 INFO [train.py:898] (1/4) Epoch 22, batch 1350, loss[loss=0.1515, simple_loss=0.2382, pruned_loss=0.0324, over 18493.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2521, pruned_loss=0.03664, over 3557725.23 frames. ], batch size: 47, lr: 5.14e-03, grad_scale: 8.0
+2023-03-09 15:13:26,461 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77677.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:13:26,506 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77677.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:13:29,476 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.757e+02 2.485e+02 2.994e+02 3.704e+02 6.281e+02, threshold=5.988e+02, percent-clipped=0.0
+2023-03-09 15:14:08,693 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6877, 2.3031, 2.6155, 2.5930, 3.2502, 4.8373, 4.6568, 3.4086],
+ device='cuda:1'), covar=tensor([0.1897, 0.2662, 0.3077, 0.2038, 0.2434, 0.0249, 0.0389, 0.0972],
+ device='cuda:1'), in_proj_covar=tensor([0.0305, 0.0349, 0.0387, 0.0281, 0.0393, 0.0245, 0.0298, 0.0257],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-09 15:14:10,447 INFO [train.py:898] (1/4) Epoch 22, batch 1400, loss[loss=0.1559, simple_loss=0.2365, pruned_loss=0.03766, over 18535.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2524, pruned_loss=0.03691, over 3566159.97 frames. ], batch size: 47, lr: 5.14e-03, grad_scale: 4.0
+2023-03-09 15:14:36,611 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77738.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:14:50,911 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77751.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 15:15:07,977 INFO [train.py:898] (1/4) Epoch 22, batch 1450, loss[loss=0.1625, simple_loss=0.2571, pruned_loss=0.03391, over 18479.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2523, pruned_loss=0.03662, over 3577167.31 frames. ], batch size: 53, lr: 5.14e-03, grad_scale: 4.0
+2023-03-09 15:15:26,459 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.943e+02 2.691e+02 3.335e+02 4.479e+02 1.440e+03, threshold=6.670e+02, percent-clipped=5.0
+2023-03-09 15:15:46,838 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77799.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:16:06,816 INFO [train.py:898] (1/4) Epoch 22, batch 1500, loss[loss=0.148, simple_loss=0.2335, pruned_loss=0.03119, over 18266.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2527, pruned_loss=0.03671, over 3568012.26 frames. ], batch size: 45, lr: 5.13e-03, grad_scale: 4.0
+2023-03-09 15:17:00,132 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6754, 3.5296, 2.2294, 4.4249, 3.2317, 4.0613, 2.2834, 3.8443],
+ device='cuda:1'), covar=tensor([0.0540, 0.0830, 0.1410, 0.0540, 0.0725, 0.0289, 0.1441, 0.0493],
+ device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0228, 0.0192, 0.0289, 0.0193, 0.0265, 0.0204, 0.0202],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 15:17:04,737 INFO [train.py:898] (1/4) Epoch 22, batch 1550, loss[loss=0.1692, simple_loss=0.2674, pruned_loss=0.0355, over 18320.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2527, pruned_loss=0.03678, over 3568333.50 frames. ], batch size: 54, lr: 5.13e-03, grad_scale: 4.0
+2023-03-09 15:17:07,422 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77867.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:17:13,967 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5294, 6.1106, 5.5884, 5.8833, 5.7456, 5.5169, 6.1863, 6.1098],
+ device='cuda:1'), covar=tensor([0.1187, 0.0730, 0.0438, 0.0668, 0.1298, 0.0712, 0.0540, 0.0678],
+ device='cuda:1'), in_proj_covar=tensor([0.0621, 0.0539, 0.0388, 0.0560, 0.0756, 0.0559, 0.0767, 0.0583],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004],
+ device='cuda:1')
+2023-03-09 15:17:23,122 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.906e+02 2.725e+02 3.263e+02 3.842e+02 6.752e+02, threshold=6.526e+02, percent-clipped=2.0
+2023-03-09 15:18:03,237 INFO [train.py:898] (1/4) Epoch 22, batch 1600, loss[loss=0.2092, simple_loss=0.2995, pruned_loss=0.05949, over 12822.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2528, pruned_loss=0.03688, over 3558732.61 frames. ], batch size: 131, lr: 5.13e-03, grad_scale: 8.0
+2023-03-09 15:18:03,403 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77915.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:18:04,783 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.2549, 3.2218, 2.0829, 3.8172, 2.7065, 3.4900, 2.4697, 3.2489],
+ device='cuda:1'), covar=tensor([0.0614, 0.0796, 0.1420, 0.0573, 0.0826, 0.0336, 0.1119, 0.0498],
+ device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0228, 0.0192, 0.0287, 0.0193, 0.0265, 0.0203, 0.0202],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 15:18:19,224 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77929.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:18:24,157 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77933.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:19:00,942 INFO [train.py:898] (1/4) Epoch 22, batch 1650, loss[loss=0.1493, simple_loss=0.242, pruned_loss=0.02831, over 18416.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2532, pruned_loss=0.03676, over 3558762.61 frames. ], batch size: 48, lr: 5.13e-03, grad_scale: 8.0
+2023-03-09 15:19:09,474 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77972.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:19:15,202 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77977.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:19:19,785 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77981.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:19:20,794 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.050e+02 2.525e+02 3.024e+02 3.859e+02 1.001e+03, threshold=6.048e+02, percent-clipped=3.0
+2023-03-09 15:20:03,103 INFO [train.py:898] (1/4) Epoch 22, batch 1700, loss[loss=0.1563, simple_loss=0.2433, pruned_loss=0.03469, over 18500.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2528, pruned_loss=0.03672, over 3556797.29 frames. ], batch size: 47, lr: 5.13e-03, grad_scale: 4.0
+2023-03-09 15:20:25,570 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78033.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:21:01,844 INFO [train.py:898] (1/4) Epoch 22, batch 1750, loss[loss=0.2019, simple_loss=0.2828, pruned_loss=0.06044, over 12448.00 frames. ], tot_loss[loss=0.1635, simple_loss=0.253, pruned_loss=0.03703, over 3543117.94 frames. ], batch size: 132, lr: 5.13e-03, grad_scale: 4.0
+2023-03-09 15:21:22,781 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.851e+02 2.720e+02 3.295e+02 3.984e+02 2.464e+03, threshold=6.591e+02, percent-clipped=4.0
+2023-03-09 15:22:00,368 INFO [train.py:898] (1/4) Epoch 22, batch 1800, loss[loss=0.1678, simple_loss=0.2689, pruned_loss=0.0334, over 18403.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2525, pruned_loss=0.03659, over 3554492.35 frames. ], batch size: 52, lr: 5.12e-03, grad_scale: 4.0
+2023-03-09 15:22:43,215 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0
+2023-03-09 15:22:58,103 INFO [train.py:898] (1/4) Epoch 22, batch 1850, loss[loss=0.1725, simple_loss=0.2677, pruned_loss=0.0386, over 18251.00 frames. ], tot_loss[loss=0.1622, simple_loss=0.252, pruned_loss=0.03622, over 3569744.09 frames. ], batch size: 60, lr: 5.12e-03, grad_scale: 4.0
+2023-03-09 15:23:08,768 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8567, 5.3557, 5.3013, 5.2842, 4.8195, 5.2159, 4.7166, 5.1915],
+ device='cuda:1'), covar=tensor([0.0232, 0.0226, 0.0180, 0.0475, 0.0418, 0.0224, 0.1012, 0.0313],
+ device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0263, 0.0256, 0.0335, 0.0272, 0.0271, 0.0312, 0.0264],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:1')
+2023-03-09 15:23:19,131 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.849e+02 2.526e+02 2.955e+02 3.559e+02 8.124e+02, threshold=5.910e+02, percent-clipped=2.0
+2023-03-09 15:23:57,003 INFO [train.py:898] (1/4) Epoch 22, batch 1900, loss[loss=0.1694, simple_loss=0.2587, pruned_loss=0.04006, over 18466.00 frames. ], tot_loss[loss=0.1622, simple_loss=0.2517, pruned_loss=0.03637, over 3569982.37 frames. ], batch size: 59, lr: 5.12e-03, grad_scale: 4.0
+2023-03-09 15:24:18,168 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78232.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:24:38,870 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0
+2023-03-09 15:24:48,499 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.78 vs. limit=2.0
+2023-03-09 15:24:55,257 INFO [train.py:898] (1/4) Epoch 22, batch 1950, loss[loss=0.1618, simple_loss=0.2519, pruned_loss=0.03585, over 18568.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2528, pruned_loss=0.0369, over 3560433.78 frames. ], batch size: 54, lr: 5.12e-03, grad_scale: 4.0
+2023-03-09 15:25:03,318 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78272.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:25:14,974 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.944e+02 2.648e+02 3.049e+02 3.777e+02 6.458e+02, threshold=6.098e+02, percent-clipped=2.0
+2023-03-09 15:25:25,564 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9040, 4.2909, 2.4805, 4.1297, 5.2537, 2.6595, 3.8021, 4.0302],
+ device='cuda:1'), covar=tensor([0.0170, 0.1058, 0.1659, 0.0616, 0.0083, 0.1187, 0.0677, 0.0707],
+ device='cuda:1'), in_proj_covar=tensor([0.0170, 0.0273, 0.0205, 0.0196, 0.0129, 0.0184, 0.0216, 0.0226],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 15:25:28,589 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78293.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 15:25:53,381 INFO [train.py:898] (1/4) Epoch 22, batch 2000, loss[loss=0.1389, simple_loss=0.2198, pruned_loss=0.02894, over 18428.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2527, pruned_loss=0.03692, over 3577384.23 frames. ], batch size: 43, lr: 5.12e-03, grad_scale: 8.0
+2023-03-09 15:25:59,290 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78320.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:26:08,757 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7168, 2.3373, 2.7228, 2.7352, 3.2114, 4.8734, 4.6685, 3.8469],
+ device='cuda:1'), covar=tensor([0.1863, 0.2524, 0.3034, 0.1914, 0.2462, 0.0235, 0.0407, 0.0765],
+ device='cuda:1'), in_proj_covar=tensor([0.0306, 0.0350, 0.0387, 0.0281, 0.0394, 0.0248, 0.0298, 0.0258],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-09 15:26:14,841 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78333.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:26:52,353 INFO [train.py:898] (1/4) Epoch 22, batch 2050, loss[loss=0.1771, simple_loss=0.2791, pruned_loss=0.03758, over 18388.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2531, pruned_loss=0.03706, over 3558565.63 frames. ], batch size: 50, lr: 5.12e-03, grad_scale: 8.0
+2023-03-09 15:26:58,471 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5395, 2.2367, 2.4444, 2.5408, 3.0069, 4.7561, 4.6594, 3.7206],
+ device='cuda:1'), covar=tensor([0.1964, 0.2594, 0.3182, 0.1948, 0.2806, 0.0264, 0.0382, 0.0753],
+ device='cuda:1'), in_proj_covar=tensor([0.0304, 0.0347, 0.0384, 0.0279, 0.0391, 0.0246, 0.0296, 0.0257],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001],
+ device='cuda:1')
+2023-03-09 15:27:05,009 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0530, 3.4048, 3.3435, 2.9101, 3.0121, 2.9101, 2.4001, 2.2726],
+ device='cuda:1'), covar=tensor([0.0239, 0.0156, 0.0120, 0.0265, 0.0302, 0.0225, 0.0604, 0.0677],
+ device='cuda:1'), in_proj_covar=tensor([0.0070, 0.0060, 0.0062, 0.0069, 0.0089, 0.0066, 0.0077, 0.0084],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006],
+ device='cuda:1')
+2023-03-09 15:27:10,377 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78381.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:27:11,260 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.641e+02 2.890e+02 3.341e+02 3.968e+02 7.791e+02, threshold=6.681e+02, percent-clipped=3.0
+2023-03-09 15:27:46,847 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2418, 3.7179, 2.4709, 3.5324, 4.4781, 2.5160, 3.2715, 3.5089],
+ device='cuda:1'), covar=tensor([0.0271, 0.1196, 0.1639, 0.0774, 0.0151, 0.1253, 0.0935, 0.0978],
+ device='cuda:1'), in_proj_covar=tensor([0.0171, 0.0275, 0.0206, 0.0198, 0.0130, 0.0186, 0.0218, 0.0228],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 15:27:50,920 INFO [train.py:898] (1/4) Epoch 22, batch 2100, loss[loss=0.1686, simple_loss=0.2689, pruned_loss=0.03409, over 18572.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2519, pruned_loss=0.0365, over 3556088.00 frames. ], batch size: 54, lr: 5.11e-03, grad_scale: 8.0
+2023-03-09 15:28:10,263 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8965, 3.2104, 4.4825, 3.8090, 3.2150, 4.8065, 4.0896, 3.2167],
+ device='cuda:1'), covar=tensor([0.0446, 0.1183, 0.0312, 0.0426, 0.1260, 0.0148, 0.0485, 0.0853],
+ device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0237, 0.0213, 0.0163, 0.0222, 0.0211, 0.0246, 0.0197],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-09 15:28:35,796 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8414, 4.1143, 2.4623, 3.9536, 5.2020, 2.5174, 3.7889, 3.9523],
+ device='cuda:1'), covar=tensor([0.0180, 0.1045, 0.1641, 0.0647, 0.0086, 0.1272, 0.0686, 0.0706],
+ device='cuda:1'), in_proj_covar=tensor([0.0172, 0.0275, 0.0207, 0.0198, 0.0130, 0.0186, 0.0218, 0.0228],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 15:28:43,432 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78460.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:28:49,234 INFO [train.py:898] (1/4) Epoch 22, batch 2150, loss[loss=0.1735, simple_loss=0.2709, pruned_loss=0.03803, over 18372.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2515, pruned_loss=0.03619, over 3569794.16 frames. ], batch size: 55, lr: 5.11e-03, grad_scale: 8.0
+2023-03-09 15:29:08,265 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.757e+02 2.625e+02 3.036e+02 3.633e+02 1.479e+03, threshold=6.073e+02, percent-clipped=4.0
+2023-03-09 15:29:47,486 INFO [train.py:898] (1/4) Epoch 22, batch 2200, loss[loss=0.1296, simple_loss=0.2178, pruned_loss=0.02073, over 18491.00 frames. ], tot_loss[loss=0.1622, simple_loss=0.252, pruned_loss=0.03623, over 3575448.92 frames. ], batch size: 47, lr: 5.11e-03, grad_scale: 8.0
+2023-03-09 15:29:55,164 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78521.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:30:35,266 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78556.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:30:44,767 INFO [train.py:898] (1/4) Epoch 22, batch 2250, loss[loss=0.1635, simple_loss=0.2591, pruned_loss=0.03395, over 18303.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2523, pruned_loss=0.03617, over 3594752.68 frames. ], batch size: 54, lr: 5.11e-03, grad_scale: 8.0
+2023-03-09 15:31:04,899 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.048e+02 2.745e+02 3.101e+02 3.633e+02 7.718e+02, threshold=6.202e+02, percent-clipped=2.0
+2023-03-09 15:31:11,566 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78588.0, num_to_drop=1, layers_to_drop={3}
+2023-03-09 15:31:13,093 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0
+2023-03-09 15:31:33,882 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7771, 3.5813, 4.9031, 4.3558, 3.2896, 2.9500, 4.3897, 5.1518],
+ device='cuda:1'), covar=tensor([0.0807, 0.1516, 0.0190, 0.0392, 0.0951, 0.1226, 0.0373, 0.0175],
+ device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0277, 0.0157, 0.0182, 0.0193, 0.0192, 0.0197, 0.0201],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 15:31:43,707 INFO [train.py:898] (1/4) Epoch 22, batch 2300, loss[loss=0.1599, simple_loss=0.2482, pruned_loss=0.0358, over 18395.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2525, pruned_loss=0.03657, over 3588763.13 frames. ], batch size: 52, lr: 5.11e-03, grad_scale: 8.0
+2023-03-09 15:31:46,283 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78617.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:32:07,806 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.71 vs. limit=2.0
+2023-03-09 15:32:23,045 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0
+2023-03-09 15:32:28,123 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0645, 5.5909, 5.5497, 5.5296, 5.0631, 5.4521, 4.9056, 5.4659],
+ device='cuda:1'), covar=tensor([0.0213, 0.0215, 0.0153, 0.0401, 0.0370, 0.0207, 0.0949, 0.0251],
+ device='cuda:1'), in_proj_covar=tensor([0.0221, 0.0266, 0.0260, 0.0338, 0.0276, 0.0274, 0.0315, 0.0264],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:1')
+2023-03-09 15:32:38,400 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0
+2023-03-09 15:32:42,003 INFO [train.py:898] (1/4) Epoch 22, batch 2350, loss[loss=0.1714, simple_loss=0.2625, pruned_loss=0.04018, over 16119.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2521, pruned_loss=0.0362, over 3585664.54 frames. ], batch size: 94, lr: 5.11e-03, grad_scale: 8.0
+2023-03-09 15:33:01,735 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.907e+02 2.584e+02 2.934e+02 3.285e+02 5.590e+02, threshold=5.868e+02, percent-clipped=0.0
+2023-03-09 15:33:23,358 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9317, 5.3921, 5.3952, 5.4101, 4.8951, 5.3212, 4.6728, 5.2878],
+ device='cuda:1'), covar=tensor([0.0249, 0.0327, 0.0216, 0.0368, 0.0402, 0.0250, 0.1182, 0.0348],
+ device='cuda:1'), in_proj_covar=tensor([0.0222, 0.0267, 0.0261, 0.0338, 0.0277, 0.0275, 0.0316, 0.0266],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:1')
+2023-03-09 15:33:35,071 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9443, 4.2185, 4.1862, 4.2247, 3.8445, 4.1105, 3.8122, 4.1566],
+ device='cuda:1'), covar=tensor([0.0263, 0.0354, 0.0265, 0.0488, 0.0367, 0.0267, 0.0900, 0.0344],
+ device='cuda:1'), in_proj_covar=tensor([0.0221, 0.0266, 0.0260, 0.0338, 0.0276, 0.0275, 0.0315, 0.0265],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:1')
+2023-03-09 15:33:40,401 INFO [train.py:898] (1/4) Epoch 22, batch 2400, loss[loss=0.161, simple_loss=0.2458, pruned_loss=0.03806, over 18286.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2527, pruned_loss=0.03638, over 3558973.82 frames. ], batch size: 49, lr: 5.10e-03, grad_scale: 8.0
+2023-03-09 15:34:39,526 INFO [train.py:898] (1/4) Epoch 22, batch 2450, loss[loss=0.1381, simple_loss=0.225, pruned_loss=0.0256, over 18405.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2528, pruned_loss=0.03624, over 3566972.50 frames. ], batch size: 43, lr: 5.10e-03, grad_scale: 4.0
+2023-03-09 15:34:43,656 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0
+2023-03-09 15:34:54,679 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7268, 2.2622, 2.6511, 2.6129, 3.2410, 4.7851, 4.6405, 3.2624],
+ device='cuda:1'), covar=tensor([0.1827, 0.2459, 0.3052, 0.1907, 0.2420, 0.0245, 0.0397, 0.1019],
+ device='cuda:1'), in_proj_covar=tensor([0.0305, 0.0349, 0.0387, 0.0282, 0.0392, 0.0248, 0.0298, 0.0259],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001],
+ device='cuda:1')
+2023-03-09 15:35:00,358 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.881e+02 2.648e+02 3.233e+02 3.743e+02 6.587e+02, threshold=6.467e+02, percent-clipped=2.0
+2023-03-09 15:35:38,713 INFO [train.py:898] (1/4) Epoch 22, batch 2500, loss[loss=0.155, simple_loss=0.2503, pruned_loss=0.02983, over 18622.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2515, pruned_loss=0.0358, over 3572091.14 frames. ], batch size: 52, lr: 5.10e-03, grad_scale: 4.0
+2023-03-09 15:35:40,041 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78816.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:36:37,296 INFO [train.py:898] (1/4) Epoch 22, batch 2550, loss[loss=0.1535, simple_loss=0.234, pruned_loss=0.0365, over 17643.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2509, pruned_loss=0.03533, over 3589442.50 frames. ], batch size: 39, lr: 5.10e-03, grad_scale: 4.0
+2023-03-09 15:36:56,999 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.832e+02 2.718e+02 3.165e+02 3.980e+02 2.438e+03, threshold=6.330e+02, percent-clipped=4.0
+2023-03-09 15:37:03,556 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78888.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:37:12,509 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5142, 5.4937, 5.0761, 5.4526, 5.4500, 4.7747, 5.3601, 5.0741],
+ device='cuda:1'), covar=tensor([0.0390, 0.0402, 0.1295, 0.0693, 0.0558, 0.0446, 0.0380, 0.1121],
+ device='cuda:1'), in_proj_covar=tensor([0.0499, 0.0559, 0.0704, 0.0439, 0.0450, 0.0509, 0.0544, 0.0676],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006],
+ device='cuda:1')
+2023-03-09 15:37:12,675 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9289, 3.6322, 4.9767, 4.4773, 3.0976, 2.9871, 4.3227, 5.2885],
+ device='cuda:1'), covar=tensor([0.0812, 0.1593, 0.0224, 0.0408, 0.1149, 0.1255, 0.0477, 0.0225],
+ device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0277, 0.0158, 0.0183, 0.0194, 0.0192, 0.0197, 0.0202],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 15:37:30,392 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78912.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:37:34,109 INFO [train.py:898] (1/4) Epoch 22, batch 2600, loss[loss=0.1837, simple_loss=0.2736, pruned_loss=0.04693, over 18459.00 frames. ], tot_loss[loss=0.1616, simple_loss=0.2518, pruned_loss=0.03565, over 3601377.50 frames. ], batch size: 59, lr: 5.10e-03, grad_scale: 4.0
+2023-03-09 15:37:58,480 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78936.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:38:31,475 INFO [train.py:898] (1/4) Epoch 22, batch 2650, loss[loss=0.1617, simple_loss=0.249, pruned_loss=0.0372, over 18623.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2517, pruned_loss=0.03569, over 3602624.59 frames. ], batch size: 52, lr: 5.10e-03, grad_scale: 4.0
+2023-03-09 15:38:31,739 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78965.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:38:52,451 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.743e+02 2.628e+02 3.171e+02 3.718e+02 9.310e+02, threshold=6.343e+02, percent-clipped=1.0
+2023-03-09 15:39:16,038 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8556, 5.2840, 2.6756, 5.1512, 5.0335, 5.3359, 5.1663, 2.7799],
+ device='cuda:1'), covar=tensor([0.0231, 0.0073, 0.0828, 0.0081, 0.0071, 0.0066, 0.0080, 0.0948],
+ device='cuda:1'), in_proj_covar=tensor([0.0089, 0.0081, 0.0095, 0.0095, 0.0086, 0.0077, 0.0085, 0.0097],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005],
+ device='cuda:1')
+2023-03-09 15:39:28,931 INFO [train.py:898] (1/4) Epoch 22, batch 2700, loss[loss=0.1667, simple_loss=0.2538, pruned_loss=0.03978, over 18373.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2513, pruned_loss=0.03541, over 3610847.72 frames. ], batch size: 50, lr: 5.09e-03, grad_scale: 4.0
+2023-03-09 15:39:31,628 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8236, 5.1815, 2.6567, 5.0416, 4.9047, 5.2347, 5.0259, 2.6473],
+ device='cuda:1'), covar=tensor([0.0231, 0.0081, 0.0764, 0.0081, 0.0076, 0.0067, 0.0088, 0.0984],
+ device='cuda:1'), in_proj_covar=tensor([0.0089, 0.0080, 0.0095, 0.0095, 0.0085, 0.0077, 0.0085, 0.0097],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005],
+ device='cuda:1')
+2023-03-09 15:39:42,919 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79026.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:40:11,105 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0
+2023-03-09 15:40:22,266 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.91 vs. limit=5.0
+2023-03-09 15:40:27,076 INFO [train.py:898] (1/4) Epoch 22, batch 2750, loss[loss=0.1442, simple_loss=0.2348, pruned_loss=0.02681, over 18251.00 frames. ], tot_loss[loss=0.1616, simple_loss=0.252, pruned_loss=0.03563, over 3599740.23 frames. ], batch size: 47, lr: 5.09e-03, grad_scale: 4.0
+2023-03-09 15:40:48,100 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.755e+02 2.673e+02 3.245e+02 3.869e+02 1.413e+03, threshold=6.491e+02, percent-clipped=1.0
+2023-03-09 15:41:25,105 INFO [train.py:898] (1/4) Epoch 22, batch 2800, loss[loss=0.1385, simple_loss=0.2169, pruned_loss=0.02999, over 18490.00 frames. ], tot_loss[loss=0.1622, simple_loss=0.2524, pruned_loss=0.03598, over 3583350.22 frames. ], batch size: 44, lr: 5.09e-03, grad_scale: 8.0
+2023-03-09 15:41:26,575 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79116.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:42:22,261 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79164.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:42:23,248 INFO [train.py:898] (1/4) Epoch 22, batch 2850, loss[loss=0.1446, simple_loss=0.2301, pruned_loss=0.02957, over 18405.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2519, pruned_loss=0.03586, over 3584753.37 frames. ], batch size: 42, lr: 5.09e-03, grad_scale: 8.0
+2023-03-09 15:42:45,531 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.599e+02 2.640e+02 3.106e+02 3.691e+02 7.833e+02, threshold=6.212e+02, percent-clipped=1.0
+2023-03-09 15:43:19,541 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79212.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:43:22,658 INFO [train.py:898] (1/4) Epoch 22, batch 2900, loss[loss=0.1476, simple_loss=0.239, pruned_loss=0.02814, over 18409.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2518, pruned_loss=0.03578, over 3587580.41 frames. ], batch size: 52, lr: 5.09e-03, grad_scale: 4.0
+2023-03-09 15:44:15,606 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79260.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:44:21,242 INFO [train.py:898] (1/4) Epoch 22, batch 2950, loss[loss=0.1718, simple_loss=0.2684, pruned_loss=0.03754, over 18004.00 frames. ], tot_loss[loss=0.1622, simple_loss=0.2522, pruned_loss=0.03605, over 3583930.74 frames. ], batch size: 65, lr: 5.09e-03, grad_scale: 4.0
+2023-03-09 15:44:42,795 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.956e+02 2.705e+02 3.045e+02 3.689e+02 6.352e+02, threshold=6.090e+02, percent-clipped=1.0
+2023-03-09 15:44:44,766 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79285.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 15:45:19,365 INFO [train.py:898] (1/4) Epoch 22, batch 3000, loss[loss=0.1537, simple_loss=0.2476, pruned_loss=0.02987, over 18489.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2525, pruned_loss=0.03604, over 3577453.33 frames. ], batch size: 53, lr: 5.09e-03, grad_scale: 4.0
+2023-03-09 15:45:19,366 INFO [train.py:923] (1/4) Computing validation loss
+2023-03-09 15:45:31,241 INFO [train.py:932] (1/4) Epoch 22, validation: loss=0.1498, simple_loss=0.249, pruned_loss=0.02526, over 944034.00 frames.
+2023-03-09 15:45:31,241 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB
+2023-03-09 15:45:38,860 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79321.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:45:53,429 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79333.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:46:08,241 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79346.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 15:46:28,633 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79364.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:46:29,399 INFO [train.py:898] (1/4) Epoch 22, batch 3050, loss[loss=0.1424, simple_loss=0.2274, pruned_loss=0.02876, over 18532.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2515, pruned_loss=0.03596, over 3591803.42 frames. ], batch size: 44, lr: 5.08e-03, grad_scale: 4.0
+2023-03-09 15:46:41,853 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79375.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 15:46:48,347 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3192, 5.2752, 4.9442, 5.1604, 5.2424, 4.5791, 5.1329, 4.9084],
+ device='cuda:1'), covar=tensor([0.0452, 0.0495, 0.1286, 0.0948, 0.0653, 0.0442, 0.0447, 0.1093],
+ device='cuda:1'), in_proj_covar=tensor([0.0500, 0.0560, 0.0704, 0.0442, 0.0454, 0.0515, 0.0547, 0.0680],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006],
+ device='cuda:1')
+2023-03-09 15:46:52,687 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.974e+02 2.697e+02 3.145e+02 3.968e+02 7.194e+02, threshold=6.290e+02, percent-clipped=6.0
+2023-03-09 15:47:04,569 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79394.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:47:21,141 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8402, 5.2963, 4.9119, 5.0669, 4.9886, 4.8419, 5.3767, 5.3177],
+ device='cuda:1'), covar=tensor([0.1301, 0.0825, 0.0976, 0.0796, 0.1483, 0.0861, 0.0708, 0.0767],
+ device='cuda:1'), in_proj_covar=tensor([0.0626, 0.0540, 0.0388, 0.0567, 0.0754, 0.0562, 0.0775, 0.0586],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004],
+ device='cuda:1')
+2023-03-09 15:47:28,488 INFO [train.py:898] (1/4) Epoch 22, batch 3100, loss[loss=0.1501, simple_loss=0.2456, pruned_loss=0.02725, over 18384.00 frames. ], tot_loss[loss=0.1621, simple_loss=0.2518, pruned_loss=0.0362, over 3592473.17 frames. ], batch size: 50, lr: 5.08e-03, grad_scale: 4.0
+2023-03-09 15:47:28,755 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2344, 4.4912, 4.4991, 4.7156, 4.1588, 4.5387, 3.5627, 4.5069],
+ device='cuda:1'), covar=tensor([0.0342, 0.0614, 0.0449, 0.0462, 0.0462, 0.0328, 0.1950, 0.0462],
+ device='cuda:1'), in_proj_covar=tensor([0.0220, 0.0265, 0.0259, 0.0336, 0.0275, 0.0274, 0.0314, 0.0263],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:1')
+2023-03-09 15:47:40,604 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79425.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:47:48,817 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0
+2023-03-09 15:47:54,517 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79436.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 15:48:01,283 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7755, 4.8400, 4.8796, 4.5880, 4.7083, 4.6893, 4.8814, 4.9657],
+ device='cuda:1'), covar=tensor([0.0079, 0.0064, 0.0062, 0.0117, 0.0063, 0.0165, 0.0080, 0.0086],
+ device='cuda:1'), in_proj_covar=tensor([0.0096, 0.0071, 0.0076, 0.0095, 0.0076, 0.0106, 0.0089, 0.0088],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:1')
+2023-03-09 15:48:27,404 INFO [train.py:898] (1/4) Epoch 22, batch 3150, loss[loss=0.1981, simple_loss=0.2807, pruned_loss=0.05781, over 12305.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2521, pruned_loss=0.03652, over 3583245.95 frames. ], batch size: 129, lr: 5.08e-03, grad_scale: 4.0
+2023-03-09 15:48:50,535 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.854e+02 2.702e+02 3.215e+02 3.838e+02 6.492e+02, threshold=6.430e+02, percent-clipped=3.0
+2023-03-09 15:49:25,661 INFO [train.py:898] (1/4) Epoch 22, batch 3200, loss[loss=0.1621, simple_loss=0.2607, pruned_loss=0.03181, over 17782.00 frames. ], tot_loss[loss=0.1621, simple_loss=0.2518, pruned_loss=0.03623, over 3587242.37 frames. ], batch size: 70, lr: 5.08e-03, grad_scale: 8.0
+2023-03-09 15:49:55,877 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6355, 2.9579, 4.3877, 3.5914, 2.7853, 4.6562, 3.8643, 2.9522],
+ device='cuda:1'), covar=tensor([0.0565, 0.1453, 0.0307, 0.0472, 0.1516, 0.0205, 0.0677, 0.0913],
+ device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0237, 0.0211, 0.0165, 0.0223, 0.0211, 0.0248, 0.0195],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-09 15:50:24,047 INFO [train.py:898] (1/4) Epoch 22, batch 3250, loss[loss=0.1849, simple_loss=0.2766, pruned_loss=0.0466, over 18342.00 frames. ], tot_loss[loss=0.1619, simple_loss=0.2514, pruned_loss=0.0362, over 3586901.75 frames. ], batch size: 55, lr: 5.08e-03, grad_scale: 8.0
+2023-03-09 15:50:46,084 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.828e+02 2.603e+02 3.051e+02 3.520e+02 6.535e+02, threshold=6.103e+02, percent-clipped=1.0
+2023-03-09 15:50:47,550 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79585.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:51:03,249 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0
+2023-03-09 15:51:21,969 INFO [train.py:898] (1/4) Epoch 22, batch 3300, loss[loss=0.1465, simple_loss=0.226, pruned_loss=0.03354, over 18393.00 frames. ], tot_loss[loss=0.1622, simple_loss=0.2515, pruned_loss=0.0364, over 3583394.16 frames. ], batch size: 42, lr: 5.08e-03, grad_scale: 8.0
+2023-03-09 15:51:28,943 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79621.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:51:52,904 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79641.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 15:51:58,620 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79646.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:52:10,788 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79657.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:52:19,966 INFO [train.py:898] (1/4) Epoch 22, batch 3350, loss[loss=0.1554, simple_loss=0.2354, pruned_loss=0.0377, over 18480.00 frames. ], tot_loss[loss=0.1619, simple_loss=0.2511, pruned_loss=0.03641, over 3589682.17 frames. ], batch size: 44, lr: 5.07e-03, grad_scale: 4.0
+2023-03-09 15:52:24,607 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79669.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:52:42,674 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.916e+02 2.736e+02 3.253e+02 3.881e+02 1.172e+03, threshold=6.507e+02, percent-clipped=7.0
+2023-03-09 15:52:47,556 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79689.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:53:18,221 INFO [train.py:898] (1/4) Epoch 22, batch 3400, loss[loss=0.1554, simple_loss=0.2512, pruned_loss=0.02986, over 18487.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2507, pruned_loss=0.03619, over 3588710.08 frames. ], batch size: 51, lr: 5.07e-03, grad_scale: 4.0
+2023-03-09 15:53:22,088 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79718.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:53:24,122 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79720.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:53:36,263 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79731.0, num_to_drop=1, layers_to_drop={2}
+2023-03-09 15:54:12,540 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79762.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:54:15,570 INFO [train.py:898] (1/4) Epoch 22, batch 3450, loss[loss=0.1455, simple_loss=0.2254, pruned_loss=0.03275, over 18435.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.251, pruned_loss=0.03598, over 3596627.79 frames. ], batch size: 42, lr: 5.07e-03, grad_scale: 4.0
+2023-03-09 15:54:38,364 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.822e+02 2.532e+02 2.897e+02 3.710e+02 6.365e+02, threshold=5.795e+02, percent-clipped=0.0
+2023-03-09 15:55:14,087 INFO [train.py:898] (1/4) Epoch 22, batch 3500, loss[loss=0.1699, simple_loss=0.2648, pruned_loss=0.03746, over 18373.00 frames. ], tot_loss[loss=0.1612, simple_loss=0.251, pruned_loss=0.03574, over 3604186.44 frames. ], batch size: 50, lr: 5.07e-03, grad_scale: 4.0
+2023-03-09 15:55:24,030 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79823.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:56:09,525 INFO [train.py:898] (1/4) Epoch 22, batch 3550, loss[loss=0.1709, simple_loss=0.2637, pruned_loss=0.03906, over 18474.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2504, pruned_loss=0.03532, over 3603941.36 frames. ], batch size: 59, lr: 5.07e-03, grad_scale: 4.0
+2023-03-09 15:56:32,290 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.670e+02 2.485e+02 2.929e+02 3.619e+02 1.143e+03, threshold=5.859e+02, percent-clipped=2.0
+2023-03-09 15:57:05,048 INFO [train.py:898] (1/4) Epoch 22, batch 3600, loss[loss=0.1656, simple_loss=0.2594, pruned_loss=0.03595, over 18357.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2512, pruned_loss=0.03552, over 3593158.43 frames. ], batch size: 55, lr: 5.07e-03, grad_scale: 8.0
+2023-03-09 15:57:32,480 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79941.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:57:32,520 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79941.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 15:57:37,415 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8435, 3.4647, 2.6903, 3.3119, 4.0624, 2.6252, 3.3721, 3.3934],
+ device='cuda:1'), covar=tensor([0.0307, 0.0967, 0.1326, 0.0697, 0.0157, 0.1081, 0.0644, 0.0760],
+ device='cuda:1'), in_proj_covar=tensor([0.0172, 0.0274, 0.0206, 0.0198, 0.0130, 0.0184, 0.0217, 0.0227],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 15:58:07,276 INFO [train.py:898] (1/4) Epoch 23, batch 0, loss[loss=0.1712, simple_loss=0.2607, pruned_loss=0.04083, over 18009.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2607, pruned_loss=0.04083, over 18009.00 frames. ], batch size: 65, lr: 4.95e-03, grad_scale: 8.0
+2023-03-09 15:58:07,276 INFO [train.py:923] (1/4) Computing validation loss
+2023-03-09 15:58:18,936 INFO [train.py:932] (1/4) Epoch 23, validation: loss=0.1494, simple_loss=0.2493, pruned_loss=0.02473, over 944034.00 frames.
+2023-03-09 15:58:18,937 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB
+2023-03-09 15:58:25,339 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. limit=2.0
+2023-03-09 15:59:01,334 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.492e+02 2.757e+02 3.307e+02 4.142e+02 8.059e+02, threshold=6.615e+02, percent-clipped=1.0
+2023-03-09 15:59:06,121 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79989.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 15:59:06,156 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79989.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:59:17,067 INFO [train.py:898] (1/4) Epoch 23, batch 50, loss[loss=0.1483, simple_loss=0.2387, pruned_loss=0.02896, over 18260.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2536, pruned_loss=0.03646, over 802000.79 frames. ], batch size: 47, lr: 4.95e-03, grad_scale: 8.0
+2023-03-09 15:59:38,367 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80013.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 15:59:43,202 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0
+2023-03-09 15:59:46,638 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80020.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 16:00:00,066 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80031.0, num_to_drop=1, layers_to_drop={2}
+2023-03-09 16:00:04,668 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6505, 2.4743, 2.7159, 2.5936, 3.2431, 4.8971, 4.7193, 3.5205],
+ device='cuda:1'), covar=tensor([0.1915, 0.2525, 0.3072, 0.2063, 0.2528, 0.0224, 0.0399, 0.0918],
+ device='cuda:1'), in_proj_covar=tensor([0.0305, 0.0348, 0.0386, 0.0280, 0.0389, 0.0249, 0.0297, 0.0258],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001],
+ device='cuda:1')
+2023-03-09 16:00:06,680 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80037.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 16:00:20,041 INFO [train.py:898] (1/4) Epoch 23, batch 100, loss[loss=0.1503, simple_loss=0.2354, pruned_loss=0.03264, over 17727.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2522, pruned_loss=0.03631, over 1426723.42 frames. ], batch size: 39, lr: 4.95e-03, grad_scale: 8.0
+2023-03-09 16:00:42,469 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80068.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 16:00:45,856 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.93 vs. limit=5.0
+2023-03-09 16:00:56,571 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80079.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 16:01:00,207 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.83 vs. limit=5.0
+2023-03-09 16:01:02,964 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.638e+02 2.619e+02 2.981e+02 3.602e+02 9.245e+02, threshold=5.963e+02, percent-clipped=1.0
+2023-03-09 16:01:18,867 INFO [train.py:898] (1/4) Epoch 23, batch 150, loss[loss=0.1348, simple_loss=0.2177, pruned_loss=0.02597, over 18472.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2524, pruned_loss=0.03637, over 1924342.00 frames. ], batch size: 44, lr: 4.95e-03, grad_scale: 8.0
+2023-03-09 16:01:35,187 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.40 vs. limit=5.0
+2023-03-09 16:01:40,130 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80118.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 16:02:04,918 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4458, 3.2746, 2.0991, 4.2072, 2.8567, 3.9985, 2.3451, 3.6343],
+ device='cuda:1'), covar=tensor([0.0636, 0.0875, 0.1444, 0.0571, 0.0891, 0.0302, 0.1240, 0.0466],
+ device='cuda:1'), in_proj_covar=tensor([0.0218, 0.0229, 0.0192, 0.0289, 0.0196, 0.0267, 0.0207, 0.0203],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 16:02:17,083 INFO [train.py:898] (1/4) Epoch 23, batch 200, loss[loss=0.1517, simple_loss=0.2296, pruned_loss=0.03683, over 17726.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2525, pruned_loss=0.03637, over 2299605.27 frames. ], batch size: 39, lr: 4.95e-03, grad_scale: 8.0
], batch size: 39, lr: 4.95e-03, grad_scale: 8.0 +2023-03-09 16:02:49,394 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3020, 5.2733, 4.9330, 5.1595, 5.2280, 4.6636, 5.1511, 4.9040], + device='cuda:1'), covar=tensor([0.0496, 0.0515, 0.1320, 0.0970, 0.0625, 0.0416, 0.0467, 0.1219], + device='cuda:1'), in_proj_covar=tensor([0.0502, 0.0569, 0.0708, 0.0445, 0.0459, 0.0518, 0.0551, 0.0687], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 16:02:59,527 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.861e+02 2.844e+02 3.404e+02 4.018e+02 9.589e+02, threshold=6.808e+02, percent-clipped=5.0 +2023-03-09 16:03:15,925 INFO [train.py:898] (1/4) Epoch 23, batch 250, loss[loss=0.1876, simple_loss=0.2718, pruned_loss=0.05171, over 12942.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2513, pruned_loss=0.03585, over 2582293.91 frames. ], batch size: 130, lr: 4.94e-03, grad_scale: 8.0 +2023-03-09 16:03:19,245 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. limit=2.0 +2023-03-09 16:04:05,523 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80241.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:04:14,919 INFO [train.py:898] (1/4) Epoch 23, batch 300, loss[loss=0.1825, simple_loss=0.2767, pruned_loss=0.04418, over 17787.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.25, pruned_loss=0.03525, over 2817511.39 frames. ], batch size: 70, lr: 4.94e-03, grad_scale: 8.0 +2023-03-09 16:04:33,075 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5426, 3.3170, 2.2096, 4.3485, 3.0651, 4.1060, 2.2743, 4.0075], + device='cuda:1'), covar=tensor([0.0647, 0.0878, 0.1514, 0.0511, 0.0856, 0.0326, 0.1345, 0.0343], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0225, 0.0189, 0.0285, 0.0193, 0.0264, 0.0204, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 16:04:52,909 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80283.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:04:54,783 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.957e+02 2.544e+02 3.151e+02 3.664e+02 8.600e+02, threshold=6.302e+02, percent-clipped=1.0 +2023-03-09 16:05:00,691 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80289.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:05:12,211 INFO [train.py:898] (1/4) Epoch 23, batch 350, loss[loss=0.1718, simple_loss=0.2637, pruned_loss=0.03996, over 18624.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2507, pruned_loss=0.03573, over 2972764.07 frames. ], batch size: 52, lr: 4.94e-03, grad_scale: 8.0 +2023-03-09 16:05:27,987 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80313.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:06:04,337 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80344.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:06:09,755 INFO [train.py:898] (1/4) Epoch 23, batch 400, loss[loss=0.1711, simple_loss=0.2617, pruned_loss=0.04023, over 18620.00 frames. ], tot_loss[loss=0.1616, simple_loss=0.2517, pruned_loss=0.03575, over 3112073.10 frames. 
], batch size: 52, lr: 4.94e-03, grad_scale: 8.0 +2023-03-09 16:06:23,710 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80361.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:06:45,703 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5060, 2.2278, 2.5701, 2.5883, 2.9700, 4.8289, 4.6776, 3.2905], + device='cuda:1'), covar=tensor([0.2076, 0.2618, 0.3130, 0.2040, 0.2859, 0.0287, 0.0389, 0.1030], + device='cuda:1'), in_proj_covar=tensor([0.0311, 0.0353, 0.0392, 0.0284, 0.0395, 0.0253, 0.0300, 0.0261], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 16:06:50,966 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.659e+02 2.745e+02 3.129e+02 3.783e+02 6.813e+02, threshold=6.257e+02, percent-clipped=3.0 +2023-03-09 16:07:02,287 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80393.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:07:08,711 INFO [train.py:898] (1/4) Epoch 23, batch 450, loss[loss=0.1765, simple_loss=0.2723, pruned_loss=0.04035, over 18148.00 frames. ], tot_loss[loss=0.1614, simple_loss=0.2515, pruned_loss=0.0356, over 3230827.72 frames. ], batch size: 62, lr: 4.94e-03, grad_scale: 8.0 +2023-03-09 16:07:31,411 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80418.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:08:07,200 INFO [train.py:898] (1/4) Epoch 23, batch 500, loss[loss=0.1505, simple_loss=0.2364, pruned_loss=0.03229, over 18254.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2518, pruned_loss=0.03564, over 3290871.15 frames. ], batch size: 45, lr: 4.94e-03, grad_scale: 8.0 +2023-03-09 16:08:13,276 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80454.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:08:23,656 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80463.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:08:26,789 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80466.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:08:31,856 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-09 16:08:47,895 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.857e+02 2.536e+02 3.027e+02 3.548e+02 8.560e+02, threshold=6.054e+02, percent-clipped=2.0 +2023-03-09 16:08:58,916 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80494.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:09:05,400 INFO [train.py:898] (1/4) Epoch 23, batch 550, loss[loss=0.149, simple_loss=0.2258, pruned_loss=0.0361, over 18403.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2522, pruned_loss=0.03594, over 3367317.50 frames. 
], batch size: 42, lr: 4.93e-03, grad_scale: 8.0 +2023-03-09 16:09:34,026 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80524.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:09:35,241 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6727, 3.6830, 3.5155, 3.1785, 3.3904, 2.8156, 2.7237, 3.6826], + device='cuda:1'), covar=tensor([0.0073, 0.0089, 0.0098, 0.0142, 0.0111, 0.0203, 0.0239, 0.0073], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0166, 0.0139, 0.0194, 0.0147, 0.0184, 0.0187, 0.0126], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 16:10:02,837 INFO [train.py:898] (1/4) Epoch 23, batch 600, loss[loss=0.1395, simple_loss=0.2218, pruned_loss=0.02863, over 18126.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2518, pruned_loss=0.03591, over 3418678.06 frames. ], batch size: 40, lr: 4.93e-03, grad_scale: 8.0 +2023-03-09 16:10:08,277 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7746, 4.4230, 4.4836, 3.3433, 3.6442, 3.5145, 2.5000, 2.5829], + device='cuda:1'), covar=tensor([0.0220, 0.0172, 0.0076, 0.0311, 0.0325, 0.0219, 0.0753, 0.0824], + device='cuda:1'), in_proj_covar=tensor([0.0071, 0.0062, 0.0065, 0.0070, 0.0091, 0.0068, 0.0078, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 16:10:10,536 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80555.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:10:44,260 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.750e+02 2.691e+02 3.309e+02 4.060e+02 6.105e+02, threshold=6.618e+02, percent-clipped=2.0 +2023-03-09 16:10:59,882 INFO [train.py:898] (1/4) Epoch 23, batch 650, loss[loss=0.1384, simple_loss=0.225, pruned_loss=0.02591, over 18555.00 frames. ], tot_loss[loss=0.161, simple_loss=0.251, pruned_loss=0.03551, over 3469034.26 frames. ], batch size: 45, lr: 4.93e-03, grad_scale: 8.0 +2023-03-09 16:11:44,208 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3003, 4.7080, 4.6950, 4.7082, 4.3197, 4.6591, 4.1788, 4.6199], + device='cuda:1'), covar=tensor([0.0267, 0.0291, 0.0213, 0.0444, 0.0343, 0.0220, 0.0879, 0.0340], + device='cuda:1'), in_proj_covar=tensor([0.0217, 0.0260, 0.0255, 0.0330, 0.0271, 0.0266, 0.0304, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 16:11:45,391 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6646, 3.6572, 3.5259, 3.1510, 3.3814, 2.8382, 2.7478, 3.6509], + device='cuda:1'), covar=tensor([0.0074, 0.0098, 0.0089, 0.0137, 0.0107, 0.0197, 0.0222, 0.0070], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0165, 0.0138, 0.0192, 0.0146, 0.0184, 0.0185, 0.0125], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 16:11:47,354 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80639.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:11:58,520 INFO [train.py:898] (1/4) Epoch 23, batch 700, loss[loss=0.1831, simple_loss=0.2788, pruned_loss=0.04371, over 18125.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2503, pruned_loss=0.0351, over 3499558.24 frames. 
], batch size: 62, lr: 4.93e-03, grad_scale: 8.0 +2023-03-09 16:12:41,895 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.765e+02 2.683e+02 3.069e+02 3.814e+02 7.668e+02, threshold=6.138e+02, percent-clipped=2.0 +2023-03-09 16:12:57,646 INFO [train.py:898] (1/4) Epoch 23, batch 750, loss[loss=0.1691, simple_loss=0.2602, pruned_loss=0.03904, over 18037.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2503, pruned_loss=0.0352, over 3527281.37 frames. ], batch size: 65, lr: 4.93e-03, grad_scale: 8.0 +2023-03-09 16:13:11,127 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.13 vs. limit=5.0 +2023-03-09 16:13:12,099 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8643, 4.6161, 4.6846, 3.5714, 3.7765, 3.5441, 2.7598, 2.8028], + device='cuda:1'), covar=tensor([0.0206, 0.0149, 0.0064, 0.0277, 0.0331, 0.0245, 0.0678, 0.0772], + device='cuda:1'), in_proj_covar=tensor([0.0071, 0.0061, 0.0064, 0.0069, 0.0090, 0.0068, 0.0078, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 16:13:35,572 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-09 16:13:42,002 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80736.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:13:56,502 INFO [train.py:898] (1/4) Epoch 23, batch 800, loss[loss=0.1881, simple_loss=0.2764, pruned_loss=0.04993, over 18134.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.2505, pruned_loss=0.03548, over 3542349.41 frames. ], batch size: 62, lr: 4.93e-03, grad_scale: 8.0 +2023-03-09 16:13:56,694 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80749.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:14:25,117 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80773.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:14:37,084 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8946, 2.9048, 4.5828, 3.6950, 2.8796, 4.7621, 4.0902, 3.1384], + device='cuda:1'), covar=tensor([0.0465, 0.1610, 0.0217, 0.0533, 0.1613, 0.0199, 0.0556, 0.0945], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0238, 0.0212, 0.0164, 0.0225, 0.0210, 0.0249, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 16:14:38,914 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.739e+02 2.483e+02 2.864e+02 3.599e+02 5.476e+02, threshold=5.727e+02, percent-clipped=0.0 +2023-03-09 16:14:52,997 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80797.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:14:54,772 INFO [train.py:898] (1/4) Epoch 23, batch 850, loss[loss=0.1546, simple_loss=0.249, pruned_loss=0.03012, over 18393.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2504, pruned_loss=0.03559, over 3555883.89 frames. 
], batch size: 50, lr: 4.93e-03, grad_scale: 8.0 +2023-03-09 16:15:18,315 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80819.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:15:36,680 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80834.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:15:52,795 INFO [train.py:898] (1/4) Epoch 23, batch 900, loss[loss=0.1782, simple_loss=0.2704, pruned_loss=0.04299, over 18388.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2514, pruned_loss=0.03597, over 3558605.07 frames. ], batch size: 52, lr: 4.92e-03, grad_scale: 8.0 +2023-03-09 16:15:54,100 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80850.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:16:34,826 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.923e+02 2.539e+02 2.932e+02 3.679e+02 1.116e+03, threshold=5.864e+02, percent-clipped=6.0 +2023-03-09 16:16:51,057 INFO [train.py:898] (1/4) Epoch 23, batch 950, loss[loss=0.1588, simple_loss=0.2591, pruned_loss=0.02921, over 18498.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2508, pruned_loss=0.03572, over 3565443.15 frames. ], batch size: 51, lr: 4.92e-03, grad_scale: 8.0 +2023-03-09 16:17:17,656 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.77 vs. limit=2.0 +2023-03-09 16:17:37,160 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80939.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:17:48,620 INFO [train.py:898] (1/4) Epoch 23, batch 1000, loss[loss=0.1723, simple_loss=0.2653, pruned_loss=0.03969, over 18489.00 frames. ], tot_loss[loss=0.161, simple_loss=0.2506, pruned_loss=0.03572, over 3559380.36 frames. ], batch size: 53, lr: 4.92e-03, grad_scale: 8.0 +2023-03-09 16:18:30,652 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.676e+02 2.676e+02 3.165e+02 3.583e+02 7.202e+02, threshold=6.331e+02, percent-clipped=5.0 +2023-03-09 16:18:33,110 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80987.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:18:47,000 INFO [train.py:898] (1/4) Epoch 23, batch 1050, loss[loss=0.1942, simple_loss=0.2744, pruned_loss=0.05706, over 12787.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2511, pruned_loss=0.03597, over 3544134.67 frames. ], batch size: 130, lr: 4.92e-03, grad_scale: 8.0 +2023-03-09 16:19:29,960 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0440, 3.2304, 4.6268, 3.8276, 3.0232, 4.9091, 4.1051, 3.2599], + device='cuda:1'), covar=tensor([0.0405, 0.1395, 0.0272, 0.0470, 0.1468, 0.0177, 0.0536, 0.0901], + device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0241, 0.0216, 0.0166, 0.0228, 0.0212, 0.0251, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 16:19:45,394 INFO [train.py:898] (1/4) Epoch 23, batch 1100, loss[loss=0.1724, simple_loss=0.262, pruned_loss=0.04145, over 15883.00 frames. ], tot_loss[loss=0.1616, simple_loss=0.2515, pruned_loss=0.0359, over 3553736.28 frames. 
], batch size: 94, lr: 4.92e-03, grad_scale: 8.0 +2023-03-09 16:19:45,730 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81049.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:20:27,975 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.813e+02 2.500e+02 2.967e+02 3.515e+02 7.145e+02, threshold=5.934e+02, percent-clipped=1.0 +2023-03-09 16:20:35,911 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81092.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:20:41,606 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81097.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:20:43,684 INFO [train.py:898] (1/4) Epoch 23, batch 1150, loss[loss=0.1536, simple_loss=0.2395, pruned_loss=0.03384, over 18302.00 frames. ], tot_loss[loss=0.1622, simple_loss=0.2522, pruned_loss=0.03609, over 3565578.25 frames. ], batch size: 49, lr: 4.92e-03, grad_scale: 8.0 +2023-03-09 16:21:03,395 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81116.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:21:06,667 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81119.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:21:18,130 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81129.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:21:31,151 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-03-09 16:21:41,473 INFO [train.py:898] (1/4) Epoch 23, batch 1200, loss[loss=0.1406, simple_loss=0.2219, pruned_loss=0.02966, over 18083.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2524, pruned_loss=0.03608, over 3567867.46 frames. ], batch size: 40, lr: 4.91e-03, grad_scale: 8.0 +2023-03-09 16:21:42,849 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81150.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:22:02,577 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81167.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:22:13,642 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81177.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:22:22,855 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.778e+02 2.664e+02 3.163e+02 3.705e+02 6.920e+02, threshold=6.326e+02, percent-clipped=3.0 +2023-03-09 16:22:35,647 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3137, 5.2831, 4.9340, 5.2210, 5.2180, 4.5672, 5.1063, 4.8867], + device='cuda:1'), covar=tensor([0.0394, 0.0445, 0.1334, 0.0830, 0.0605, 0.0471, 0.0471, 0.1113], + device='cuda:1'), in_proj_covar=tensor([0.0497, 0.0571, 0.0704, 0.0441, 0.0458, 0.0520, 0.0547, 0.0685], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 16:22:37,339 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.71 vs. limit=2.0 +2023-03-09 16:22:39,003 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81198.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:22:39,915 INFO [train.py:898] (1/4) Epoch 23, batch 1250, loss[loss=0.1803, simple_loss=0.2683, pruned_loss=0.04609, over 18352.00 frames. ], tot_loss[loss=0.1621, simple_loss=0.2523, pruned_loss=0.03602, over 3572626.36 frames. 
], batch size: 55, lr: 4.91e-03, grad_scale: 8.0 +2023-03-09 16:22:42,075 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.58 vs. limit=2.0 +2023-03-09 16:23:17,290 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7861, 3.0721, 4.4634, 3.5652, 2.8973, 4.6810, 3.9595, 2.8379], + device='cuda:1'), covar=tensor([0.0443, 0.1349, 0.0287, 0.0522, 0.1389, 0.0178, 0.0548, 0.1039], + device='cuda:1'), in_proj_covar=tensor([0.0210, 0.0235, 0.0212, 0.0163, 0.0223, 0.0208, 0.0246, 0.0194], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 16:23:39,292 INFO [train.py:898] (1/4) Epoch 23, batch 1300, loss[loss=0.1423, simple_loss=0.2358, pruned_loss=0.02442, over 18384.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2516, pruned_loss=0.03575, over 3566460.30 frames. ], batch size: 50, lr: 4.91e-03, grad_scale: 8.0 +2023-03-09 16:24:22,196 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.771e+02 2.557e+02 2.964e+02 3.868e+02 7.836e+02, threshold=5.928e+02, percent-clipped=1.0 +2023-03-09 16:24:29,964 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-03-09 16:24:37,825 INFO [train.py:898] (1/4) Epoch 23, batch 1350, loss[loss=0.1756, simple_loss=0.2655, pruned_loss=0.04282, over 18288.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2518, pruned_loss=0.03594, over 3568583.29 frames. ], batch size: 57, lr: 4.91e-03, grad_scale: 4.0 +2023-03-09 16:24:38,208 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81299.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:24:40,508 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81301.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:24:50,108 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-09 16:25:36,790 INFO [train.py:898] (1/4) Epoch 23, batch 1400, loss[loss=0.1473, simple_loss=0.2416, pruned_loss=0.02657, over 18385.00 frames. ], tot_loss[loss=0.1619, simple_loss=0.2519, pruned_loss=0.036, over 3566789.35 frames. ], batch size: 50, lr: 4.91e-03, grad_scale: 4.0 +2023-03-09 16:25:49,559 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81360.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:25:51,888 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81362.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:26:19,228 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.872e+02 2.620e+02 3.077e+02 3.710e+02 7.565e+02, threshold=6.154e+02, percent-clipped=6.0 +2023-03-09 16:26:26,924 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81392.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:26:35,531 INFO [train.py:898] (1/4) Epoch 23, batch 1450, loss[loss=0.1611, simple_loss=0.2527, pruned_loss=0.03474, over 18636.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2514, pruned_loss=0.03596, over 3561369.93 frames. ], batch size: 52, lr: 4.91e-03, grad_scale: 4.0 +2023-03-09 16:26:58,400 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. 
limit=2.0 +2023-03-09 16:27:10,803 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81429.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:27:23,603 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81440.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:27:34,459 INFO [train.py:898] (1/4) Epoch 23, batch 1500, loss[loss=0.1808, simple_loss=0.2652, pruned_loss=0.04816, over 12363.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2515, pruned_loss=0.03603, over 3553887.97 frames. ], batch size: 130, lr: 4.91e-03, grad_scale: 4.0 +2023-03-09 16:28:01,897 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81472.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:28:07,465 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81477.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:28:17,384 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.729e+02 2.620e+02 2.957e+02 3.344e+02 7.866e+02, threshold=5.914e+02, percent-clipped=3.0 +2023-03-09 16:28:32,684 INFO [train.py:898] (1/4) Epoch 23, batch 1550, loss[loss=0.1435, simple_loss=0.2308, pruned_loss=0.02813, over 18570.00 frames. ], tot_loss[loss=0.1616, simple_loss=0.2514, pruned_loss=0.03595, over 3560005.65 frames. ], batch size: 49, lr: 4.90e-03, grad_scale: 4.0 +2023-03-09 16:29:31,015 INFO [train.py:898] (1/4) Epoch 23, batch 1600, loss[loss=0.1326, simple_loss=0.2246, pruned_loss=0.02027, over 18251.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2517, pruned_loss=0.03583, over 3576501.76 frames. ], batch size: 47, lr: 4.90e-03, grad_scale: 8.0 +2023-03-09 16:29:50,107 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0456, 4.6991, 4.7122, 3.6303, 3.8802, 3.8562, 2.9283, 2.9691], + device='cuda:1'), covar=tensor([0.0216, 0.0154, 0.0087, 0.0287, 0.0347, 0.0197, 0.0661, 0.0746], + device='cuda:1'), in_proj_covar=tensor([0.0073, 0.0061, 0.0065, 0.0070, 0.0091, 0.0067, 0.0078, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 16:30:15,102 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.871e+02 2.795e+02 3.306e+02 4.129e+02 9.714e+02, threshold=6.611e+02, percent-clipped=7.0 +2023-03-09 16:30:29,166 INFO [train.py:898] (1/4) Epoch 23, batch 1650, loss[loss=0.1621, simple_loss=0.2551, pruned_loss=0.0345, over 18546.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.2505, pruned_loss=0.0354, over 3585675.31 frames. ], batch size: 49, lr: 4.90e-03, grad_scale: 4.0 +2023-03-09 16:31:28,181 INFO [train.py:898] (1/4) Epoch 23, batch 1700, loss[loss=0.1672, simple_loss=0.2661, pruned_loss=0.03417, over 18481.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2498, pruned_loss=0.03509, over 3579685.64 frames. 
], batch size: 53, lr: 4.90e-03, grad_scale: 4.0 +2023-03-09 16:31:35,084 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81655.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:31:38,645 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81657.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:31:38,809 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5506, 2.8797, 4.1871, 3.4636, 2.5522, 4.3922, 3.8636, 2.6220], + device='cuda:1'), covar=tensor([0.0511, 0.1411, 0.0312, 0.0507, 0.1693, 0.0258, 0.0594, 0.1065], + device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0239, 0.0215, 0.0167, 0.0227, 0.0213, 0.0250, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 16:31:53,670 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81670.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:32:13,034 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 2.453e+02 2.824e+02 3.386e+02 8.042e+02, threshold=5.649e+02, percent-clipped=1.0 +2023-03-09 16:32:26,757 INFO [train.py:898] (1/4) Epoch 23, batch 1750, loss[loss=0.1735, simple_loss=0.2649, pruned_loss=0.04102, over 16925.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2497, pruned_loss=0.03512, over 3574504.55 frames. ], batch size: 78, lr: 4.90e-03, grad_scale: 4.0 +2023-03-09 16:32:36,892 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-03-09 16:33:01,153 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8751, 5.3613, 5.2958, 5.3333, 4.7921, 5.1943, 4.6586, 5.1978], + device='cuda:1'), covar=tensor([0.0215, 0.0247, 0.0184, 0.0337, 0.0419, 0.0220, 0.1000, 0.0289], + device='cuda:1'), in_proj_covar=tensor([0.0218, 0.0262, 0.0257, 0.0333, 0.0273, 0.0268, 0.0306, 0.0261], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 16:33:04,533 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81731.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 16:33:25,097 INFO [train.py:898] (1/4) Epoch 23, batch 1800, loss[loss=0.156, simple_loss=0.2507, pruned_loss=0.03059, over 17050.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.251, pruned_loss=0.03544, over 3571318.31 frames. ], batch size: 78, lr: 4.90e-03, grad_scale: 2.0 +2023-03-09 16:33:48,970 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8617, 3.0905, 4.4423, 3.9244, 2.7016, 4.5945, 4.0428, 2.6286], + device='cuda:1'), covar=tensor([0.0421, 0.1423, 0.0323, 0.0413, 0.1730, 0.0232, 0.0547, 0.1285], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0240, 0.0216, 0.0168, 0.0227, 0.0212, 0.0251, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 16:33:52,246 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81772.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:34:10,544 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.038e+02 2.659e+02 3.003e+02 3.499e+02 8.184e+02, threshold=6.005e+02, percent-clipped=4.0 +2023-03-09 16:34:23,257 INFO [train.py:898] (1/4) Epoch 23, batch 1850, loss[loss=0.1702, simple_loss=0.2587, pruned_loss=0.04083, over 18268.00 frames. 
], tot_loss[loss=0.1607, simple_loss=0.2506, pruned_loss=0.0354, over 3564696.96 frames. ], batch size: 60, lr: 4.90e-03, grad_scale: 2.0 +2023-03-09 16:34:25,367 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-09 16:34:48,396 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81820.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:34:49,935 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6223, 2.3040, 2.5777, 2.5836, 3.1613, 4.7383, 4.6896, 3.1840], + device='cuda:1'), covar=tensor([0.1896, 0.2530, 0.2999, 0.2018, 0.2481, 0.0265, 0.0366, 0.1045], + device='cuda:1'), in_proj_covar=tensor([0.0308, 0.0351, 0.0388, 0.0282, 0.0392, 0.0252, 0.0298, 0.0260], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 16:35:21,108 INFO [train.py:898] (1/4) Epoch 23, batch 1900, loss[loss=0.2081, simple_loss=0.2854, pruned_loss=0.06536, over 12378.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2508, pruned_loss=0.03549, over 3576047.69 frames. ], batch size: 131, lr: 4.89e-03, grad_scale: 2.0 +2023-03-09 16:35:25,837 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.6377, 6.1469, 5.7373, 5.9250, 5.7638, 5.5813, 6.2418, 6.1614], + device='cuda:1'), covar=tensor([0.1225, 0.0782, 0.0439, 0.0688, 0.1446, 0.0708, 0.0614, 0.0725], + device='cuda:1'), in_proj_covar=tensor([0.0626, 0.0541, 0.0391, 0.0569, 0.0774, 0.0564, 0.0781, 0.0590], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:1') +2023-03-09 16:35:56,088 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6861, 3.0547, 4.3116, 3.5283, 2.8802, 4.4686, 3.9129, 2.8048], + device='cuda:1'), covar=tensor([0.0499, 0.1312, 0.0294, 0.0521, 0.1372, 0.0207, 0.0574, 0.1035], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0240, 0.0216, 0.0167, 0.0227, 0.0213, 0.0251, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 16:36:07,196 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.831e+02 2.605e+02 3.105e+02 3.631e+02 5.941e+02, threshold=6.209e+02, percent-clipped=0.0 +2023-03-09 16:36:16,860 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5316, 5.5291, 5.1409, 5.4421, 5.4725, 4.8752, 5.3667, 5.1272], + device='cuda:1'), covar=tensor([0.0437, 0.0420, 0.1250, 0.0834, 0.0555, 0.0397, 0.0433, 0.0979], + device='cuda:1'), in_proj_covar=tensor([0.0497, 0.0564, 0.0704, 0.0440, 0.0458, 0.0516, 0.0543, 0.0679], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 16:36:20,015 INFO [train.py:898] (1/4) Epoch 23, batch 1950, loss[loss=0.1448, simple_loss=0.2252, pruned_loss=0.0322, over 18487.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2512, pruned_loss=0.03551, over 3580541.72 frames. ], batch size: 44, lr: 4.89e-03, grad_scale: 2.0 +2023-03-09 16:37:17,995 INFO [train.py:898] (1/4) Epoch 23, batch 2000, loss[loss=0.1664, simple_loss=0.2605, pruned_loss=0.03618, over 18365.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.2507, pruned_loss=0.0354, over 3584352.47 frames. 
], batch size: 56, lr: 4.89e-03, grad_scale: 4.0 +2023-03-09 16:37:25,081 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81955.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:37:27,397 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81957.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:37:57,848 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.11 vs. limit=5.0 +2023-03-09 16:38:03,329 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81987.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:38:04,115 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.810e+02 2.510e+02 2.896e+02 3.401e+02 7.542e+02, threshold=5.791e+02, percent-clipped=1.0 +2023-03-09 16:38:17,072 INFO [train.py:898] (1/4) Epoch 23, batch 2050, loss[loss=0.1423, simple_loss=0.2231, pruned_loss=0.03074, over 17736.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2503, pruned_loss=0.0351, over 3591906.48 frames. ], batch size: 39, lr: 4.89e-03, grad_scale: 4.0 +2023-03-09 16:38:26,632 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82003.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:38:28,883 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82005.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:38:54,186 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82026.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 16:39:05,674 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6522, 2.3055, 2.6759, 2.6884, 3.1840, 4.8142, 4.7947, 3.2341], + device='cuda:1'), covar=tensor([0.1893, 0.2457, 0.2909, 0.1884, 0.2385, 0.0246, 0.0325, 0.1002], + device='cuda:1'), in_proj_covar=tensor([0.0307, 0.0349, 0.0385, 0.0281, 0.0390, 0.0250, 0.0297, 0.0259], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 16:39:18,981 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82048.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:39:19,763 INFO [train.py:898] (1/4) Epoch 23, batch 2100, loss[loss=0.1542, simple_loss=0.2467, pruned_loss=0.03086, over 18393.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2507, pruned_loss=0.03519, over 3576405.77 frames. ], batch size: 50, lr: 4.89e-03, grad_scale: 4.0 +2023-03-09 16:39:27,662 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82055.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:39:46,104 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7935, 2.9959, 2.6566, 2.9766, 3.7757, 3.7381, 3.3266, 3.1069], + device='cuda:1'), covar=tensor([0.0168, 0.0283, 0.0576, 0.0351, 0.0177, 0.0148, 0.0342, 0.0372], + device='cuda:1'), in_proj_covar=tensor([0.0140, 0.0138, 0.0162, 0.0160, 0.0134, 0.0120, 0.0155, 0.0159], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 16:40:05,446 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.125e+02 2.645e+02 3.113e+02 3.776e+02 5.395e+02, threshold=6.226e+02, percent-clipped=0.0 +2023-03-09 16:40:18,012 INFO [train.py:898] (1/4) Epoch 23, batch 2150, loss[loss=0.1425, simple_loss=0.2251, pruned_loss=0.02992, over 17741.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2509, pruned_loss=0.0355, over 3564397.75 frames. 
], batch size: 39, lr: 4.89e-03, grad_scale: 4.0 +2023-03-09 16:40:29,451 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-03-09 16:40:38,325 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82116.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:41:16,326 INFO [train.py:898] (1/4) Epoch 23, batch 2200, loss[loss=0.1844, simple_loss=0.2734, pruned_loss=0.04774, over 17873.00 frames. ], tot_loss[loss=0.161, simple_loss=0.2507, pruned_loss=0.03563, over 3563129.44 frames. ], batch size: 70, lr: 4.88e-03, grad_scale: 4.0 +2023-03-09 16:41:18,903 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8219, 3.5988, 5.0955, 3.0332, 4.3979, 2.6781, 3.1204, 1.9306], + device='cuda:1'), covar=tensor([0.1226, 0.0983, 0.0142, 0.0907, 0.0542, 0.2532, 0.2726, 0.2149], + device='cuda:1'), in_proj_covar=tensor([0.0227, 0.0250, 0.0206, 0.0204, 0.0263, 0.0279, 0.0331, 0.0243], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 16:41:34,861 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1082, 3.1937, 4.6224, 3.9126, 3.1143, 4.8890, 4.1501, 3.1835], + device='cuda:1'), covar=tensor([0.0375, 0.1309, 0.0230, 0.0456, 0.1318, 0.0158, 0.0522, 0.0860], + device='cuda:1'), in_proj_covar=tensor([0.0216, 0.0240, 0.0217, 0.0167, 0.0227, 0.0212, 0.0251, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 16:41:45,553 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0163, 5.1685, 5.2789, 5.3482, 4.9410, 5.7875, 5.4147, 5.0398], + device='cuda:1'), covar=tensor([0.1179, 0.0772, 0.0903, 0.0792, 0.1447, 0.0797, 0.0766, 0.1866], + device='cuda:1'), in_proj_covar=tensor([0.0368, 0.0294, 0.0318, 0.0321, 0.0336, 0.0436, 0.0290, 0.0428], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 16:42:02,195 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.746e+02 2.684e+02 3.246e+02 4.259e+02 8.856e+02, threshold=6.492e+02, percent-clipped=7.0 +2023-03-09 16:42:14,749 INFO [train.py:898] (1/4) Epoch 23, batch 2250, loss[loss=0.1542, simple_loss=0.2387, pruned_loss=0.03483, over 18303.00 frames. ], tot_loss[loss=0.1622, simple_loss=0.2521, pruned_loss=0.0362, over 3562275.29 frames. ], batch size: 49, lr: 4.88e-03, grad_scale: 4.0 +2023-03-09 16:43:13,053 INFO [train.py:898] (1/4) Epoch 23, batch 2300, loss[loss=0.1596, simple_loss=0.2454, pruned_loss=0.03685, over 18440.00 frames. ], tot_loss[loss=0.1622, simple_loss=0.2525, pruned_loss=0.036, over 3577791.21 frames. ], batch size: 43, lr: 4.88e-03, grad_scale: 4.0 +2023-03-09 16:43:22,919 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-03-09 16:43:59,037 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.010e+02 2.750e+02 3.270e+02 3.982e+02 6.063e+02, threshold=6.540e+02, percent-clipped=0.0 +2023-03-09 16:44:11,843 INFO [train.py:898] (1/4) Epoch 23, batch 2350, loss[loss=0.1631, simple_loss=0.2573, pruned_loss=0.03439, over 18495.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2526, pruned_loss=0.03604, over 3576205.81 frames. 
], batch size: 51, lr: 4.88e-03, grad_scale: 4.0 +2023-03-09 16:44:41,156 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7069, 3.7656, 3.5438, 3.1873, 3.4368, 2.8620, 2.7853, 3.7674], + device='cuda:1'), covar=tensor([0.0061, 0.0084, 0.0080, 0.0135, 0.0104, 0.0187, 0.0221, 0.0062], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0167, 0.0140, 0.0194, 0.0148, 0.0185, 0.0188, 0.0126], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 16:44:43,817 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82326.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:45:04,036 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82343.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:45:07,555 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82346.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:45:10,706 INFO [train.py:898] (1/4) Epoch 23, batch 2400, loss[loss=0.144, simple_loss=0.2302, pruned_loss=0.02889, over 18158.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.252, pruned_loss=0.03573, over 3592290.75 frames. ], batch size: 44, lr: 4.88e-03, grad_scale: 8.0 +2023-03-09 16:45:39,463 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82374.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:45:55,567 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.666e+02 2.631e+02 3.119e+02 3.737e+02 9.140e+02, threshold=6.238e+02, percent-clipped=2.0 +2023-03-09 16:46:09,288 INFO [train.py:898] (1/4) Epoch 23, batch 2450, loss[loss=0.1741, simple_loss=0.2673, pruned_loss=0.04045, over 17733.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2511, pruned_loss=0.0354, over 3595218.10 frames. ], batch size: 70, lr: 4.88e-03, grad_scale: 8.0 +2023-03-09 16:46:18,836 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82407.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:46:23,356 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82411.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:46:40,735 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4575, 5.4528, 5.0634, 5.4295, 5.3748, 4.7947, 5.2903, 5.0055], + device='cuda:1'), covar=tensor([0.0464, 0.0435, 0.1504, 0.0674, 0.0550, 0.0460, 0.0495, 0.1161], + device='cuda:1'), in_proj_covar=tensor([0.0511, 0.0578, 0.0722, 0.0443, 0.0466, 0.0527, 0.0557, 0.0697], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 16:46:49,551 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.82 vs. limit=5.0 +2023-03-09 16:47:08,005 INFO [train.py:898] (1/4) Epoch 23, batch 2500, loss[loss=0.1686, simple_loss=0.2582, pruned_loss=0.03951, over 18186.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.2506, pruned_loss=0.03532, over 3599109.53 frames. ], batch size: 60, lr: 4.88e-03, grad_scale: 8.0 +2023-03-09 16:47:46,328 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. 
limit=2.0 +2023-03-09 16:47:52,902 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.741e+02 2.534e+02 3.083e+02 3.529e+02 5.828e+02, threshold=6.166e+02, percent-clipped=0.0 +2023-03-09 16:47:58,764 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82493.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 16:48:06,026 INFO [train.py:898] (1/4) Epoch 23, batch 2550, loss[loss=0.1467, simple_loss=0.2318, pruned_loss=0.03075, over 18491.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2504, pruned_loss=0.03519, over 3594373.35 frames. ], batch size: 47, lr: 4.87e-03, grad_scale: 8.0 +2023-03-09 16:48:56,513 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.2253, 2.6026, 2.3781, 2.6405, 3.3005, 3.3048, 3.0278, 2.7284], + device='cuda:1'), covar=tensor([0.0186, 0.0267, 0.0530, 0.0381, 0.0221, 0.0145, 0.0350, 0.0384], + device='cuda:1'), in_proj_covar=tensor([0.0139, 0.0139, 0.0163, 0.0160, 0.0135, 0.0120, 0.0155, 0.0160], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 16:49:03,666 INFO [train.py:898] (1/4) Epoch 23, batch 2600, loss[loss=0.1704, simple_loss=0.2597, pruned_loss=0.0405, over 17825.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.2507, pruned_loss=0.03526, over 3592657.76 frames. ], batch size: 70, lr: 4.87e-03, grad_scale: 8.0 +2023-03-09 16:49:09,638 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-03-09 16:49:10,286 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82554.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 16:49:20,730 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-09 16:49:27,065 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82569.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:49:35,130 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8101, 3.7988, 3.5631, 3.2758, 3.4711, 2.9636, 2.9276, 3.7993], + device='cuda:1'), covar=tensor([0.0059, 0.0081, 0.0090, 0.0134, 0.0101, 0.0183, 0.0199, 0.0066], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0166, 0.0139, 0.0192, 0.0147, 0.0183, 0.0186, 0.0125], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 16:49:49,705 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.492e+02 2.620e+02 3.051e+02 3.714e+02 9.693e+02, threshold=6.103e+02, percent-clipped=7.0 +2023-03-09 16:50:01,837 INFO [train.py:898] (1/4) Epoch 23, batch 2650, loss[loss=0.2018, simple_loss=0.2787, pruned_loss=0.06246, over 12419.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.2508, pruned_loss=0.03526, over 3592879.09 frames. 
], batch size: 130, lr: 4.87e-03, grad_scale: 4.0 +2023-03-09 16:50:30,552 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5731, 5.5163, 5.1836, 5.4769, 5.4306, 4.8750, 5.3976, 5.1507], + device='cuda:1'), covar=tensor([0.0355, 0.0425, 0.1180, 0.0719, 0.0595, 0.0388, 0.0366, 0.0981], + device='cuda:1'), in_proj_covar=tensor([0.0505, 0.0575, 0.0721, 0.0442, 0.0464, 0.0525, 0.0552, 0.0694], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 16:50:38,566 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82630.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:50:54,247 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82643.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:51:00,473 INFO [train.py:898] (1/4) Epoch 23, batch 2700, loss[loss=0.1488, simple_loss=0.2382, pruned_loss=0.02968, over 18381.00 frames. ], tot_loss[loss=0.161, simple_loss=0.2511, pruned_loss=0.03544, over 3577956.36 frames. ], batch size: 42, lr: 4.87e-03, grad_scale: 4.0 +2023-03-09 16:51:22,382 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-03-09 16:51:46,590 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.924e+02 2.721e+02 3.267e+02 3.832e+02 9.086e+02, threshold=6.534e+02, percent-clipped=2.0 +2023-03-09 16:51:49,672 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82691.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:51:58,553 INFO [train.py:898] (1/4) Epoch 23, batch 2750, loss[loss=0.1567, simple_loss=0.2502, pruned_loss=0.03161, over 18482.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2517, pruned_loss=0.03566, over 3575911.00 frames. ], batch size: 51, lr: 4.87e-03, grad_scale: 4.0 +2023-03-09 16:52:02,231 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82702.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:52:12,923 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82711.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:52:30,859 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7550, 3.8031, 3.5459, 3.1224, 3.4512, 2.8097, 2.7402, 3.7729], + device='cuda:1'), covar=tensor([0.0057, 0.0070, 0.0093, 0.0139, 0.0093, 0.0199, 0.0227, 0.0074], + device='cuda:1'), in_proj_covar=tensor([0.0147, 0.0167, 0.0139, 0.0194, 0.0148, 0.0184, 0.0188, 0.0126], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 16:52:55,994 INFO [train.py:898] (1/4) Epoch 23, batch 2800, loss[loss=0.1651, simple_loss=0.255, pruned_loss=0.03759, over 18316.00 frames. ], tot_loss[loss=0.161, simple_loss=0.2512, pruned_loss=0.03542, over 3580375.61 frames. 
], batch size: 54, lr: 4.87e-03, grad_scale: 8.0 +2023-03-09 16:53:08,023 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82759.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:53:24,522 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7827, 5.2681, 5.2034, 5.2789, 4.7324, 5.1248, 4.6195, 5.0953], + device='cuda:1'), covar=tensor([0.0244, 0.0267, 0.0215, 0.0390, 0.0410, 0.0228, 0.1048, 0.0315], + device='cuda:1'), in_proj_covar=tensor([0.0220, 0.0264, 0.0260, 0.0337, 0.0276, 0.0274, 0.0309, 0.0264], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 16:53:31,399 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8211, 3.6704, 5.2414, 4.7030, 3.3840, 3.2317, 4.7305, 5.3971], + device='cuda:1'), covar=tensor([0.0809, 0.1693, 0.0146, 0.0335, 0.0935, 0.1154, 0.0340, 0.0237], + device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0280, 0.0161, 0.0186, 0.0194, 0.0194, 0.0199, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 16:53:42,292 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.820e+02 2.582e+02 3.010e+02 3.495e+02 9.650e+02, threshold=6.020e+02, percent-clipped=2.0 +2023-03-09 16:53:54,216 INFO [train.py:898] (1/4) Epoch 23, batch 2850, loss[loss=0.167, simple_loss=0.2647, pruned_loss=0.03461, over 17127.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2513, pruned_loss=0.03546, over 3579425.61 frames. ], batch size: 78, lr: 4.87e-03, grad_scale: 8.0 +2023-03-09 16:54:25,111 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8652, 4.4249, 4.4501, 3.4172, 3.6711, 3.5039, 2.3356, 2.5301], + device='cuda:1'), covar=tensor([0.0214, 0.0150, 0.0091, 0.0310, 0.0381, 0.0228, 0.0837, 0.0895], + device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0062, 0.0065, 0.0070, 0.0091, 0.0068, 0.0078, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 16:54:28,432 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.6149, 6.1784, 5.6527, 5.9599, 5.7733, 5.5491, 6.2384, 6.1948], + device='cuda:1'), covar=tensor([0.1119, 0.0682, 0.0390, 0.0609, 0.1213, 0.0642, 0.0505, 0.0663], + device='cuda:1'), in_proj_covar=tensor([0.0615, 0.0538, 0.0385, 0.0560, 0.0761, 0.0554, 0.0768, 0.0583], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:1') +2023-03-09 16:54:41,140 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9269, 3.1023, 2.8674, 3.0496, 3.8944, 3.8765, 3.3453, 3.1719], + device='cuda:1'), covar=tensor([0.0175, 0.0253, 0.0479, 0.0368, 0.0166, 0.0136, 0.0370, 0.0368], + device='cuda:1'), in_proj_covar=tensor([0.0140, 0.0138, 0.0163, 0.0160, 0.0134, 0.0120, 0.0157, 0.0160], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 16:54:52,995 INFO [train.py:898] (1/4) Epoch 23, batch 2900, loss[loss=0.1643, simple_loss=0.2536, pruned_loss=0.03746, over 17122.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.2507, pruned_loss=0.03524, over 3584826.62 frames. 
], batch size: 78, lr: 4.86e-03, grad_scale: 8.0 +2023-03-09 16:54:53,185 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82849.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 16:55:21,976 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8918, 4.9309, 2.7688, 4.7526, 4.7089, 4.9515, 4.7788, 2.6948], + device='cuda:1'), covar=tensor([0.0210, 0.0068, 0.0740, 0.0103, 0.0076, 0.0078, 0.0098, 0.0878], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0081, 0.0096, 0.0096, 0.0087, 0.0078, 0.0086, 0.0097], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 16:55:39,402 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.765e+02 2.643e+02 2.969e+02 3.593e+02 6.828e+02, threshold=5.939e+02, percent-clipped=2.0 +2023-03-09 16:55:46,606 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0465, 5.5004, 3.0096, 5.3155, 5.1775, 5.5271, 5.3700, 2.9629], + device='cuda:1'), covar=tensor([0.0207, 0.0054, 0.0721, 0.0066, 0.0074, 0.0067, 0.0076, 0.0809], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0082, 0.0097, 0.0097, 0.0088, 0.0078, 0.0086, 0.0097], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 16:55:51,340 INFO [train.py:898] (1/4) Epoch 23, batch 2950, loss[loss=0.161, simple_loss=0.2509, pruned_loss=0.0356, over 18417.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2509, pruned_loss=0.0354, over 3573671.62 frames. ], batch size: 48, lr: 4.86e-03, grad_scale: 8.0 +2023-03-09 16:55:56,057 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.49 vs. limit=5.0 +2023-03-09 16:56:04,026 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-09 16:56:10,629 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9207, 5.0245, 4.9863, 4.7432, 4.7922, 4.7377, 5.0931, 5.1095], + device='cuda:1'), covar=tensor([0.0072, 0.0057, 0.0070, 0.0114, 0.0063, 0.0187, 0.0081, 0.0087], + device='cuda:1'), in_proj_covar=tensor([0.0097, 0.0071, 0.0076, 0.0095, 0.0076, 0.0106, 0.0089, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 16:56:22,404 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82925.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:56:49,686 INFO [train.py:898] (1/4) Epoch 23, batch 3000, loss[loss=0.1518, simple_loss=0.2509, pruned_loss=0.02632, over 18307.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2506, pruned_loss=0.03545, over 3572779.31 frames. ], batch size: 54, lr: 4.86e-03, grad_scale: 8.0 +2023-03-09 16:56:49,686 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 16:57:01,577 INFO [train.py:932] (1/4) Epoch 23, validation: loss=0.1503, simple_loss=0.2492, pruned_loss=0.02572, over 944034.00 frames. 
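The three loss fields in each train/validation entry are mutually consistent with a fixed linear combination, loss = 0.5 * simple_loss + pruned_loss: for the validation entry just logged, 0.5 * 0.2492 + 0.02572 = 0.15032, matching the reported loss=0.1503, and the same relation holds for the per-batch and tot_loss values throughout this epoch (the fractional frame counts on tot_loss suggest it is a frames-weighted running aggregate rather than a plain sum). A minimal Python sketch of that combination follows; the 0.5 scale is inferred from the logged numbers, not read from the recipe source, and the blending the recipe applies during early warmup batches is deliberately omitted.

def combined_loss(simple_loss: float, pruned_loss: float,
                  simple_loss_scale: float = 0.5) -> float:
    # Combine the two RNN-T loss terms the way the logged values suggest:
    # a fixed scale on the simple (full-lattice) loss plus the full
    # pruned loss. The scale of 0.5 is inferred from the log itself.
    return simple_loss_scale * simple_loss + pruned_loss

# Validation entry above: loss=0.1503, simple_loss=0.2492, pruned_loss=0.02572
assert abs(combined_loss(0.2492, 0.02572) - 0.1503) < 5e-4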
+2023-03-09 16:57:01,577 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 16:57:19,230 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82963.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:57:48,595 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.819e+02 2.491e+02 2.993e+02 3.582e+02 9.758e+02, threshold=5.985e+02, percent-clipped=2.0 +2023-03-09 16:58:00,493 INFO [train.py:898] (1/4) Epoch 23, batch 3050, loss[loss=0.1566, simple_loss=0.2534, pruned_loss=0.02991, over 18556.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.25, pruned_loss=0.03517, over 3573807.95 frames. ], batch size: 54, lr: 4.86e-03, grad_scale: 8.0 +2023-03-09 16:58:02,486 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7561, 2.9883, 2.6703, 2.9080, 3.7503, 3.7203, 3.2376, 3.0594], + device='cuda:1'), covar=tensor([0.0157, 0.0271, 0.0508, 0.0402, 0.0154, 0.0145, 0.0345, 0.0365], + device='cuda:1'), in_proj_covar=tensor([0.0139, 0.0138, 0.0162, 0.0159, 0.0133, 0.0119, 0.0156, 0.0159], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 16:58:04,574 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83002.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:58:30,293 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83024.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:58:57,719 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4863, 2.8448, 2.5492, 2.8153, 3.5428, 3.4984, 3.0514, 2.8276], + device='cuda:1'), covar=tensor([0.0209, 0.0278, 0.0526, 0.0392, 0.0187, 0.0172, 0.0436, 0.0397], + device='cuda:1'), in_proj_covar=tensor([0.0139, 0.0138, 0.0162, 0.0160, 0.0133, 0.0120, 0.0156, 0.0160], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 16:58:59,525 INFO [train.py:898] (1/4) Epoch 23, batch 3100, loss[loss=0.1569, simple_loss=0.2425, pruned_loss=0.03569, over 18266.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2499, pruned_loss=0.03518, over 3586372.82 frames. ], batch size: 47, lr: 4.86e-03, grad_scale: 4.0 +2023-03-09 16:59:00,886 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83050.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:59:46,643 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.874e+02 2.570e+02 3.037e+02 3.721e+02 1.351e+03, threshold=6.073e+02, percent-clipped=1.0 +2023-03-09 16:59:57,763 INFO [train.py:898] (1/4) Epoch 23, batch 3150, loss[loss=0.1635, simple_loss=0.2518, pruned_loss=0.03767, over 18271.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2498, pruned_loss=0.0353, over 3582754.12 frames. 
], batch size: 49, lr: 4.86e-03, grad_scale: 4.0 +2023-03-09 17:00:17,141 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6858, 3.6320, 4.9830, 4.4322, 3.3607, 3.1305, 4.4194, 5.1793], + device='cuda:1'), covar=tensor([0.0831, 0.1481, 0.0212, 0.0376, 0.0906, 0.1121, 0.0389, 0.0298], + device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0278, 0.0161, 0.0185, 0.0194, 0.0193, 0.0198, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 17:00:51,123 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7233, 4.6539, 4.7743, 4.4809, 4.4883, 4.6395, 4.8767, 4.7992], + device='cuda:1'), covar=tensor([0.0113, 0.0098, 0.0133, 0.0149, 0.0107, 0.0182, 0.0097, 0.0127], + device='cuda:1'), in_proj_covar=tensor([0.0096, 0.0071, 0.0076, 0.0095, 0.0076, 0.0106, 0.0089, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 17:00:55,292 INFO [train.py:898] (1/4) Epoch 23, batch 3200, loss[loss=0.1446, simple_loss=0.2231, pruned_loss=0.03301, over 18476.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2489, pruned_loss=0.03494, over 3595481.12 frames. ], batch size: 44, lr: 4.86e-03, grad_scale: 8.0 +2023-03-09 17:00:55,625 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83149.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 17:01:30,663 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9975, 4.5882, 4.6187, 3.7087, 3.9147, 3.6122, 2.9723, 2.7340], + device='cuda:1'), covar=tensor([0.0191, 0.0165, 0.0079, 0.0258, 0.0290, 0.0211, 0.0620, 0.0764], + device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0062, 0.0064, 0.0069, 0.0091, 0.0068, 0.0078, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 17:01:43,024 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 2.631e+02 3.256e+02 4.142e+02 1.137e+03, threshold=6.512e+02, percent-clipped=5.0 +2023-03-09 17:01:50,252 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83196.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:01:51,141 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83197.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 17:01:53,671 INFO [train.py:898] (1/4) Epoch 23, batch 3250, loss[loss=0.1623, simple_loss=0.2563, pruned_loss=0.03419, over 18356.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.2494, pruned_loss=0.03526, over 3575915.30 frames. ], batch size: 55, lr: 4.85e-03, grad_scale: 8.0 +2023-03-09 17:01:58,000 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. 
limit=2.0 +2023-03-09 17:02:04,735 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0990, 5.5906, 5.6088, 5.5939, 5.0619, 5.5061, 4.9635, 5.4641], + device='cuda:1'), covar=tensor([0.0217, 0.0253, 0.0146, 0.0359, 0.0339, 0.0186, 0.0907, 0.0282], + device='cuda:1'), in_proj_covar=tensor([0.0221, 0.0265, 0.0259, 0.0337, 0.0277, 0.0275, 0.0309, 0.0265], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 17:02:11,751 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8512, 5.2295, 2.6457, 5.1812, 4.8395, 5.1929, 5.0892, 2.4061], + device='cuda:1'), covar=tensor([0.0290, 0.0101, 0.1005, 0.0091, 0.0119, 0.0134, 0.0125, 0.1298], + device='cuda:1'), in_proj_covar=tensor([0.0091, 0.0082, 0.0098, 0.0097, 0.0089, 0.0079, 0.0087, 0.0098], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 17:02:25,229 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83225.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:02:51,887 INFO [train.py:898] (1/4) Epoch 23, batch 3300, loss[loss=0.1596, simple_loss=0.2549, pruned_loss=0.03215, over 18369.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2497, pruned_loss=0.03527, over 3575917.11 frames. ], batch size: 55, lr: 4.85e-03, grad_scale: 8.0 +2023-03-09 17:03:02,536 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83257.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:03:21,863 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83273.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:03:29,998 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6036, 4.0963, 4.0558, 3.2556, 3.5276, 3.2622, 2.6868, 2.2622], + device='cuda:1'), covar=tensor([0.0225, 0.0189, 0.0111, 0.0328, 0.0333, 0.0234, 0.0697, 0.0924], + device='cuda:1'), in_proj_covar=tensor([0.0073, 0.0062, 0.0065, 0.0070, 0.0091, 0.0068, 0.0078, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 17:03:34,333 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83284.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:03:40,704 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.708e+02 2.590e+02 3.037e+02 3.666e+02 6.079e+02, threshold=6.073e+02, percent-clipped=0.0 +2023-03-09 17:03:50,690 INFO [train.py:898] (1/4) Epoch 23, batch 3350, loss[loss=0.1672, simple_loss=0.2611, pruned_loss=0.03663, over 18492.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.2507, pruned_loss=0.03537, over 3575444.31 frames. ], batch size: 53, lr: 4.85e-03, grad_scale: 8.0 +2023-03-09 17:04:14,280 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83319.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:04:45,123 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83345.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:04:49,292 INFO [train.py:898] (1/4) Epoch 23, batch 3400, loss[loss=0.1684, simple_loss=0.2644, pruned_loss=0.03619, over 18360.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2501, pruned_loss=0.03538, over 3569401.08 frames. 
], batch size: 55, lr: 4.85e-03, grad_scale: 8.0 +2023-03-09 17:05:37,372 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.783e+02 2.666e+02 3.258e+02 4.159e+02 1.161e+03, threshold=6.516e+02, percent-clipped=10.0 +2023-03-09 17:05:39,896 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9610, 4.9584, 4.6170, 4.8783, 4.8809, 4.3456, 4.8278, 4.6254], + device='cuda:1'), covar=tensor([0.0473, 0.0489, 0.1375, 0.0795, 0.0603, 0.0481, 0.0440, 0.1085], + device='cuda:1'), in_proj_covar=tensor([0.0503, 0.0573, 0.0714, 0.0444, 0.0461, 0.0521, 0.0548, 0.0689], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 17:05:47,416 INFO [train.py:898] (1/4) Epoch 23, batch 3450, loss[loss=0.168, simple_loss=0.2611, pruned_loss=0.03747, over 18015.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2497, pruned_loss=0.03532, over 3573129.02 frames. ], batch size: 65, lr: 4.85e-03, grad_scale: 8.0 +2023-03-09 17:06:45,592 INFO [train.py:898] (1/4) Epoch 23, batch 3500, loss[loss=0.1564, simple_loss=0.2518, pruned_loss=0.0305, over 16291.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2496, pruned_loss=0.03533, over 3567429.18 frames. ], batch size: 95, lr: 4.85e-03, grad_scale: 8.0 +2023-03-09 17:06:53,078 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.59 vs. limit=5.0 +2023-03-09 17:07:22,172 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-03-09 17:07:31,221 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.962e+02 2.487e+02 3.071e+02 3.696e+02 9.873e+02, threshold=6.142e+02, percent-clipped=4.0 +2023-03-09 17:07:41,351 INFO [train.py:898] (1/4) Epoch 23, batch 3550, loss[loss=0.1942, simple_loss=0.2737, pruned_loss=0.05732, over 12709.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2504, pruned_loss=0.03573, over 3569677.31 frames. ], batch size: 130, lr: 4.85e-03, grad_scale: 8.0 +2023-03-09 17:07:47,200 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.6390, 6.1717, 5.6449, 6.0155, 5.8593, 5.6293, 6.2644, 6.1914], + device='cuda:1'), covar=tensor([0.1153, 0.0699, 0.0471, 0.0621, 0.1229, 0.0616, 0.0538, 0.0689], + device='cuda:1'), in_proj_covar=tensor([0.0618, 0.0539, 0.0389, 0.0566, 0.0761, 0.0556, 0.0776, 0.0589], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:1') +2023-03-09 17:08:35,249 INFO [train.py:898] (1/4) Epoch 23, batch 3600, loss[loss=0.1469, simple_loss=0.2332, pruned_loss=0.0303, over 17747.00 frames. ], tot_loss[loss=0.1614, simple_loss=0.251, pruned_loss=0.03592, over 3575887.88 frames. ], batch size: 39, lr: 4.84e-03, grad_scale: 8.0 +2023-03-09 17:08:38,651 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83552.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:09:39,853 INFO [train.py:898] (1/4) Epoch 24, batch 0, loss[loss=0.1439, simple_loss=0.2294, pruned_loss=0.0292, over 18260.00 frames. ], tot_loss[loss=0.1439, simple_loss=0.2294, pruned_loss=0.0292, over 18260.00 frames. 
], batch size: 45, lr: 4.74e-03, grad_scale: 8.0 +2023-03-09 17:09:39,854 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 17:09:49,287 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5495, 4.1600, 5.0191, 4.5544, 3.9295, 5.2841, 4.6363, 4.1269], + device='cuda:1'), covar=tensor([0.0283, 0.0814, 0.0184, 0.0276, 0.0972, 0.0164, 0.0370, 0.0581], + device='cuda:1'), in_proj_covar=tensor([0.0212, 0.0238, 0.0219, 0.0166, 0.0226, 0.0212, 0.0251, 0.0195], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 17:09:51,563 INFO [train.py:932] (1/4) Epoch 24, validation: loss=0.1502, simple_loss=0.2499, pruned_loss=0.02529, over 944034.00 frames. +2023-03-09 17:09:51,563 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 17:10:00,558 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.679e+02 2.611e+02 3.172e+02 4.204e+02 1.377e+03, threshold=6.343e+02, percent-clipped=6.0 +2023-03-09 17:10:34,156 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83619.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:10:50,365 INFO [train.py:898] (1/4) Epoch 24, batch 50, loss[loss=0.1469, simple_loss=0.2336, pruned_loss=0.03013, over 18251.00 frames. ], tot_loss[loss=0.1635, simple_loss=0.2558, pruned_loss=0.03566, over 806869.89 frames. ], batch size: 45, lr: 4.74e-03, grad_scale: 8.0 +2023-03-09 17:10:58,584 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83640.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:11:18,246 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-03-09 17:11:30,057 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83667.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:11:48,509 INFO [train.py:898] (1/4) Epoch 24, batch 100, loss[loss=0.1456, simple_loss=0.235, pruned_loss=0.02809, over 18411.00 frames. ], tot_loss[loss=0.16, simple_loss=0.251, pruned_loss=0.03445, over 1435898.54 frames. ], batch size: 48, lr: 4.74e-03, grad_scale: 8.0 +2023-03-09 17:11:58,110 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.623e+02 2.468e+02 2.945e+02 3.630e+02 7.467e+02, threshold=5.891e+02, percent-clipped=1.0 +2023-03-09 17:12:46,018 INFO [train.py:898] (1/4) Epoch 24, batch 150, loss[loss=0.1596, simple_loss=0.2551, pruned_loss=0.03208, over 18574.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2505, pruned_loss=0.03446, over 1922997.86 frames. ], batch size: 54, lr: 4.73e-03, grad_scale: 8.0 +2023-03-09 17:13:18,108 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83760.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:13:30,053 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.42 vs. limit=5.0 +2023-03-09 17:13:39,267 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-09 17:13:43,893 INFO [train.py:898] (1/4) Epoch 24, batch 200, loss[loss=0.1381, simple_loss=0.2194, pruned_loss=0.0284, over 18423.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2493, pruned_loss=0.03441, over 2302411.17 frames. 
], batch size: 43, lr: 4.73e-03, grad_scale: 4.0 +2023-03-09 17:13:53,811 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.895e+02 2.575e+02 2.989e+02 3.608e+02 5.254e+02, threshold=5.979e+02, percent-clipped=0.0 +2023-03-09 17:14:29,003 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83821.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:14:31,444 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6749, 3.0544, 2.3743, 2.9213, 3.7390, 3.7039, 3.1164, 3.0473], + device='cuda:1'), covar=tensor([0.0174, 0.0235, 0.0680, 0.0313, 0.0138, 0.0109, 0.0359, 0.0294], + device='cuda:1'), in_proj_covar=tensor([0.0142, 0.0141, 0.0165, 0.0162, 0.0135, 0.0121, 0.0158, 0.0161], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 17:14:42,253 INFO [train.py:898] (1/4) Epoch 24, batch 250, loss[loss=0.1772, simple_loss=0.2651, pruned_loss=0.04467, over 17940.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2508, pruned_loss=0.03512, over 2589021.68 frames. ], batch size: 65, lr: 4.73e-03, grad_scale: 4.0 +2023-03-09 17:14:56,849 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7953, 5.3048, 5.2660, 5.3145, 4.7633, 5.1461, 4.6756, 5.1205], + device='cuda:1'), covar=tensor([0.0228, 0.0251, 0.0189, 0.0386, 0.0413, 0.0231, 0.1030, 0.0306], + device='cuda:1'), in_proj_covar=tensor([0.0221, 0.0266, 0.0259, 0.0338, 0.0278, 0.0274, 0.0310, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 17:15:04,754 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83852.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:15:40,424 INFO [train.py:898] (1/4) Epoch 24, batch 300, loss[loss=0.1628, simple_loss=0.2544, pruned_loss=0.03554, over 18267.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2508, pruned_loss=0.03499, over 2812314.89 frames. ], batch size: 49, lr: 4.73e-03, grad_scale: 4.0 +2023-03-09 17:15:50,699 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.883e+02 2.677e+02 3.133e+02 3.610e+02 5.796e+02, threshold=6.266e+02, percent-clipped=0.0 +2023-03-09 17:16:00,985 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83900.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:16:25,831 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3614, 2.7651, 2.4114, 2.7299, 3.4749, 3.3483, 3.0034, 2.7612], + device='cuda:1'), covar=tensor([0.0224, 0.0322, 0.0610, 0.0397, 0.0232, 0.0177, 0.0399, 0.0400], + device='cuda:1'), in_proj_covar=tensor([0.0142, 0.0141, 0.0165, 0.0162, 0.0135, 0.0121, 0.0157, 0.0161], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 17:16:38,565 INFO [train.py:898] (1/4) Epoch 24, batch 350, loss[loss=0.1458, simple_loss=0.2308, pruned_loss=0.03041, over 18248.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2503, pruned_loss=0.03512, over 2982898.91 frames. ], batch size: 45, lr: 4.73e-03, grad_scale: 2.0 +2023-03-09 17:16:46,855 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83940.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:16:47,185 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.85 vs. 
limit=5.0 +2023-03-09 17:16:56,904 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83949.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:17:34,049 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5815, 3.3247, 5.1230, 3.1374, 4.4573, 2.4088, 2.8394, 1.8331], + device='cuda:1'), covar=tensor([0.1338, 0.1108, 0.0142, 0.0819, 0.0466, 0.2857, 0.2784, 0.2265], + device='cuda:1'), in_proj_covar=tensor([0.0223, 0.0246, 0.0206, 0.0202, 0.0259, 0.0274, 0.0328, 0.0240], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 17:17:36,994 INFO [train.py:898] (1/4) Epoch 24, batch 400, loss[loss=0.1475, simple_loss=0.2426, pruned_loss=0.02621, over 18391.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2517, pruned_loss=0.03532, over 3095380.24 frames. ], batch size: 50, lr: 4.73e-03, grad_scale: 4.0 +2023-03-09 17:17:39,571 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83985.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:17:42,757 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83988.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:17:48,086 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.909e+02 2.479e+02 2.899e+02 3.539e+02 5.465e+02, threshold=5.798e+02, percent-clipped=0.0 +2023-03-09 17:18:13,500 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84010.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 17:18:39,725 INFO [train.py:898] (1/4) Epoch 24, batch 450, loss[loss=0.1488, simple_loss=0.2352, pruned_loss=0.03121, over 18248.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2513, pruned_loss=0.03543, over 3214057.81 frames. ], batch size: 47, lr: 4.73e-03, grad_scale: 4.0 +2023-03-09 17:18:55,004 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84046.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:18:57,554 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-09 17:19:01,607 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6700, 3.6556, 3.4796, 3.1208, 3.3588, 2.8012, 2.7804, 3.5864], + device='cuda:1'), covar=tensor([0.0073, 0.0107, 0.0092, 0.0146, 0.0101, 0.0203, 0.0228, 0.0097], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0167, 0.0139, 0.0192, 0.0148, 0.0182, 0.0188, 0.0126], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 17:19:38,438 INFO [train.py:898] (1/4) Epoch 24, batch 500, loss[loss=0.1441, simple_loss=0.2283, pruned_loss=0.02996, over 18495.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.251, pruned_loss=0.03515, over 3300138.32 frames. 
], batch size: 44, lr: 4.73e-03, grad_scale: 4.0 +2023-03-09 17:19:40,948 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2674, 5.3430, 5.4931, 5.5065, 5.1785, 6.0032, 5.6378, 5.2786], + device='cuda:1'), covar=tensor([0.1024, 0.0605, 0.0712, 0.0712, 0.1406, 0.0790, 0.0633, 0.1709], + device='cuda:1'), in_proj_covar=tensor([0.0364, 0.0293, 0.0315, 0.0321, 0.0331, 0.0430, 0.0290, 0.0423], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 17:19:49,795 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.687e+02 2.790e+02 3.387e+02 4.220e+02 9.031e+02, threshold=6.774e+02, percent-clipped=2.0 +2023-03-09 17:19:54,640 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84097.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:20:16,873 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84116.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:20:36,452 INFO [train.py:898] (1/4) Epoch 24, batch 550, loss[loss=0.1608, simple_loss=0.2554, pruned_loss=0.03307, over 18309.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.251, pruned_loss=0.03515, over 3365135.05 frames. ], batch size: 54, lr: 4.72e-03, grad_scale: 4.0 +2023-03-09 17:21:05,082 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84158.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:21:12,520 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8641, 3.3505, 4.6142, 3.8444, 2.9816, 4.8021, 4.0179, 3.1456], + device='cuda:1'), covar=tensor([0.0443, 0.1214, 0.0247, 0.0425, 0.1442, 0.0199, 0.0562, 0.0883], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0239, 0.0218, 0.0166, 0.0225, 0.0213, 0.0251, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 17:21:15,982 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8815, 5.3109, 2.9916, 5.1879, 5.0057, 5.3081, 5.1760, 2.7158], + device='cuda:1'), covar=tensor([0.0212, 0.0069, 0.0680, 0.0065, 0.0080, 0.0073, 0.0088, 0.0937], + device='cuda:1'), in_proj_covar=tensor([0.0091, 0.0082, 0.0097, 0.0096, 0.0088, 0.0077, 0.0087, 0.0098], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 17:21:33,936 INFO [train.py:898] (1/4) Epoch 24, batch 600, loss[loss=0.1725, simple_loss=0.2698, pruned_loss=0.03763, over 17874.00 frames. ], tot_loss[loss=0.161, simple_loss=0.2514, pruned_loss=0.03527, over 3423150.48 frames. ], batch size: 65, lr: 4.72e-03, grad_scale: 4.0 +2023-03-09 17:21:45,737 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.833e+02 2.553e+02 3.031e+02 3.741e+02 6.860e+02, threshold=6.062e+02, percent-clipped=1.0 +2023-03-09 17:21:53,140 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84199.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:22:32,599 INFO [train.py:898] (1/4) Epoch 24, batch 650, loss[loss=0.1583, simple_loss=0.2552, pruned_loss=0.03073, over 18373.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2504, pruned_loss=0.03475, over 3474212.28 frames. 
], batch size: 50, lr: 4.72e-03, grad_scale: 4.0 +2023-03-09 17:22:36,508 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84236.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:23:04,099 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84260.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:23:12,780 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2041, 5.3508, 5.4694, 5.4847, 5.1876, 6.0327, 5.6465, 5.2778], + device='cuda:1'), covar=tensor([0.1213, 0.0655, 0.0791, 0.0926, 0.1354, 0.0726, 0.0651, 0.1666], + device='cuda:1'), in_proj_covar=tensor([0.0370, 0.0297, 0.0319, 0.0324, 0.0335, 0.0434, 0.0291, 0.0425], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 17:23:30,932 INFO [train.py:898] (1/4) Epoch 24, batch 700, loss[loss=0.1393, simple_loss=0.2232, pruned_loss=0.02775, over 17174.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.25, pruned_loss=0.03481, over 3496727.65 frames. ], batch size: 38, lr: 4.72e-03, grad_scale: 4.0 +2023-03-09 17:23:42,556 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.851e+02 2.577e+02 2.948e+02 3.805e+02 8.453e+02, threshold=5.897e+02, percent-clipped=4.0 +2023-03-09 17:23:47,458 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84297.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:23:54,341 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8449, 3.2994, 4.6088, 3.8772, 2.8914, 4.7846, 4.0647, 3.1698], + device='cuda:1'), covar=tensor([0.0515, 0.1376, 0.0307, 0.0488, 0.1619, 0.0255, 0.0639, 0.0937], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0242, 0.0220, 0.0168, 0.0227, 0.0216, 0.0254, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 17:23:56,348 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84305.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 17:24:06,585 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6606, 2.8015, 4.2611, 3.6254, 2.8406, 4.4816, 3.8942, 2.7765], + device='cuda:1'), covar=tensor([0.0502, 0.1609, 0.0331, 0.0469, 0.1508, 0.0262, 0.0612, 0.1055], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0243, 0.0221, 0.0168, 0.0227, 0.0216, 0.0255, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 17:24:29,563 INFO [train.py:898] (1/4) Epoch 24, batch 750, loss[loss=0.1663, simple_loss=0.2615, pruned_loss=0.03554, over 17984.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.2502, pruned_loss=0.03485, over 3520119.68 frames. ], batch size: 65, lr: 4.72e-03, grad_scale: 4.0 +2023-03-09 17:24:33,185 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84336.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:24:38,594 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84341.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:25:27,052 INFO [train.py:898] (1/4) Epoch 24, batch 800, loss[loss=0.1683, simple_loss=0.2593, pruned_loss=0.03868, over 18241.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2503, pruned_loss=0.03494, over 3542895.02 frames. 
], batch size: 60, lr: 4.72e-03, grad_scale: 8.0 +2023-03-09 17:25:39,054 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.586e+02 2.623e+02 3.073e+02 3.761e+02 8.070e+02, threshold=6.147e+02, percent-clipped=7.0 +2023-03-09 17:25:43,827 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84397.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:26:05,650 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84416.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:26:16,428 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-09 17:26:21,903 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-03-09 17:26:25,762 INFO [train.py:898] (1/4) Epoch 24, batch 850, loss[loss=0.1416, simple_loss=0.2278, pruned_loss=0.02773, over 18496.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2511, pruned_loss=0.03534, over 3551176.53 frames. ], batch size: 44, lr: 4.72e-03, grad_scale: 8.0 +2023-03-09 17:26:49,151 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84453.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:27:01,313 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84464.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:27:23,392 INFO [train.py:898] (1/4) Epoch 24, batch 900, loss[loss=0.1567, simple_loss=0.2525, pruned_loss=0.03044, over 18391.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.251, pruned_loss=0.03525, over 3553960.55 frames. ], batch size: 52, lr: 4.71e-03, grad_scale: 8.0 +2023-03-09 17:27:28,228 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8737, 3.8610, 3.7158, 3.3804, 3.5250, 2.9806, 3.1038, 3.9293], + device='cuda:1'), covar=tensor([0.0064, 0.0093, 0.0080, 0.0113, 0.0114, 0.0175, 0.0182, 0.0052], + device='cuda:1'), in_proj_covar=tensor([0.0147, 0.0168, 0.0139, 0.0192, 0.0150, 0.0183, 0.0188, 0.0126], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 17:27:35,099 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.772e+02 2.570e+02 3.063e+02 4.034e+02 9.733e+02, threshold=6.126e+02, percent-clipped=4.0 +2023-03-09 17:27:43,962 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-03-09 17:28:11,912 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84525.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:28:21,160 INFO [train.py:898] (1/4) Epoch 24, batch 950, loss[loss=0.1509, simple_loss=0.2289, pruned_loss=0.03644, over 18558.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.251, pruned_loss=0.03525, over 3565019.17 frames. 
], batch size: 45, lr: 4.71e-03, grad_scale: 8.0 +2023-03-09 17:28:46,217 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9232, 5.4001, 5.3819, 5.3959, 4.8106, 5.2551, 4.7383, 5.2746], + device='cuda:1'), covar=tensor([0.0242, 0.0282, 0.0181, 0.0364, 0.0451, 0.0250, 0.1056, 0.0289], + device='cuda:1'), in_proj_covar=tensor([0.0222, 0.0269, 0.0263, 0.0341, 0.0281, 0.0277, 0.0314, 0.0268], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 17:28:47,309 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84555.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:29:19,839 INFO [train.py:898] (1/4) Epoch 24, batch 1000, loss[loss=0.1579, simple_loss=0.2501, pruned_loss=0.03282, over 18368.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2503, pruned_loss=0.03513, over 3569227.58 frames. ], batch size: 55, lr: 4.71e-03, grad_scale: 8.0 +2023-03-09 17:29:24,281 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84586.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:29:30,738 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84592.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:29:31,650 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.810e+02 2.603e+02 3.017e+02 3.476e+02 5.470e+02, threshold=6.035e+02, percent-clipped=0.0 +2023-03-09 17:29:38,195 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6949, 5.1627, 5.1199, 5.2569, 4.6042, 5.0616, 4.0501, 5.0599], + device='cuda:1'), covar=tensor([0.0279, 0.0412, 0.0324, 0.0443, 0.0481, 0.0322, 0.1945, 0.0369], + device='cuda:1'), in_proj_covar=tensor([0.0220, 0.0267, 0.0261, 0.0339, 0.0279, 0.0275, 0.0312, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 17:29:46,274 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84605.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:29:48,614 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9023, 3.8199, 5.3979, 3.2083, 4.8038, 2.7034, 3.3066, 1.9319], + device='cuda:1'), covar=tensor([0.1160, 0.0870, 0.0142, 0.0851, 0.0436, 0.2625, 0.2504, 0.2100], + device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0249, 0.0208, 0.0203, 0.0262, 0.0276, 0.0331, 0.0242], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 17:30:17,803 INFO [train.py:898] (1/4) Epoch 24, batch 1050, loss[loss=0.1829, simple_loss=0.2791, pruned_loss=0.04333, over 17984.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2504, pruned_loss=0.03521, over 3581766.37 frames. 
], batch size: 65, lr: 4.71e-03, grad_scale: 8.0 +2023-03-09 17:30:18,241 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84633.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:30:27,864 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84641.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:30:41,758 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84653.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:30:42,019 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9664, 4.2219, 2.5246, 4.0711, 5.2769, 2.8049, 3.7423, 3.8000], + device='cuda:1'), covar=tensor([0.0169, 0.1363, 0.1434, 0.0558, 0.0075, 0.0956, 0.0629, 0.0828], + device='cuda:1'), in_proj_covar=tensor([0.0174, 0.0272, 0.0206, 0.0200, 0.0133, 0.0184, 0.0218, 0.0226], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 17:31:16,094 INFO [train.py:898] (1/4) Epoch 24, batch 1100, loss[loss=0.1395, simple_loss=0.2266, pruned_loss=0.02616, over 18445.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.25, pruned_loss=0.03475, over 3587782.12 frames. ], batch size: 43, lr: 4.71e-03, grad_scale: 8.0 +2023-03-09 17:31:23,089 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84689.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:31:27,899 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84692.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:31:28,853 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.674e+02 2.619e+02 3.034e+02 3.649e+02 7.124e+02, threshold=6.067e+02, percent-clipped=2.0 +2023-03-09 17:31:30,437 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84694.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:32:14,532 INFO [train.py:898] (1/4) Epoch 24, batch 1150, loss[loss=0.1628, simple_loss=0.2538, pruned_loss=0.03594, over 18567.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2503, pruned_loss=0.03484, over 3597391.66 frames. ], batch size: 54, lr: 4.71e-03, grad_scale: 8.0 +2023-03-09 17:32:38,345 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84753.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:32:49,624 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.73 vs. limit=5.0 +2023-03-09 17:33:12,316 INFO [train.py:898] (1/4) Epoch 24, batch 1200, loss[loss=0.1406, simple_loss=0.2268, pruned_loss=0.02719, over 18181.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2506, pruned_loss=0.03476, over 3610124.65 frames. 
], batch size: 44, lr: 4.71e-03, grad_scale: 8.0 +2023-03-09 17:33:24,630 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.879e+02 2.822e+02 3.199e+02 4.147e+02 1.139e+03, threshold=6.397e+02, percent-clipped=6.0 +2023-03-09 17:33:32,880 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84800.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:33:33,883 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84801.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:33:40,993 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5864, 2.8526, 2.5696, 2.7992, 3.6442, 3.4831, 3.0876, 2.8770], + device='cuda:1'), covar=tensor([0.0177, 0.0279, 0.0595, 0.0484, 0.0187, 0.0175, 0.0409, 0.0414], + device='cuda:1'), in_proj_covar=tensor([0.0143, 0.0142, 0.0167, 0.0164, 0.0137, 0.0122, 0.0159, 0.0163], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 17:34:10,449 INFO [train.py:898] (1/4) Epoch 24, batch 1250, loss[loss=0.1805, simple_loss=0.2732, pruned_loss=0.04386, over 16404.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2508, pruned_loss=0.03512, over 3597222.62 frames. ], batch size: 94, lr: 4.70e-03, grad_scale: 4.0 +2023-03-09 17:34:36,660 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84855.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:34:43,697 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84861.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:34:50,679 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1516, 5.5556, 5.1968, 5.4087, 5.2146, 5.0830, 5.6295, 5.5730], + device='cuda:1'), covar=tensor([0.1199, 0.0807, 0.0628, 0.0677, 0.1382, 0.0705, 0.0602, 0.0757], + device='cuda:1'), in_proj_covar=tensor([0.0626, 0.0551, 0.0389, 0.0571, 0.0770, 0.0562, 0.0785, 0.0596], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:1') +2023-03-09 17:35:03,781 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8434, 3.4387, 4.7026, 2.8181, 4.1075, 2.6152, 2.9056, 1.8978], + device='cuda:1'), covar=tensor([0.1153, 0.1004, 0.0166, 0.1023, 0.0570, 0.2559, 0.2688, 0.2187], + device='cuda:1'), in_proj_covar=tensor([0.0227, 0.0250, 0.0209, 0.0205, 0.0265, 0.0278, 0.0334, 0.0244], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 17:35:06,896 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84881.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:35:08,463 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-03-09 17:35:08,995 INFO [train.py:898] (1/4) Epoch 24, batch 1300, loss[loss=0.1515, simple_loss=0.2359, pruned_loss=0.03357, over 18496.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2507, pruned_loss=0.03504, over 3593013.55 frames. 
], batch size: 44, lr: 4.70e-03, grad_scale: 4.0 +2023-03-09 17:35:19,312 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84892.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:35:21,284 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.784e+02 2.717e+02 3.045e+02 3.603e+02 7.814e+02, threshold=6.090e+02, percent-clipped=3.0 +2023-03-09 17:35:31,907 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84903.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:35:39,994 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7807, 4.7280, 4.8212, 4.5614, 4.6330, 4.6079, 4.8728, 4.8655], + device='cuda:1'), covar=tensor([0.0072, 0.0073, 0.0067, 0.0119, 0.0066, 0.0165, 0.0090, 0.0108], + device='cuda:1'), in_proj_covar=tensor([0.0097, 0.0071, 0.0077, 0.0096, 0.0077, 0.0108, 0.0090, 0.0089], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 17:36:07,824 INFO [train.py:898] (1/4) Epoch 24, batch 1350, loss[loss=0.1417, simple_loss=0.2316, pruned_loss=0.02586, over 18270.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2505, pruned_loss=0.03492, over 3591502.00 frames. ], batch size: 47, lr: 4.70e-03, grad_scale: 4.0 +2023-03-09 17:36:16,022 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84940.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:36:28,910 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84951.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:36:40,318 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4136, 5.3557, 5.0059, 5.3289, 5.2995, 4.6854, 5.2016, 5.0098], + device='cuda:1'), covar=tensor([0.0426, 0.0506, 0.1327, 0.0698, 0.0621, 0.0430, 0.0463, 0.1143], + device='cuda:1'), in_proj_covar=tensor([0.0511, 0.0575, 0.0722, 0.0449, 0.0474, 0.0525, 0.0560, 0.0699], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 17:37:05,448 INFO [train.py:898] (1/4) Epoch 24, batch 1400, loss[loss=0.1392, simple_loss=0.2202, pruned_loss=0.02912, over 18394.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2504, pruned_loss=0.03494, over 3582121.26 frames. 
], batch size: 43, lr: 4.70e-03, grad_scale: 4.0 +2023-03-09 17:37:12,989 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84989.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:37:15,467 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1250, 3.9070, 5.0017, 4.7092, 3.4802, 3.4334, 4.7878, 5.3641], + device='cuda:1'), covar=tensor([0.0690, 0.1525, 0.0271, 0.0347, 0.0898, 0.1019, 0.0333, 0.0248], + device='cuda:1'), in_proj_covar=tensor([0.0153, 0.0280, 0.0164, 0.0185, 0.0195, 0.0194, 0.0199, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 17:37:16,467 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84992.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:37:18,346 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.714e+02 2.377e+02 2.923e+02 3.851e+02 7.184e+02, threshold=5.846e+02, percent-clipped=2.0 +2023-03-09 17:37:39,018 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85012.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:37:41,778 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.2851, 4.1955, 4.0522, 4.2120, 4.2190, 3.7678, 4.2130, 4.0272], + device='cuda:1'), covar=tensor([0.0487, 0.0809, 0.1262, 0.0707, 0.0666, 0.0476, 0.0491, 0.1086], + device='cuda:1'), in_proj_covar=tensor([0.0510, 0.0575, 0.0722, 0.0449, 0.0473, 0.0525, 0.0559, 0.0699], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 17:38:03,912 INFO [train.py:898] (1/4) Epoch 24, batch 1450, loss[loss=0.161, simple_loss=0.2528, pruned_loss=0.03455, over 18484.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.25, pruned_loss=0.03492, over 3591771.82 frames. ], batch size: 51, lr: 4.70e-03, grad_scale: 4.0 +2023-03-09 17:38:12,402 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85040.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:38:38,530 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5038, 3.2587, 4.3785, 3.9439, 3.1282, 2.9867, 3.9639, 4.5009], + device='cuda:1'), covar=tensor([0.0889, 0.1445, 0.0262, 0.0452, 0.0984, 0.1165, 0.0425, 0.0338], + device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0279, 0.0164, 0.0185, 0.0195, 0.0194, 0.0198, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 17:38:58,470 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5602, 3.4494, 2.3220, 4.2966, 3.0068, 4.1064, 2.4129, 3.8976], + device='cuda:1'), covar=tensor([0.0638, 0.0871, 0.1491, 0.0523, 0.0928, 0.0396, 0.1262, 0.0438], + device='cuda:1'), in_proj_covar=tensor([0.0220, 0.0230, 0.0193, 0.0292, 0.0196, 0.0274, 0.0206, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 17:39:01,459 INFO [train.py:898] (1/4) Epoch 24, batch 1500, loss[loss=0.1475, simple_loss=0.2434, pruned_loss=0.02583, over 18395.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2495, pruned_loss=0.03458, over 3600868.99 frames. 
], batch size: 48, lr: 4.70e-03, grad_scale: 4.0 +2023-03-09 17:39:06,152 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9124, 5.3055, 2.4708, 5.2055, 5.0196, 5.2971, 5.1365, 2.7106], + device='cuda:1'), covar=tensor([0.0237, 0.0111, 0.0983, 0.0081, 0.0093, 0.0113, 0.0125, 0.1141], + device='cuda:1'), in_proj_covar=tensor([0.0090, 0.0082, 0.0096, 0.0096, 0.0087, 0.0077, 0.0086, 0.0097], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 17:39:14,114 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.797e+02 2.574e+02 3.009e+02 3.655e+02 5.814e+02, threshold=6.018e+02, percent-clipped=0.0 +2023-03-09 17:39:28,927 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85107.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:39:31,589 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-09 17:39:37,877 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85115.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:39:59,272 INFO [train.py:898] (1/4) Epoch 24, batch 1550, loss[loss=0.1567, simple_loss=0.2499, pruned_loss=0.03172, over 18537.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.2496, pruned_loss=0.03442, over 3612941.68 frames. ], batch size: 49, lr: 4.70e-03, grad_scale: 4.0 +2023-03-09 17:40:25,884 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85156.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:40:31,792 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0046, 4.2079, 2.5764, 4.1271, 5.2670, 2.7346, 3.8354, 3.8428], + device='cuda:1'), covar=tensor([0.0169, 0.1231, 0.1468, 0.0631, 0.0095, 0.1069, 0.0613, 0.0797], + device='cuda:1'), in_proj_covar=tensor([0.0175, 0.0272, 0.0206, 0.0201, 0.0133, 0.0185, 0.0218, 0.0227], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 17:40:39,400 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85168.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 17:40:48,529 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85176.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:40:55,186 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85181.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:40:57,799 INFO [train.py:898] (1/4) Epoch 24, batch 1600, loss[loss=0.1485, simple_loss=0.2289, pruned_loss=0.03404, over 18440.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2502, pruned_loss=0.03468, over 3619102.13 frames. ], batch size: 43, lr: 4.69e-03, grad_scale: 8.0 +2023-03-09 17:41:10,165 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-03-09 17:41:10,346 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.996e+02 2.511e+02 2.791e+02 3.550e+02 5.356e+02, threshold=5.582e+02, percent-clipped=0.0 +2023-03-09 17:41:51,990 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85229.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:41:57,474 INFO [train.py:898] (1/4) Epoch 24, batch 1650, loss[loss=0.17, simple_loss=0.2674, pruned_loss=0.03636, over 17722.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2493, pruned_loss=0.03427, over 3615865.42 frames. 
], batch size: 70, lr: 4.69e-03, grad_scale: 8.0 +2023-03-09 17:42:02,345 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7326, 2.3471, 2.6661, 2.8046, 3.3567, 4.8737, 4.7880, 3.3337], + device='cuda:1'), covar=tensor([0.1881, 0.2464, 0.2956, 0.1796, 0.2298, 0.0275, 0.0369, 0.1011], + device='cuda:1'), in_proj_covar=tensor([0.0313, 0.0354, 0.0393, 0.0284, 0.0394, 0.0252, 0.0300, 0.0264], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 17:42:22,168 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6696, 4.3721, 4.2923, 3.2433, 3.6465, 3.2857, 2.6097, 2.4360], + device='cuda:1'), covar=tensor([0.0243, 0.0169, 0.0103, 0.0317, 0.0350, 0.0246, 0.0728, 0.0877], + device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0061, 0.0065, 0.0069, 0.0091, 0.0068, 0.0077, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 17:42:55,788 INFO [train.py:898] (1/4) Epoch 24, batch 1700, loss[loss=0.1701, simple_loss=0.263, pruned_loss=0.03862, over 17004.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.2494, pruned_loss=0.03456, over 3605543.08 frames. ], batch size: 78, lr: 4.69e-03, grad_scale: 8.0 +2023-03-09 17:43:03,202 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85289.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:43:08,657 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.937e+02 2.815e+02 3.263e+02 3.900e+02 7.200e+02, threshold=6.526e+02, percent-clipped=2.0 +2023-03-09 17:43:23,860 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85307.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:43:50,758 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. limit=5.0 +2023-03-09 17:43:52,192 INFO [train.py:898] (1/4) Epoch 24, batch 1750, loss[loss=0.144, simple_loss=0.2331, pruned_loss=0.02742, over 18295.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2491, pruned_loss=0.03457, over 3611698.83 frames. ], batch size: 49, lr: 4.69e-03, grad_scale: 8.0 +2023-03-09 17:43:57,786 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85337.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:44:17,132 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2124, 5.3123, 4.6099, 5.1502, 5.2770, 4.7876, 5.1224, 4.7745], + device='cuda:1'), covar=tensor([0.0928, 0.0703, 0.2497, 0.1286, 0.0766, 0.0488, 0.0779, 0.1512], + device='cuda:1'), in_proj_covar=tensor([0.0506, 0.0571, 0.0717, 0.0447, 0.0468, 0.0522, 0.0557, 0.0696], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 17:44:27,842 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.67 vs. limit=2.0 +2023-03-09 17:44:49,062 INFO [train.py:898] (1/4) Epoch 24, batch 1800, loss[loss=0.1401, simple_loss=0.2264, pruned_loss=0.02693, over 18262.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2492, pruned_loss=0.03481, over 3605359.53 frames. 
], batch size: 47, lr: 4.69e-03, grad_scale: 8.0 +2023-03-09 17:45:02,452 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.840e+02 2.661e+02 3.005e+02 3.822e+02 6.057e+02, threshold=6.010e+02, percent-clipped=0.0 +2023-03-09 17:45:05,720 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-09 17:45:14,319 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0628, 5.5001, 2.8834, 5.3843, 5.1283, 5.4795, 5.3239, 2.6598], + device='cuda:1'), covar=tensor([0.0211, 0.0086, 0.0790, 0.0087, 0.0088, 0.0118, 0.0106, 0.1144], + device='cuda:1'), in_proj_covar=tensor([0.0088, 0.0081, 0.0095, 0.0095, 0.0086, 0.0076, 0.0085, 0.0095], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 17:45:29,681 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9096, 5.3373, 2.7934, 5.1702, 5.0408, 5.3650, 5.1393, 2.6454], + device='cuda:1'), covar=tensor([0.0216, 0.0059, 0.0762, 0.0074, 0.0078, 0.0067, 0.0087, 0.1015], + device='cuda:1'), in_proj_covar=tensor([0.0089, 0.0081, 0.0096, 0.0095, 0.0087, 0.0076, 0.0085, 0.0096], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 17:45:42,309 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85428.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:45:47,766 INFO [train.py:898] (1/4) Epoch 24, batch 1850, loss[loss=0.1593, simple_loss=0.2572, pruned_loss=0.03064, over 18501.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2492, pruned_loss=0.03473, over 3587058.62 frames. ], batch size: 51, lr: 4.69e-03, grad_scale: 8.0 +2023-03-09 17:46:15,474 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85456.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:46:23,636 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85463.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 17:46:32,416 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85471.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:46:45,574 INFO [train.py:898] (1/4) Epoch 24, batch 1900, loss[loss=0.15, simple_loss=0.2395, pruned_loss=0.03023, over 18402.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2497, pruned_loss=0.03494, over 3582396.24 frames. ], batch size: 48, lr: 4.69e-03, grad_scale: 8.0 +2023-03-09 17:46:45,979 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85483.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:46:52,913 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85489.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:46:58,608 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.851e+02 2.648e+02 3.187e+02 3.710e+02 7.319e+02, threshold=6.375e+02, percent-clipped=2.0 +2023-03-09 17:47:10,738 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85504.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:47:40,117 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85530.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:47:43,283 INFO [train.py:898] (1/4) Epoch 24, batch 1950, loss[loss=0.157, simple_loss=0.2496, pruned_loss=0.03219, over 18492.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2497, pruned_loss=0.03497, over 3589941.89 frames. 
], batch size: 53, lr: 4.68e-03, grad_scale: 8.0 +2023-03-09 17:47:57,233 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85544.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:48:16,762 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6240, 3.5996, 4.7087, 4.3158, 3.3763, 2.9662, 4.3580, 4.9620], + device='cuda:1'), covar=tensor([0.0839, 0.1317, 0.0287, 0.0373, 0.0874, 0.1192, 0.0368, 0.0388], + device='cuda:1'), in_proj_covar=tensor([0.0153, 0.0281, 0.0165, 0.0186, 0.0196, 0.0196, 0.0199, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 17:48:19,409 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-03-09 17:48:41,755 INFO [train.py:898] (1/4) Epoch 24, batch 2000, loss[loss=0.1828, simple_loss=0.2812, pruned_loss=0.04222, over 18269.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2501, pruned_loss=0.03522, over 3581394.15 frames. ], batch size: 57, lr: 4.68e-03, grad_scale: 8.0 +2023-03-09 17:48:44,347 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7193, 2.4807, 2.6797, 2.6337, 3.3384, 4.9760, 4.7850, 3.3840], + device='cuda:1'), covar=tensor([0.1997, 0.2526, 0.2880, 0.2060, 0.2396, 0.0272, 0.0385, 0.1047], + device='cuda:1'), in_proj_covar=tensor([0.0314, 0.0354, 0.0393, 0.0285, 0.0395, 0.0254, 0.0300, 0.0264], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 17:48:50,861 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85591.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:48:55,651 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.729e+02 2.662e+02 3.281e+02 4.045e+02 7.861e+02, threshold=6.561e+02, percent-clipped=1.0 +2023-03-09 17:48:59,958 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1257, 2.5241, 3.3028, 3.0407, 2.5152, 3.3981, 3.2487, 2.4867], + device='cuda:1'), covar=tensor([0.0491, 0.1219, 0.0449, 0.0429, 0.1280, 0.0350, 0.0650, 0.0920], + device='cuda:1'), in_proj_covar=tensor([0.0213, 0.0238, 0.0219, 0.0167, 0.0225, 0.0215, 0.0254, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 17:49:10,617 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85607.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:49:31,481 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85625.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:49:40,100 INFO [train.py:898] (1/4) Epoch 24, batch 2050, loss[loss=0.1455, simple_loss=0.2281, pruned_loss=0.03145, over 18156.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.2503, pruned_loss=0.03552, over 3562460.09 frames. ], batch size: 44, lr: 4.68e-03, grad_scale: 8.0 +2023-03-09 17:49:47,766 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-09 17:50:06,777 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85655.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:50:18,322 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-09 17:50:38,494 INFO [train.py:898] (1/4) Epoch 24, batch 2100, loss[loss=0.1749, simple_loss=0.2743, pruned_loss=0.03771, over 18487.00 frames. 
], tot_loss[loss=0.1602, simple_loss=0.2498, pruned_loss=0.03535, over 3569717.06 frames. ], batch size: 59, lr: 4.68e-03, grad_scale: 8.0 +2023-03-09 17:50:38,872 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6388, 2.9097, 4.4495, 3.6744, 2.8399, 4.6803, 3.9153, 2.9590], + device='cuda:1'), covar=tensor([0.0493, 0.1331, 0.0241, 0.0431, 0.1348, 0.0199, 0.0513, 0.0935], + device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0239, 0.0219, 0.0168, 0.0227, 0.0215, 0.0254, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 17:50:42,200 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85686.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 17:50:51,290 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4642, 3.3259, 2.4590, 4.2806, 3.0131, 4.0933, 2.5798, 3.8408], + device='cuda:1'), covar=tensor([0.0627, 0.0834, 0.1294, 0.0520, 0.0818, 0.0334, 0.1065, 0.0397], + device='cuda:1'), in_proj_covar=tensor([0.0218, 0.0228, 0.0192, 0.0290, 0.0195, 0.0270, 0.0203, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 17:50:52,032 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.890e+02 2.606e+02 3.124e+02 4.074e+02 9.215e+02, threshold=6.247e+02, percent-clipped=3.0 +2023-03-09 17:50:54,676 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6280, 3.4935, 2.4885, 4.4923, 3.2004, 4.2907, 2.6181, 4.0171], + device='cuda:1'), covar=tensor([0.0627, 0.0780, 0.1288, 0.0399, 0.0758, 0.0271, 0.1074, 0.0378], + device='cuda:1'), in_proj_covar=tensor([0.0218, 0.0228, 0.0192, 0.0290, 0.0195, 0.0270, 0.0203, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 17:51:37,109 INFO [train.py:898] (1/4) Epoch 24, batch 2150, loss[loss=0.1548, simple_loss=0.2529, pruned_loss=0.02838, over 18392.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2499, pruned_loss=0.03525, over 3583770.90 frames. ], batch size: 52, lr: 4.68e-03, grad_scale: 8.0 +2023-03-09 17:52:12,547 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85763.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:52:21,501 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85771.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:52:35,301 INFO [train.py:898] (1/4) Epoch 24, batch 2200, loss[loss=0.1656, simple_loss=0.2606, pruned_loss=0.03528, over 16192.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2512, pruned_loss=0.03548, over 3586735.00 frames. 
], batch size: 94, lr: 4.68e-03, grad_scale: 4.0 +2023-03-09 17:52:36,642 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85784.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:52:49,955 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.809e+02 2.630e+02 3.062e+02 3.735e+02 6.133e+02, threshold=6.125e+02, percent-clipped=0.0 +2023-03-09 17:53:08,616 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85811.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:53:17,770 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85819.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:53:23,503 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1609, 5.1756, 5.2985, 4.9971, 4.9957, 5.0594, 5.3607, 5.3158], + device='cuda:1'), covar=tensor([0.0053, 0.0051, 0.0042, 0.0091, 0.0047, 0.0128, 0.0054, 0.0074], + device='cuda:1'), in_proj_covar=tensor([0.0096, 0.0071, 0.0076, 0.0096, 0.0077, 0.0106, 0.0089, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 17:53:33,940 INFO [train.py:898] (1/4) Epoch 24, batch 2250, loss[loss=0.1848, simple_loss=0.2797, pruned_loss=0.04499, over 15868.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2511, pruned_loss=0.03535, over 3586181.04 frames. ], batch size: 94, lr: 4.68e-03, grad_scale: 4.0 +2023-03-09 17:53:41,002 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85839.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:54:23,465 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85875.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:54:33,026 INFO [train.py:898] (1/4) Epoch 24, batch 2300, loss[loss=0.1549, simple_loss=0.2527, pruned_loss=0.02849, over 18488.00 frames. ], tot_loss[loss=0.161, simple_loss=0.2512, pruned_loss=0.03543, over 3591049.37 frames. ], batch size: 53, lr: 4.68e-03, grad_scale: 4.0 +2023-03-09 17:54:36,422 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85886.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:54:47,538 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.050e+02 2.637e+02 3.133e+02 3.716e+02 1.062e+03, threshold=6.266e+02, percent-clipped=3.0 +2023-03-09 17:54:55,536 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4569, 5.4030, 5.0512, 5.3666, 5.3240, 4.7380, 5.2488, 4.9606], + device='cuda:1'), covar=tensor([0.0400, 0.0436, 0.1265, 0.0717, 0.0628, 0.0436, 0.0423, 0.1091], + device='cuda:1'), in_proj_covar=tensor([0.0511, 0.0579, 0.0728, 0.0448, 0.0474, 0.0528, 0.0561, 0.0701], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 17:55:14,461 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7571, 3.5483, 4.8427, 2.9045, 4.1723, 2.5501, 3.0161, 1.7802], + device='cuda:1'), covar=tensor([0.1246, 0.0966, 0.0184, 0.0980, 0.0585, 0.2595, 0.2577, 0.2248], + device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0247, 0.0210, 0.0202, 0.0260, 0.0275, 0.0331, 0.0243], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 17:55:15,817 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.34 vs. 
limit=5.0 +2023-03-09 17:55:27,531 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85930.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 17:55:27,667 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6842, 3.4847, 4.5058, 4.1841, 3.1863, 3.0089, 4.2437, 4.6731], + device='cuda:1'), covar=tensor([0.0780, 0.1344, 0.0326, 0.0403, 0.0950, 0.1157, 0.0405, 0.0390], + device='cuda:1'), in_proj_covar=tensor([0.0154, 0.0282, 0.0165, 0.0186, 0.0197, 0.0196, 0.0199, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 17:55:30,548 INFO [train.py:898] (1/4) Epoch 24, batch 2350, loss[loss=0.1777, simple_loss=0.2745, pruned_loss=0.04048, over 18274.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2511, pruned_loss=0.03539, over 3592784.90 frames. ], batch size: 57, lr: 4.67e-03, grad_scale: 4.0 +2023-03-09 17:55:34,086 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85936.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:55:58,199 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85957.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:56:03,525 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-09 17:56:19,080 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4861, 2.9598, 4.4525, 3.6362, 2.8458, 4.6162, 3.8886, 3.0858], + device='cuda:1'), covar=tensor([0.0569, 0.1326, 0.0245, 0.0458, 0.1409, 0.0224, 0.0564, 0.0851], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0239, 0.0220, 0.0167, 0.0227, 0.0215, 0.0254, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 17:56:21,435 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5714, 3.5504, 3.4227, 3.0803, 3.3374, 2.7440, 2.6588, 3.6248], + device='cuda:1'), covar=tensor([0.0063, 0.0094, 0.0085, 0.0152, 0.0099, 0.0203, 0.0229, 0.0059], + device='cuda:1'), in_proj_covar=tensor([0.0148, 0.0167, 0.0141, 0.0193, 0.0150, 0.0184, 0.0190, 0.0126], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 17:56:26,825 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85981.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 17:56:28,824 INFO [train.py:898] (1/4) Epoch 24, batch 2400, loss[loss=0.1344, simple_loss=0.2168, pruned_loss=0.02598, over 18552.00 frames. ], tot_loss[loss=0.161, simple_loss=0.251, pruned_loss=0.03552, over 3585470.73 frames. ], batch size: 45, lr: 4.67e-03, grad_scale: 8.0 +2023-03-09 17:56:38,983 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85991.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 17:56:44,284 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.063e+02 2.665e+02 3.240e+02 3.852e+02 9.011e+02, threshold=6.481e+02, percent-clipped=2.0 +2023-03-09 17:56:56,510 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86002.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:57:07,140 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. 
limit=2.0 +2023-03-09 17:57:12,382 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0407, 5.1324, 5.2185, 4.8584, 4.9440, 4.8749, 5.2337, 5.2387], + device='cuda:1'), covar=tensor([0.0065, 0.0056, 0.0047, 0.0099, 0.0051, 0.0155, 0.0064, 0.0088], + device='cuda:1'), in_proj_covar=tensor([0.0096, 0.0071, 0.0076, 0.0096, 0.0076, 0.0106, 0.0089, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 17:57:15,745 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86018.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:57:16,846 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3745, 2.7624, 2.3049, 2.7534, 3.4255, 3.3359, 2.9085, 2.7146], + device='cuda:1'), covar=tensor([0.0216, 0.0304, 0.0631, 0.0437, 0.0201, 0.0175, 0.0464, 0.0443], + device='cuda:1'), in_proj_covar=tensor([0.0144, 0.0143, 0.0166, 0.0165, 0.0138, 0.0123, 0.0160, 0.0164], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 17:57:32,751 INFO [train.py:898] (1/4) Epoch 24, batch 2450, loss[loss=0.1682, simple_loss=0.2623, pruned_loss=0.03703, over 18313.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2501, pruned_loss=0.03507, over 3591802.77 frames. ], batch size: 54, lr: 4.67e-03, grad_scale: 8.0 +2023-03-09 17:57:54,702 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86052.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:57:57,429 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-03-09 17:57:58,193 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7772, 3.0517, 4.4650, 3.9101, 2.7905, 4.6956, 4.0254, 3.0185], + device='cuda:1'), covar=tensor([0.0474, 0.1482, 0.0283, 0.0402, 0.1507, 0.0232, 0.0534, 0.0987], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0240, 0.0220, 0.0167, 0.0227, 0.0216, 0.0254, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 17:58:07,267 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86063.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:58:14,881 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86069.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:58:19,430 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.6264, 5.5913, 5.1925, 5.5965, 5.5256, 4.9123, 5.4398, 5.1839], + device='cuda:1'), covar=tensor([0.0427, 0.0432, 0.1353, 0.0700, 0.0641, 0.0466, 0.0457, 0.1092], + device='cuda:1'), in_proj_covar=tensor([0.0509, 0.0577, 0.0724, 0.0445, 0.0474, 0.0527, 0.0558, 0.0698], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 17:58:31,031 INFO [train.py:898] (1/4) Epoch 24, batch 2500, loss[loss=0.1371, simple_loss=0.219, pruned_loss=0.02757, over 18373.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2494, pruned_loss=0.03502, over 3585669.89 frames. 
], batch size: 42, lr: 4.67e-03, grad_scale: 4.0 +2023-03-09 17:58:32,306 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86084.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:58:46,793 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.916e+02 2.583e+02 3.100e+02 3.790e+02 6.020e+02, threshold=6.201e+02, percent-clipped=0.0 +2023-03-09 17:59:05,017 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86113.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:59:25,962 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86130.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:59:27,920 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86132.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:59:28,882 INFO [train.py:898] (1/4) Epoch 24, batch 2550, loss[loss=0.1677, simple_loss=0.2587, pruned_loss=0.03832, over 18482.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2502, pruned_loss=0.03539, over 3569414.55 frames. ], batch size: 53, lr: 4.67e-03, grad_scale: 4.0 +2023-03-09 17:59:35,576 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86139.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:00:26,684 INFO [train.py:898] (1/4) Epoch 24, batch 2600, loss[loss=0.1394, simple_loss=0.2247, pruned_loss=0.02709, over 18400.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2503, pruned_loss=0.03525, over 3577569.23 frames. ], batch size: 42, lr: 4.67e-03, grad_scale: 4.0 +2023-03-09 18:00:30,410 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86186.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:00:31,346 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86187.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:00:42,998 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.777e+02 2.596e+02 3.106e+02 3.737e+02 7.665e+02, threshold=6.211e+02, percent-clipped=2.0 +2023-03-09 18:00:45,808 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 18:01:23,335 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86231.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:01:25,436 INFO [train.py:898] (1/4) Epoch 24, batch 2650, loss[loss=0.1537, simple_loss=0.2419, pruned_loss=0.03276, over 18406.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.251, pruned_loss=0.03559, over 3574232.90 frames. 
], batch size: 48, lr: 4.67e-03, grad_scale: 4.0 +2023-03-09 18:01:26,598 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86234.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:01:40,393 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86246.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:01:58,090 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8980, 3.3585, 4.7128, 3.9247, 3.0528, 4.9341, 4.1550, 3.3416], + device='cuda:1'), covar=tensor([0.0442, 0.1186, 0.0231, 0.0445, 0.1319, 0.0213, 0.0509, 0.0808], + device='cuda:1'), in_proj_covar=tensor([0.0215, 0.0239, 0.0220, 0.0167, 0.0226, 0.0216, 0.0253, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 18:02:21,424 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9145, 4.9620, 5.0393, 4.7349, 4.7999, 4.7285, 5.0376, 5.0512], + device='cuda:1'), covar=tensor([0.0075, 0.0069, 0.0053, 0.0123, 0.0061, 0.0185, 0.0101, 0.0097], + device='cuda:1'), in_proj_covar=tensor([0.0097, 0.0071, 0.0077, 0.0096, 0.0077, 0.0106, 0.0089, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 18:02:22,541 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86281.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:02:24,584 INFO [train.py:898] (1/4) Epoch 24, batch 2700, loss[loss=0.1782, simple_loss=0.2715, pruned_loss=0.04242, over 17971.00 frames. ], tot_loss[loss=0.1614, simple_loss=0.2512, pruned_loss=0.03573, over 3549466.66 frames. ], batch size: 65, lr: 4.66e-03, grad_scale: 4.0 +2023-03-09 18:02:28,152 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86286.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 18:02:41,076 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.842e+02 2.486e+02 2.896e+02 3.686e+02 7.740e+02, threshold=5.792e+02, percent-clipped=4.0 +2023-03-09 18:02:52,841 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86307.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:02:59,523 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86313.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:03:18,481 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86329.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:03:18,540 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1232, 5.1610, 5.3598, 5.4863, 5.0637, 5.9654, 5.6036, 5.2208], + device='cuda:1'), covar=tensor([0.1154, 0.0674, 0.0777, 0.0774, 0.1420, 0.0732, 0.0626, 0.1574], + device='cuda:1'), in_proj_covar=tensor([0.0365, 0.0297, 0.0316, 0.0325, 0.0336, 0.0431, 0.0293, 0.0428], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 18:03:23,301 INFO [train.py:898] (1/4) Epoch 24, batch 2750, loss[loss=0.1423, simple_loss=0.2325, pruned_loss=0.02605, over 18496.00 frames. ], tot_loss[loss=0.1619, simple_loss=0.2522, pruned_loss=0.03581, over 3559280.28 frames. 
], batch size: 47, lr: 4.66e-03, grad_scale: 4.0 +2023-03-09 18:03:25,934 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0949, 2.5864, 3.2984, 3.0887, 2.4523, 3.4356, 3.2887, 2.5298], + device='cuda:1'), covar=tensor([0.0513, 0.1208, 0.0446, 0.0419, 0.1365, 0.0319, 0.0635, 0.0887], + device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0237, 0.0220, 0.0167, 0.0226, 0.0215, 0.0252, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 18:03:52,827 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86358.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:04:21,055 INFO [train.py:898] (1/4) Epoch 24, batch 2800, loss[loss=0.1794, simple_loss=0.2734, pruned_loss=0.04273, over 16167.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2522, pruned_loss=0.03564, over 3558658.18 frames. ], batch size: 94, lr: 4.66e-03, grad_scale: 8.0 +2023-03-09 18:04:37,472 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.618e+02 2.577e+02 3.017e+02 3.554e+02 5.358e+02, threshold=6.034e+02, percent-clipped=0.0 +2023-03-09 18:04:48,797 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1479, 3.4328, 3.3484, 2.8853, 3.0541, 2.8456, 2.3949, 2.3687], + device='cuda:1'), covar=tensor([0.0254, 0.0192, 0.0151, 0.0318, 0.0328, 0.0252, 0.0657, 0.0708], + device='cuda:1'), in_proj_covar=tensor([0.0072, 0.0062, 0.0065, 0.0069, 0.0091, 0.0068, 0.0078, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 18:04:50,983 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86408.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:05:10,657 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86425.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:05:19,741 INFO [train.py:898] (1/4) Epoch 24, batch 2850, loss[loss=0.1576, simple_loss=0.2503, pruned_loss=0.03243, over 18494.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2519, pruned_loss=0.03554, over 3547520.88 frames. ], batch size: 59, lr: 4.66e-03, grad_scale: 8.0 +2023-03-09 18:05:29,147 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86440.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:05:34,654 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86445.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:05:58,986 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. limit=2.0 +2023-03-09 18:06:18,032 INFO [train.py:898] (1/4) Epoch 24, batch 2900, loss[loss=0.1596, simple_loss=0.2515, pruned_loss=0.03385, over 17167.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2511, pruned_loss=0.03536, over 3553709.24 frames. ], batch size: 78, lr: 4.66e-03, grad_scale: 4.0 +2023-03-09 18:06:22,891 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.46 vs. 
limit=5.0 +2023-03-09 18:06:36,141 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.815e+02 2.792e+02 3.220e+02 3.955e+02 8.779e+02, threshold=6.440e+02, percent-clipped=4.0 +2023-03-09 18:06:39,830 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86501.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:06:45,452 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86506.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 18:07:14,347 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86531.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:07:14,435 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6215, 2.7611, 2.5404, 2.8968, 3.6097, 3.5707, 3.1267, 2.9331], + device='cuda:1'), covar=tensor([0.0192, 0.0322, 0.0538, 0.0393, 0.0192, 0.0152, 0.0407, 0.0420], + device='cuda:1'), in_proj_covar=tensor([0.0143, 0.0144, 0.0166, 0.0165, 0.0138, 0.0123, 0.0160, 0.0164], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 18:07:16,263 INFO [train.py:898] (1/4) Epoch 24, batch 2950, loss[loss=0.2025, simple_loss=0.2816, pruned_loss=0.06167, over 12394.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2504, pruned_loss=0.03499, over 3558732.30 frames. ], batch size: 129, lr: 4.66e-03, grad_scale: 4.0 +2023-03-09 18:07:33,507 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6898, 3.6063, 2.4543, 4.4495, 3.2664, 4.3280, 2.7311, 4.0705], + device='cuda:1'), covar=tensor([0.0677, 0.0796, 0.1381, 0.0530, 0.0806, 0.0312, 0.1020, 0.0392], + device='cuda:1'), in_proj_covar=tensor([0.0221, 0.0230, 0.0194, 0.0293, 0.0197, 0.0274, 0.0205, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 18:08:10,865 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86579.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:08:12,178 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86580.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:08:15,185 INFO [train.py:898] (1/4) Epoch 24, batch 3000, loss[loss=0.1894, simple_loss=0.2687, pruned_loss=0.05512, over 12454.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2506, pruned_loss=0.03495, over 3549891.95 frames. ], batch size: 130, lr: 4.66e-03, grad_scale: 4.0 +2023-03-09 18:08:15,185 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 18:08:27,161 INFO [train.py:932] (1/4) Epoch 24, validation: loss=0.1501, simple_loss=0.2489, pruned_loss=0.02569, over 944034.00 frames. +2023-03-09 18:08:27,162 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 18:08:31,388 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86586.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:08:44,051 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-09 18:08:44,134 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. 
limit=2.0 +2023-03-09 18:08:44,563 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.708e+02 2.585e+02 3.043e+02 3.671e+02 7.667e+02, threshold=6.086e+02, percent-clipped=3.0 +2023-03-09 18:08:49,601 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86602.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:09:00,637 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6468, 2.3872, 2.5747, 2.6993, 3.2384, 4.8710, 4.8209, 3.2803], + device='cuda:1'), covar=tensor([0.2014, 0.2537, 0.3185, 0.1922, 0.2411, 0.0249, 0.0332, 0.1055], + device='cuda:1'), in_proj_covar=tensor([0.0313, 0.0354, 0.0395, 0.0285, 0.0394, 0.0254, 0.0299, 0.0265], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 18:09:02,660 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86613.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:09:06,729 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9818, 5.4176, 5.3938, 5.4107, 4.8700, 5.2991, 4.7424, 5.3262], + device='cuda:1'), covar=tensor([0.0251, 0.0308, 0.0190, 0.0343, 0.0391, 0.0248, 0.1119, 0.0295], + device='cuda:1'), in_proj_covar=tensor([0.0222, 0.0269, 0.0264, 0.0344, 0.0279, 0.0275, 0.0312, 0.0267], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 18:09:26,131 INFO [train.py:898] (1/4) Epoch 24, batch 3050, loss[loss=0.1699, simple_loss=0.2642, pruned_loss=0.03781, over 18453.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.25, pruned_loss=0.03475, over 3559107.41 frames. ], batch size: 59, lr: 4.66e-03, grad_scale: 4.0 +2023-03-09 18:09:27,430 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86634.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:09:35,949 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86641.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:09:55,819 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86658.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:09:59,254 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86661.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:10:24,347 INFO [train.py:898] (1/4) Epoch 24, batch 3100, loss[loss=0.1584, simple_loss=0.2393, pruned_loss=0.03873, over 17681.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2506, pruned_loss=0.03502, over 3561600.31 frames. ], batch size: 39, lr: 4.65e-03, grad_scale: 4.0 +2023-03-09 18:10:41,642 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.778e+02 2.436e+02 2.944e+02 3.607e+02 6.796e+02, threshold=5.887e+02, percent-clipped=2.0 +2023-03-09 18:10:51,066 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86706.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:10:54,032 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86708.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:11:13,483 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86725.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:11:22,864 INFO [train.py:898] (1/4) Epoch 24, batch 3150, loss[loss=0.1565, simple_loss=0.2515, pruned_loss=0.03079, over 18555.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2505, pruned_loss=0.03474, over 3564770.08 frames. 
], batch size: 54, lr: 4.65e-03, grad_scale: 4.0 +2023-03-09 18:11:49,564 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86756.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:12:09,639 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86773.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:12:13,743 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-09 18:12:20,952 INFO [train.py:898] (1/4) Epoch 24, batch 3200, loss[loss=0.1544, simple_loss=0.2514, pruned_loss=0.02866, over 18389.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2502, pruned_loss=0.03506, over 3562587.26 frames. ], batch size: 52, lr: 4.65e-03, grad_scale: 8.0 +2023-03-09 18:12:22,592 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.4715, 2.2345, 2.3280, 2.3759, 2.8207, 4.5797, 4.5001, 3.3345], + device='cuda:1'), covar=tensor([0.2298, 0.3091, 0.3692, 0.2400, 0.3421, 0.0360, 0.0450, 0.0991], + device='cuda:1'), in_proj_covar=tensor([0.0313, 0.0354, 0.0394, 0.0284, 0.0394, 0.0254, 0.0300, 0.0265], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 18:12:35,982 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86796.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:12:37,983 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.596e+02 2.507e+02 2.980e+02 3.406e+02 8.664e+02, threshold=5.960e+02, percent-clipped=4.0 +2023-03-09 18:12:42,543 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86801.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:12:57,945 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86814.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:13:19,213 INFO [train.py:898] (1/4) Epoch 24, batch 3250, loss[loss=0.1457, simple_loss=0.2318, pruned_loss=0.02977, over 18511.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2495, pruned_loss=0.03519, over 3575043.34 frames. 
], batch size: 47, lr: 4.65e-03, grad_scale: 8.0 +2023-03-09 18:13:51,282 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7699, 5.2347, 5.2168, 5.1973, 4.6811, 5.0737, 4.6171, 5.1321], + device='cuda:1'), covar=tensor([0.0263, 0.0319, 0.0187, 0.0439, 0.0402, 0.0237, 0.1028, 0.0300], + device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0273, 0.0268, 0.0350, 0.0284, 0.0279, 0.0317, 0.0272], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0006, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 18:14:08,834 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86875.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:14:12,163 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86878.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:14:14,994 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3696, 5.3385, 4.9761, 5.2891, 5.2842, 4.7407, 5.2156, 4.9449], + device='cuda:1'), covar=tensor([0.0445, 0.0435, 0.1411, 0.0789, 0.0660, 0.0400, 0.0434, 0.1054], + device='cuda:1'), in_proj_covar=tensor([0.0513, 0.0579, 0.0725, 0.0445, 0.0476, 0.0530, 0.0562, 0.0696], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 18:14:18,045 INFO [train.py:898] (1/4) Epoch 24, batch 3300, loss[loss=0.1728, simple_loss=0.2621, pruned_loss=0.04177, over 18301.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2498, pruned_loss=0.03531, over 3574250.57 frames. ], batch size: 57, lr: 4.65e-03, grad_scale: 8.0 +2023-03-09 18:14:35,713 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.783e+02 2.678e+02 3.309e+02 3.868e+02 1.091e+03, threshold=6.618e+02, percent-clipped=4.0 +2023-03-09 18:14:40,517 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86902.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:14:50,221 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.24 vs. limit=5.0 +2023-03-09 18:15:16,761 INFO [train.py:898] (1/4) Epoch 24, batch 3350, loss[loss=0.1567, simple_loss=0.2433, pruned_loss=0.03509, over 18289.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.2498, pruned_loss=0.03499, over 3582105.45 frames. ], batch size: 57, lr: 4.65e-03, grad_scale: 8.0 +2023-03-09 18:15:20,492 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86936.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:15:24,147 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86939.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:15:36,561 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86950.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:16:14,777 INFO [train.py:898] (1/4) Epoch 24, batch 3400, loss[loss=0.1645, simple_loss=0.2565, pruned_loss=0.03627, over 18505.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2496, pruned_loss=0.03472, over 3581573.78 frames. 
], batch size: 51, lr: 4.65e-03, grad_scale: 8.0 +2023-03-09 18:16:32,670 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.872e+02 2.525e+02 2.880e+02 3.383e+02 6.366e+02, threshold=5.759e+02, percent-clipped=0.0 +2023-03-09 18:16:38,093 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87002.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 18:17:11,513 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8024, 5.1031, 5.0335, 5.0941, 4.8740, 5.6224, 5.2238, 5.0052], + device='cuda:1'), covar=tensor([0.1185, 0.0751, 0.0799, 0.0857, 0.1226, 0.0834, 0.0790, 0.1520], + device='cuda:1'), in_proj_covar=tensor([0.0362, 0.0295, 0.0319, 0.0323, 0.0330, 0.0433, 0.0288, 0.0424], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 18:17:13,559 INFO [train.py:898] (1/4) Epoch 24, batch 3450, loss[loss=0.1735, simple_loss=0.2596, pruned_loss=0.04369, over 18269.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2499, pruned_loss=0.03502, over 3571223.34 frames. ], batch size: 57, lr: 4.64e-03, grad_scale: 8.0 +2023-03-09 18:17:48,473 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87063.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 18:17:56,848 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7704, 4.0057, 2.4432, 3.8578, 5.0567, 2.4826, 3.6048, 3.9810], + device='cuda:1'), covar=tensor([0.0199, 0.1155, 0.1657, 0.0704, 0.0094, 0.1314, 0.0744, 0.0671], + device='cuda:1'), in_proj_covar=tensor([0.0174, 0.0273, 0.0205, 0.0199, 0.0134, 0.0185, 0.0217, 0.0225], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 18:17:59,380 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-09 18:18:03,479 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8255, 3.6424, 4.9563, 4.3213, 3.2800, 3.0954, 4.2969, 5.1568], + device='cuda:1'), covar=tensor([0.0774, 0.1485, 0.0198, 0.0434, 0.1003, 0.1166, 0.0417, 0.0223], + device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0280, 0.0165, 0.0185, 0.0194, 0.0193, 0.0197, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 18:18:11,252 INFO [train.py:898] (1/4) Epoch 24, batch 3500, loss[loss=0.1423, simple_loss=0.2246, pruned_loss=0.03002, over 17657.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2498, pruned_loss=0.03467, over 3585772.78 frames. ], batch size: 39, lr: 4.64e-03, grad_scale: 8.0 +2023-03-09 18:18:26,433 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87096.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:18:28,425 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.855e+02 2.643e+02 3.056e+02 3.679e+02 7.050e+02, threshold=6.112e+02, percent-clipped=3.0 +2023-03-09 18:18:32,033 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87101.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 18:19:06,668 INFO [train.py:898] (1/4) Epoch 24, batch 3550, loss[loss=0.1441, simple_loss=0.2303, pruned_loss=0.02901, over 18408.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2497, pruned_loss=0.0349, over 3601954.20 frames. 
], batch size: 48, lr: 4.64e-03, grad_scale: 8.0 +2023-03-09 18:19:18,569 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87144.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:19:23,969 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87149.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:19:46,345 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87170.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:19:54,776 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.77 vs. limit=5.0 +2023-03-09 18:19:58,729 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6377, 3.6495, 3.4907, 3.0994, 3.4247, 2.7629, 2.8149, 3.7223], + device='cuda:1'), covar=tensor([0.0067, 0.0085, 0.0087, 0.0139, 0.0094, 0.0189, 0.0203, 0.0063], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0168, 0.0142, 0.0193, 0.0149, 0.0184, 0.0189, 0.0127], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 18:20:00,496 INFO [train.py:898] (1/4) Epoch 24, batch 3600, loss[loss=0.1799, simple_loss=0.2672, pruned_loss=0.04627, over 12140.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2496, pruned_loss=0.03496, over 3596925.01 frames. ], batch size: 129, lr: 4.64e-03, grad_scale: 8.0 +2023-03-09 18:20:16,547 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.830e+02 2.465e+02 3.001e+02 3.686e+02 6.054e+02, threshold=6.002e+02, percent-clipped=0.0 +2023-03-09 18:21:02,998 INFO [train.py:898] (1/4) Epoch 25, batch 0, loss[loss=0.15, simple_loss=0.2442, pruned_loss=0.02787, over 18556.00 frames. ], tot_loss[loss=0.15, simple_loss=0.2442, pruned_loss=0.02787, over 18556.00 frames. ], batch size: 54, lr: 4.54e-03, grad_scale: 8.0 +2023-03-09 18:21:02,998 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 18:21:14,846 INFO [train.py:932] (1/4) Epoch 25, validation: loss=0.1499, simple_loss=0.2489, pruned_loss=0.0255, over 944034.00 frames. 
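Two recurring patterns in these entries are worth unpacking. First, every loss[...], tot_loss[...], and validation field set in this stretch of the log satisfies loss = 0.5 * simple_loss + pruned_loss (e.g. the validation line just above: 0.5 * 0.2489 + 0.0255 = 0.14995 ~ 0.1499), i.e. the simple-loss term enters the reported total with weight 0.5, and tot_loss is a frame-weighted running average of the per-batch values. The sketch below reproduces that bookkeeping; the names combine_losses and RunningLoss are illustrative assumptions rather than the actual train.py code, and the real tracker evidently also down-weights old batches (hence fractional running frame counts such as 3581394.15).

def combine_losses(simple_loss: float, pruned_loss: float,
                   simple_loss_scale: float = 0.5) -> float:
    # The logged `loss` field is consistent with this weighting throughout
    # this section of the log (assumed, not taken from train.py).
    return simple_loss_scale * simple_loss + pruned_loss

class RunningLoss:
    """Frame-weighted running average, mirroring tot_loss[... over N frames]."""

    def __init__(self) -> None:
        self.sums = {"loss": 0.0, "simple_loss": 0.0, "pruned_loss": 0.0}
        self.frames = 0.0

    def update(self, batch_stats: dict, num_frames: float) -> None:
        # Weight each batch by its number of acoustic frames.
        for key in self.sums:
            self.sums[key] += batch_stats[key] * num_frames
        self.frames += num_frames

    def averages(self) -> dict:
        return {key: total / self.frames for key, total in self.sums.items()}

# Feeding in the Epoch 25, batch 0 numbers reproduces that line's tot_loss,
# since at batch 0 the running average is just the single batch:
tracker = RunningLoss()
tracker.update({"loss": 0.1500, "simple_loss": 0.2442, "pruned_loss": 0.02787},
               18556.0)
print(tracker.averages(), tracker.frames)

Second, the optim.py:369 entries track an adaptive gradient-norm clipping threshold: the five "grad-norm quartiles" values are min/25%/median/75%/max of recently observed norms, and in every entry the logged threshold equals Clipping_scale times the median (e.g. 2.0 * 3.001e+02 = 6.002e+02 in the entry shortly above). A minimal sketch under those assumptions; the history length and the exact window behind percent-clipped are guesses, not taken from optim.py:

from collections import deque

class GradNormClipper:
    """Adaptive clipping consistent with the optim.py:369 statistics."""

    def __init__(self, clipping_scale: float = 2.0, history: int = 400) -> None:
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=history)  # recent grad norms (assumed window)
        self.clipped = 0
        self.seen = 0

    def step(self, grad_norm: float) -> float:
        """Return the scale to multiply the gradients by (1.0 = no clipping)."""
        self.norms.append(grad_norm)
        ordered = sorted(self.norms)
        # min / 25% / median / 75% / max, as printed in "grad-norm quartiles".
        quartiles = [ordered[int(p * (len(ordered) - 1))]
                     for p in (0.0, 0.25, 0.5, 0.75, 1.0)]
        threshold = self.clipping_scale * quartiles[2]
        self.seen += 1
        if grad_norm > threshold:
            self.clipped += 1
            return threshold / grad_norm
        return 1.0

    def percent_clipped(self) -> float:
        # The log appears to reset this counter at each printout; this
        # simplified version keeps a cumulative figure instead.
        return 100.0 * self.clipped / max(1, self.seen)
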
+2023-03-09 18:21:14,847 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 18:21:30,499 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87230.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:21:32,957 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0623, 3.8172, 5.2173, 3.0927, 4.5437, 2.6978, 3.1716, 1.9253], + device='cuda:1'), covar=tensor([0.1088, 0.0968, 0.0134, 0.0942, 0.0473, 0.2714, 0.2801, 0.2326], + device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0248, 0.0213, 0.0203, 0.0262, 0.0277, 0.0331, 0.0243], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 18:21:32,990 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7908, 2.4077, 2.7216, 2.7348, 3.3048, 4.8483, 4.8176, 3.4115], + device='cuda:1'), covar=tensor([0.1939, 0.2495, 0.2868, 0.1934, 0.2367, 0.0243, 0.0360, 0.1017], + device='cuda:1'), in_proj_covar=tensor([0.0312, 0.0352, 0.0391, 0.0282, 0.0389, 0.0252, 0.0297, 0.0263], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 18:21:34,867 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87234.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:21:37,259 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87236.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:22:14,604 INFO [train.py:898] (1/4) Epoch 25, batch 50, loss[loss=0.1576, simple_loss=0.2571, pruned_loss=0.02906, over 18615.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.2494, pruned_loss=0.03448, over 820032.43 frames. ], batch size: 52, lr: 4.54e-03, grad_scale: 8.0 +2023-03-09 18:22:34,834 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87284.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:22:43,682 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87291.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:22:52,234 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.879e+02 2.489e+02 2.896e+02 3.619e+02 7.206e+02, threshold=5.791e+02, percent-clipped=1.0 +2023-03-09 18:23:14,378 INFO [train.py:898] (1/4) Epoch 25, batch 100, loss[loss=0.1372, simple_loss=0.2202, pruned_loss=0.02706, over 18403.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2482, pruned_loss=0.03423, over 1439854.71 frames. ], batch size: 42, lr: 4.54e-03, grad_scale: 8.0 +2023-03-09 18:23:19,338 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9064, 4.5858, 4.6247, 3.5107, 3.7411, 3.4691, 2.7987, 2.6572], + device='cuda:1'), covar=tensor([0.0213, 0.0143, 0.0077, 0.0292, 0.0325, 0.0220, 0.0680, 0.0776], + device='cuda:1'), in_proj_covar=tensor([0.0073, 0.0062, 0.0065, 0.0070, 0.0092, 0.0068, 0.0078, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 18:23:22,042 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 18:23:56,891 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. 
limit=2.0 +2023-03-09 18:24:03,154 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87358.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 18:24:08,331 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87362.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:24:13,623 INFO [train.py:898] (1/4) Epoch 25, batch 150, loss[loss=0.1814, simple_loss=0.2714, pruned_loss=0.0457, over 18247.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2496, pruned_loss=0.03451, over 1919387.17 frames. ], batch size: 57, lr: 4.54e-03, grad_scale: 8.0 +2023-03-09 18:24:49,025 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.772e+02 2.588e+02 3.217e+02 3.788e+02 6.268e+02, threshold=6.435e+02, percent-clipped=1.0 +2023-03-09 18:25:12,504 INFO [train.py:898] (1/4) Epoch 25, batch 200, loss[loss=0.1651, simple_loss=0.2637, pruned_loss=0.03323, over 18494.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.251, pruned_loss=0.03433, over 2298204.22 frames. ], batch size: 53, lr: 4.54e-03, grad_scale: 8.0 +2023-03-09 18:25:19,904 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87423.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:25:32,395 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-03-09 18:25:37,046 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1166, 5.1954, 4.4650, 5.0274, 5.1467, 4.6662, 4.9956, 4.6133], + device='cuda:1'), covar=tensor([0.0800, 0.0745, 0.2454, 0.1258, 0.0836, 0.0576, 0.0782, 0.1508], + device='cuda:1'), in_proj_covar=tensor([0.0508, 0.0577, 0.0718, 0.0443, 0.0471, 0.0526, 0.0558, 0.0690], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 18:25:42,679 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9297, 4.9862, 5.0488, 4.7024, 4.7793, 4.7791, 5.0762, 5.0834], + device='cuda:1'), covar=tensor([0.0069, 0.0067, 0.0053, 0.0116, 0.0057, 0.0153, 0.0092, 0.0096], + device='cuda:1'), in_proj_covar=tensor([0.0098, 0.0072, 0.0078, 0.0096, 0.0077, 0.0107, 0.0090, 0.0089], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 18:25:56,393 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4334, 5.4228, 5.7406, 5.7482, 5.2851, 6.2457, 5.8861, 5.5060], + device='cuda:1'), covar=tensor([0.1114, 0.0567, 0.0630, 0.0798, 0.1427, 0.0681, 0.0594, 0.1563], + device='cuda:1'), in_proj_covar=tensor([0.0360, 0.0294, 0.0317, 0.0322, 0.0328, 0.0430, 0.0287, 0.0422], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 18:26:04,030 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87461.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:26:10,404 INFO [train.py:898] (1/4) Epoch 25, batch 250, loss[loss=0.1427, simple_loss=0.232, pruned_loss=0.02668, over 18496.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2498, pruned_loss=0.0339, over 2592099.39 frames. ], batch size: 47, lr: 4.54e-03, grad_scale: 8.0 +2023-03-09 18:26:14,743 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87470.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:26:18,696 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. 
limit=2.0 +2023-03-09 18:26:46,938 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.865e+02 2.586e+02 2.892e+02 3.324e+02 7.017e+02, threshold=5.784e+02, percent-clipped=1.0 +2023-03-09 18:26:55,109 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87505.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:27:09,407 INFO [train.py:898] (1/4) Epoch 25, batch 300, loss[loss=0.1645, simple_loss=0.2597, pruned_loss=0.03464, over 18441.00 frames. ], tot_loss[loss=0.1585, simple_loss=0.2494, pruned_loss=0.03379, over 2813230.64 frames. ], batch size: 59, lr: 4.54e-03, grad_scale: 8.0 +2023-03-09 18:27:10,680 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87518.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:27:16,044 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87522.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:27:29,486 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87534.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:27:53,214 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-09 18:28:07,723 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87566.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 18:28:08,432 INFO [train.py:898] (1/4) Epoch 25, batch 350, loss[loss=0.146, simple_loss=0.2414, pruned_loss=0.02528, over 18387.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2499, pruned_loss=0.03399, over 2981063.65 frames. ], batch size: 52, lr: 4.53e-03, grad_scale: 8.0 +2023-03-09 18:28:26,402 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87582.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:28:28,892 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87584.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:28:30,949 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87586.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:28:34,513 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87589.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:28:44,690 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.762e+02 2.689e+02 3.189e+02 3.817e+02 6.961e+02, threshold=6.379e+02, percent-clipped=1.0 +2023-03-09 18:28:53,206 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87605.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:29:06,925 INFO [train.py:898] (1/4) Epoch 25, batch 400, loss[loss=0.1348, simple_loss=0.2188, pruned_loss=0.02533, over 18180.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2487, pruned_loss=0.03388, over 3116371.50 frames. 
], batch size: 44, lr: 4.53e-03, grad_scale: 8.0 +2023-03-09 18:29:22,194 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6079, 3.5910, 3.4829, 3.1101, 3.3456, 2.7532, 2.7437, 3.6191], + device='cuda:1'), covar=tensor([0.0062, 0.0096, 0.0086, 0.0138, 0.0093, 0.0190, 0.0210, 0.0066], + device='cuda:1'), in_proj_covar=tensor([0.0149, 0.0167, 0.0141, 0.0191, 0.0148, 0.0183, 0.0188, 0.0127], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 18:29:34,816 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8834, 3.6025, 5.0481, 4.4688, 3.5157, 3.0691, 4.4428, 5.2689], + device='cuda:1'), covar=tensor([0.0785, 0.1479, 0.0196, 0.0395, 0.0852, 0.1233, 0.0379, 0.0241], + device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0278, 0.0164, 0.0184, 0.0193, 0.0193, 0.0196, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 18:29:40,386 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87645.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 18:29:46,514 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87650.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:29:48,046 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 18:29:55,211 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87658.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 18:30:04,398 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87666.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 18:30:05,646 INFO [train.py:898] (1/4) Epoch 25, batch 450, loss[loss=0.1753, simple_loss=0.2703, pruned_loss=0.04016, over 16466.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2493, pruned_loss=0.03411, over 3219046.43 frames. ], batch size: 94, lr: 4.53e-03, grad_scale: 8.0 +2023-03-09 18:30:13,266 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7955, 4.8037, 2.4621, 4.6515, 4.5294, 4.7562, 4.5116, 2.3335], + device='cuda:1'), covar=tensor([0.0267, 0.0116, 0.1014, 0.0144, 0.0112, 0.0140, 0.0177, 0.1544], + device='cuda:1'), in_proj_covar=tensor([0.0089, 0.0082, 0.0096, 0.0096, 0.0087, 0.0077, 0.0085, 0.0097], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 18:30:23,244 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. 
limit=2.0 +2023-03-09 18:30:33,106 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9125, 5.4062, 5.3544, 5.3512, 4.8584, 5.2620, 4.7598, 5.2929], + device='cuda:1'), covar=tensor([0.0221, 0.0231, 0.0189, 0.0428, 0.0358, 0.0210, 0.0976, 0.0238], + device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0271, 0.0267, 0.0346, 0.0280, 0.0276, 0.0312, 0.0268], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 18:30:36,668 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6403, 3.0428, 4.4607, 3.8438, 2.8730, 4.6798, 3.9729, 2.9103], + device='cuda:1'), covar=tensor([0.0563, 0.1469, 0.0280, 0.0432, 0.1524, 0.0204, 0.0638, 0.1005], + device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0245, 0.0226, 0.0170, 0.0230, 0.0220, 0.0257, 0.0202], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 18:30:36,688 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6774, 3.4165, 2.3703, 4.4403, 3.1450, 4.1819, 2.5266, 3.9711], + device='cuda:1'), covar=tensor([0.0596, 0.0847, 0.1394, 0.0454, 0.0785, 0.0323, 0.1168, 0.0373], + device='cuda:1'), in_proj_covar=tensor([0.0220, 0.0229, 0.0194, 0.0292, 0.0196, 0.0271, 0.0205, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 18:30:42,033 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.987e+02 2.570e+02 3.208e+02 4.155e+02 1.022e+03, threshold=6.417e+02, percent-clipped=4.0 +2023-03-09 18:30:51,850 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87706.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:31:03,995 INFO [train.py:898] (1/4) Epoch 25, batch 500, loss[loss=0.1924, simple_loss=0.2871, pruned_loss=0.04885, over 18351.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2498, pruned_loss=0.03449, over 3297739.85 frames. ], batch size: 56, lr: 4.53e-03, grad_scale: 8.0 +2023-03-09 18:31:05,390 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87718.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:31:10,437 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4702, 5.9651, 5.5134, 5.7499, 5.5517, 5.3933, 6.0150, 5.9600], + device='cuda:1'), covar=tensor([0.1117, 0.0800, 0.0481, 0.0764, 0.1393, 0.0742, 0.0608, 0.0787], + device='cuda:1'), in_proj_covar=tensor([0.0624, 0.0545, 0.0394, 0.0574, 0.0770, 0.0568, 0.0781, 0.0597], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:1') +2023-03-09 18:31:20,216 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-03-09 18:31:34,970 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.66 vs. 
limit=2.0 +2023-03-09 18:31:37,992 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7677, 4.7543, 4.4457, 4.6595, 4.7025, 4.1909, 4.6175, 4.4319], + device='cuda:1'), covar=tensor([0.0458, 0.0488, 0.1288, 0.0807, 0.0613, 0.0470, 0.0498, 0.1115], + device='cuda:1'), in_proj_covar=tensor([0.0509, 0.0576, 0.0724, 0.0448, 0.0469, 0.0527, 0.0561, 0.0693], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 18:32:02,463 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.82 vs. limit=5.0 +2023-03-09 18:32:03,144 INFO [train.py:898] (1/4) Epoch 25, batch 550, loss[loss=0.193, simple_loss=0.2653, pruned_loss=0.06038, over 12943.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2499, pruned_loss=0.03449, over 3361855.87 frames. ], batch size: 129, lr: 4.53e-03, grad_scale: 8.0 +2023-03-09 18:32:15,773 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-03-09 18:32:39,325 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.934e+02 2.781e+02 3.111e+02 3.709e+02 9.396e+02, threshold=6.222e+02, percent-clipped=2.0 +2023-03-09 18:33:02,206 INFO [train.py:898] (1/4) Epoch 25, batch 600, loss[loss=0.177, simple_loss=0.2651, pruned_loss=0.04447, over 17257.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2505, pruned_loss=0.03448, over 3408787.28 frames. ], batch size: 78, lr: 4.53e-03, grad_scale: 8.0 +2023-03-09 18:33:02,392 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87817.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:33:22,297 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6054, 2.8745, 4.3070, 3.7038, 2.7487, 4.5640, 3.8730, 2.9247], + device='cuda:1'), covar=tensor([0.0548, 0.1487, 0.0295, 0.0463, 0.1570, 0.0213, 0.0569, 0.0899], + device='cuda:1'), in_proj_covar=tensor([0.0218, 0.0244, 0.0225, 0.0170, 0.0228, 0.0219, 0.0255, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 18:33:54,585 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87861.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:34:01,454 INFO [train.py:898] (1/4) Epoch 25, batch 650, loss[loss=0.1424, simple_loss=0.2316, pruned_loss=0.02662, over 18385.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2505, pruned_loss=0.03446, over 3457569.26 frames. ], batch size: 46, lr: 4.53e-03, grad_scale: 8.0 +2023-03-09 18:34:21,749 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9368, 3.9097, 3.9918, 3.7823, 3.8156, 3.8633, 4.0011, 4.0452], + device='cuda:1'), covar=tensor([0.0089, 0.0077, 0.0075, 0.0105, 0.0086, 0.0131, 0.0079, 0.0087], + device='cuda:1'), in_proj_covar=tensor([0.0097, 0.0071, 0.0077, 0.0095, 0.0077, 0.0106, 0.0090, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 18:34:23,966 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87886.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:34:38,405 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.755e+02 2.498e+02 2.822e+02 3.446e+02 7.564e+02, threshold=5.643e+02, percent-clipped=1.0 +2023-03-09 18:35:00,457 INFO [train.py:898] (1/4) Epoch 25, batch 700, loss[loss=0.1619, simple_loss=0.2453, pruned_loss=0.0393, over 18395.00 frames. 
], tot_loss[loss=0.1604, simple_loss=0.2509, pruned_loss=0.03489, over 3478417.31 frames. ], batch size: 48, lr: 4.53e-03, grad_scale: 8.0 +2023-03-09 18:35:04,541 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 18:35:19,985 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87934.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:35:20,366 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.10 vs. limit=5.0 +2023-03-09 18:35:24,254 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5052, 3.2822, 2.2205, 4.2842, 2.9167, 4.0084, 2.3272, 3.7646], + device='cuda:1'), covar=tensor([0.0696, 0.0933, 0.1616, 0.0522, 0.1032, 0.0344, 0.1368, 0.0461], + device='cuda:1'), in_proj_covar=tensor([0.0221, 0.0230, 0.0194, 0.0293, 0.0196, 0.0271, 0.0206, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 18:35:28,941 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87940.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:35:34,670 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87945.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:35:52,402 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87961.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:35:59,571 INFO [train.py:898] (1/4) Epoch 25, batch 750, loss[loss=0.1427, simple_loss=0.229, pruned_loss=0.02825, over 18261.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2508, pruned_loss=0.0349, over 3504186.16 frames. ], batch size: 47, lr: 4.52e-03, grad_scale: 8.0 +2023-03-09 18:36:35,769 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.893e+02 2.719e+02 3.180e+02 3.674e+02 1.066e+03, threshold=6.360e+02, percent-clipped=6.0 +2023-03-09 18:36:52,437 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-09 18:36:59,853 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7375, 2.8629, 2.5393, 3.0466, 3.7407, 3.6406, 3.2145, 2.9344], + device='cuda:1'), covar=tensor([0.0192, 0.0333, 0.0673, 0.0334, 0.0183, 0.0167, 0.0389, 0.0418], + device='cuda:1'), in_proj_covar=tensor([0.0142, 0.0143, 0.0166, 0.0164, 0.0136, 0.0123, 0.0159, 0.0163], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 18:37:01,709 INFO [train.py:898] (1/4) Epoch 25, batch 800, loss[loss=0.1536, simple_loss=0.2425, pruned_loss=0.03232, over 18588.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2506, pruned_loss=0.03499, over 3527445.34 frames. 
], batch size: 54, lr: 4.52e-03, grad_scale: 8.0 +2023-03-09 18:37:03,678 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88018.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:37:10,375 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8917, 3.3623, 4.5484, 3.9515, 3.0043, 4.8889, 4.1522, 3.1604], + device='cuda:1'), covar=tensor([0.0444, 0.1211, 0.0306, 0.0436, 0.1435, 0.0198, 0.0510, 0.0873], + device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0245, 0.0228, 0.0171, 0.0229, 0.0220, 0.0257, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 18:37:58,657 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88066.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:37:59,607 INFO [train.py:898] (1/4) Epoch 25, batch 850, loss[loss=0.1349, simple_loss=0.2199, pruned_loss=0.02498, over 18386.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2501, pruned_loss=0.03478, over 3528795.67 frames. ], batch size: 42, lr: 4.52e-03, grad_scale: 8.0 +2023-03-09 18:38:25,162 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7836, 3.5771, 2.5492, 4.5036, 3.2814, 4.2888, 2.6911, 4.1896], + device='cuda:1'), covar=tensor([0.0624, 0.0830, 0.1338, 0.0502, 0.0857, 0.0387, 0.1168, 0.0370], + device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0228, 0.0192, 0.0290, 0.0194, 0.0269, 0.0203, 0.0204], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 18:38:35,403 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.735e+02 2.499e+02 3.015e+02 3.591e+02 1.108e+03, threshold=6.031e+02, percent-clipped=1.0 +2023-03-09 18:38:39,183 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88100.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:38:50,732 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.82 vs. limit=5.0 +2023-03-09 18:38:58,092 INFO [train.py:898] (1/4) Epoch 25, batch 900, loss[loss=0.1428, simple_loss=0.226, pruned_loss=0.0298, over 18558.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2499, pruned_loss=0.03458, over 3541976.78 frames. 
], batch size: 45, lr: 4.52e-03, grad_scale: 8.0 +2023-03-09 18:38:58,443 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88117.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:39:39,236 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0682, 5.4762, 5.4766, 5.4529, 4.9929, 5.4279, 4.8748, 5.3594], + device='cuda:1'), covar=tensor([0.0220, 0.0277, 0.0169, 0.0384, 0.0394, 0.0187, 0.1007, 0.0301], + device='cuda:1'), in_proj_covar=tensor([0.0227, 0.0273, 0.0270, 0.0349, 0.0283, 0.0279, 0.0315, 0.0271], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 18:39:50,469 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88161.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 18:39:50,580 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88161.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:39:55,052 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88165.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:39:57,055 INFO [train.py:898] (1/4) Epoch 25, batch 950, loss[loss=0.1504, simple_loss=0.2324, pruned_loss=0.03424, over 18564.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2492, pruned_loss=0.03425, over 3566932.29 frames. ], batch size: 45, lr: 4.52e-03, grad_scale: 8.0 +2023-03-09 18:40:33,070 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.747e+02 2.618e+02 3.009e+02 3.619e+02 7.498e+02, threshold=6.018e+02, percent-clipped=2.0 +2023-03-09 18:40:42,214 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5496, 3.5587, 3.3915, 3.0683, 3.2892, 2.6335, 2.6891, 3.5400], + device='cuda:1'), covar=tensor([0.0070, 0.0093, 0.0089, 0.0143, 0.0096, 0.0202, 0.0218, 0.0076], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0168, 0.0141, 0.0193, 0.0149, 0.0184, 0.0188, 0.0128], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 18:40:47,057 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88209.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:40:55,849 INFO [train.py:898] (1/4) Epoch 25, batch 1000, loss[loss=0.1535, simple_loss=0.2315, pruned_loss=0.03773, over 18498.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2491, pruned_loss=0.03431, over 3575051.30 frames. 
], batch size: 44, lr: 4.52e-03, grad_scale: 8.0 +2023-03-09 18:41:22,504 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88240.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:41:28,192 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88245.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:41:43,478 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0567, 4.2228, 2.5449, 4.2859, 5.3787, 2.6885, 4.1547, 4.2950], + device='cuda:1'), covar=tensor([0.0164, 0.0971, 0.1542, 0.0521, 0.0071, 0.1133, 0.0574, 0.0567], + device='cuda:1'), in_proj_covar=tensor([0.0178, 0.0279, 0.0208, 0.0203, 0.0136, 0.0188, 0.0223, 0.0231], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 18:41:48,134 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88261.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:41:50,401 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0958, 3.9234, 5.2558, 3.1447, 4.5847, 2.7082, 3.2954, 2.0015], + device='cuda:1'), covar=tensor([0.1070, 0.0837, 0.0205, 0.0895, 0.0521, 0.2557, 0.2532, 0.2101], + device='cuda:1'), in_proj_covar=tensor([0.0229, 0.0252, 0.0217, 0.0207, 0.0266, 0.0281, 0.0336, 0.0246], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 18:41:54,316 INFO [train.py:898] (1/4) Epoch 25, batch 1050, loss[loss=0.1659, simple_loss=0.2635, pruned_loss=0.03422, over 18621.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2489, pruned_loss=0.03408, over 3591736.61 frames. ], batch size: 52, lr: 4.52e-03, grad_scale: 8.0 +2023-03-09 18:42:18,435 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88288.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:42:22,097 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88291.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:42:24,178 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88293.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:42:29,462 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.898e+02 2.574e+02 3.147e+02 3.590e+02 6.876e+02, threshold=6.293e+02, percent-clipped=1.0 +2023-03-09 18:42:42,896 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88309.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:42:52,750 INFO [train.py:898] (1/4) Epoch 25, batch 1100, loss[loss=0.1963, simple_loss=0.2777, pruned_loss=0.05742, over 12225.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.25, pruned_loss=0.03442, over 3584092.45 frames. ], batch size: 129, lr: 4.52e-03, grad_scale: 8.0 +2023-03-09 18:43:32,789 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88352.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 18:43:50,008 INFO [train.py:898] (1/4) Epoch 25, batch 1150, loss[loss=0.1437, simple_loss=0.2289, pruned_loss=0.02922, over 18553.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2491, pruned_loss=0.03421, over 3582280.41 frames. 
], batch size: 49, lr: 4.51e-03, grad_scale: 8.0 +2023-03-09 18:44:25,857 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.694e+02 2.731e+02 3.095e+02 3.646e+02 6.544e+02, threshold=6.189e+02, percent-clipped=1.0 +2023-03-09 18:44:26,126 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3138, 5.3626, 5.6144, 5.7268, 5.2587, 6.1717, 5.8573, 5.4966], + device='cuda:1'), covar=tensor([0.1135, 0.0533, 0.0745, 0.0736, 0.1330, 0.0604, 0.0561, 0.1429], + device='cuda:1'), in_proj_covar=tensor([0.0367, 0.0295, 0.0320, 0.0326, 0.0336, 0.0433, 0.0290, 0.0425], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 18:44:45,206 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7243, 3.4495, 4.6647, 4.0624, 3.1059, 2.8813, 4.2308, 4.9320], + device='cuda:1'), covar=tensor([0.0787, 0.1469, 0.0277, 0.0459, 0.1027, 0.1287, 0.0387, 0.0240], + device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0280, 0.0165, 0.0186, 0.0195, 0.0194, 0.0197, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 18:44:48,679 INFO [train.py:898] (1/4) Epoch 25, batch 1200, loss[loss=0.1462, simple_loss=0.2259, pruned_loss=0.0333, over 18435.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2489, pruned_loss=0.0342, over 3582830.92 frames. ], batch size: 43, lr: 4.51e-03, grad_scale: 8.0 +2023-03-09 18:45:00,568 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.6462, 5.5953, 5.2527, 5.5812, 5.5284, 4.9721, 5.4672, 5.2102], + device='cuda:1'), covar=tensor([0.0374, 0.0386, 0.1275, 0.0682, 0.0581, 0.0405, 0.0427, 0.1041], + device='cuda:1'), in_proj_covar=tensor([0.0504, 0.0571, 0.0715, 0.0445, 0.0468, 0.0520, 0.0558, 0.0691], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 18:45:29,983 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.36 vs. limit=5.0 +2023-03-09 18:45:33,320 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 18:45:34,112 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=88456.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:45:46,399 INFO [train.py:898] (1/4) Epoch 25, batch 1250, loss[loss=0.1654, simple_loss=0.2528, pruned_loss=0.03894, over 18613.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2491, pruned_loss=0.03438, over 3589884.42 frames. ], batch size: 52, lr: 4.51e-03, grad_scale: 16.0 +2023-03-09 18:46:22,886 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.846e+02 2.624e+02 3.040e+02 3.727e+02 1.203e+03, threshold=6.079e+02, percent-clipped=2.0 +2023-03-09 18:46:44,554 INFO [train.py:898] (1/4) Epoch 25, batch 1300, loss[loss=0.1543, simple_loss=0.247, pruned_loss=0.03081, over 17829.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.249, pruned_loss=0.03412, over 3590022.79 frames. 
], batch size: 70, lr: 4.51e-03, grad_scale: 16.0 +2023-03-09 18:47:00,764 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7627, 3.1216, 4.5280, 3.9260, 2.6469, 4.7964, 4.0349, 3.0418], + device='cuda:1'), covar=tensor([0.0533, 0.1513, 0.0276, 0.0455, 0.1850, 0.0225, 0.0610, 0.1002], + device='cuda:1'), in_proj_covar=tensor([0.0217, 0.0244, 0.0226, 0.0169, 0.0227, 0.0218, 0.0256, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 18:47:38,465 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-09 18:47:42,879 INFO [train.py:898] (1/4) Epoch 25, batch 1350, loss[loss=0.1742, simple_loss=0.2654, pruned_loss=0.0415, over 12347.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2494, pruned_loss=0.03412, over 3594078.69 frames. ], batch size: 130, lr: 4.51e-03, grad_scale: 16.0 +2023-03-09 18:48:19,808 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.775e+02 2.523e+02 2.984e+02 3.616e+02 6.843e+02, threshold=5.967e+02, percent-clipped=1.0 +2023-03-09 18:48:38,385 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8799, 4.1616, 2.3238, 4.0862, 5.1735, 2.7905, 3.6858, 3.8233], + device='cuda:1'), covar=tensor([0.0205, 0.1133, 0.1807, 0.0675, 0.0106, 0.1161, 0.0824, 0.0959], + device='cuda:1'), in_proj_covar=tensor([0.0178, 0.0279, 0.0208, 0.0204, 0.0137, 0.0189, 0.0222, 0.0231], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 18:48:41,212 INFO [train.py:898] (1/4) Epoch 25, batch 1400, loss[loss=0.156, simple_loss=0.2408, pruned_loss=0.03555, over 18377.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2487, pruned_loss=0.03399, over 3601042.64 frames. ], batch size: 46, lr: 4.51e-03, grad_scale: 16.0 +2023-03-09 18:49:17,086 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=88647.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 18:49:39,308 INFO [train.py:898] (1/4) Epoch 25, batch 1450, loss[loss=0.1785, simple_loss=0.2672, pruned_loss=0.04488, over 17912.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2493, pruned_loss=0.03404, over 3605584.81 frames. ], batch size: 65, lr: 4.51e-03, grad_scale: 8.0 +2023-03-09 18:50:17,346 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.835e+02 2.449e+02 2.886e+02 3.614e+02 7.165e+02, threshold=5.771e+02, percent-clipped=1.0 +2023-03-09 18:50:37,470 INFO [train.py:898] (1/4) Epoch 25, batch 1500, loss[loss=0.1574, simple_loss=0.2528, pruned_loss=0.03101, over 18435.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2491, pruned_loss=0.03411, over 3613483.79 frames. ], batch size: 59, lr: 4.51e-03, grad_scale: 8.0 +2023-03-09 18:51:08,668 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-09 18:51:23,348 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88756.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:51:25,918 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.06 vs. limit=5.0 +2023-03-09 18:51:35,258 INFO [train.py:898] (1/4) Epoch 25, batch 1550, loss[loss=0.1598, simple_loss=0.2576, pruned_loss=0.03098, over 18564.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2485, pruned_loss=0.03397, over 3612747.22 frames. 
], batch size: 54, lr: 4.50e-03, grad_scale: 8.0 +2023-03-09 18:51:40,610 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-03-09 18:52:06,681 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-03-09 18:52:13,192 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.757e+02 2.636e+02 3.082e+02 3.617e+02 8.587e+02, threshold=6.165e+02, percent-clipped=5.0 +2023-03-09 18:52:20,013 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88804.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:52:34,352 INFO [train.py:898] (1/4) Epoch 25, batch 1600, loss[loss=0.1665, simple_loss=0.265, pruned_loss=0.03398, over 18101.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2489, pruned_loss=0.03396, over 3602661.65 frames. ], batch size: 62, lr: 4.50e-03, grad_scale: 8.0 +2023-03-09 18:52:55,350 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7522, 3.7347, 3.4723, 3.2143, 3.4298, 2.8687, 2.9507, 3.7195], + device='cuda:1'), covar=tensor([0.0060, 0.0086, 0.0086, 0.0126, 0.0091, 0.0178, 0.0176, 0.0067], + device='cuda:1'), in_proj_covar=tensor([0.0148, 0.0167, 0.0140, 0.0192, 0.0148, 0.0181, 0.0186, 0.0127], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 18:53:32,609 INFO [train.py:898] (1/4) Epoch 25, batch 1650, loss[loss=0.1685, simple_loss=0.2638, pruned_loss=0.03663, over 18295.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2493, pruned_loss=0.03423, over 3603193.97 frames. ], batch size: 57, lr: 4.50e-03, grad_scale: 8.0 +2023-03-09 18:54:09,628 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.772e+02 2.755e+02 3.165e+02 3.755e+02 1.372e+03, threshold=6.330e+02, percent-clipped=6.0 +2023-03-09 18:54:30,546 INFO [train.py:898] (1/4) Epoch 25, batch 1700, loss[loss=0.1684, simple_loss=0.2624, pruned_loss=0.03725, over 18313.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2491, pruned_loss=0.0341, over 3604497.19 frames. ], batch size: 54, lr: 4.50e-03, grad_scale: 8.0 +2023-03-09 18:54:36,655 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-09 18:55:04,792 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88947.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 18:55:28,682 INFO [train.py:898] (1/4) Epoch 25, batch 1750, loss[loss=0.1399, simple_loss=0.2208, pruned_loss=0.02948, over 18474.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2488, pruned_loss=0.03397, over 3606563.94 frames. 
], batch size: 44, lr: 4.50e-03, grad_scale: 8.0 +2023-03-09 18:55:42,373 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9312, 5.2745, 2.9371, 5.1074, 4.9978, 5.2880, 5.0993, 2.6866], + device='cuda:1'), covar=tensor([0.0234, 0.0065, 0.0764, 0.0083, 0.0071, 0.0064, 0.0087, 0.1018], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0083, 0.0098, 0.0098, 0.0089, 0.0078, 0.0086, 0.0098], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 18:55:45,613 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5653, 3.2884, 2.3121, 4.2194, 3.0180, 4.0431, 2.3770, 3.8495], + device='cuda:1'), covar=tensor([0.0644, 0.0896, 0.1279, 0.0502, 0.0811, 0.0286, 0.1200, 0.0386], + device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0230, 0.0193, 0.0292, 0.0195, 0.0269, 0.0205, 0.0205], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 18:56:00,685 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88995.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 18:56:04,900 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.658e+02 2.478e+02 2.880e+02 3.518e+02 7.008e+02, threshold=5.760e+02, percent-clipped=1.0 +2023-03-09 18:56:27,120 INFO [train.py:898] (1/4) Epoch 25, batch 1800, loss[loss=0.1744, simple_loss=0.2693, pruned_loss=0.03978, over 18395.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2494, pruned_loss=0.03418, over 3605195.73 frames. ], batch size: 50, lr: 4.50e-03, grad_scale: 4.0 +2023-03-09 18:56:27,409 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1849, 5.1702, 4.8049, 5.1226, 5.1207, 4.5127, 5.0147, 4.7539], + device='cuda:1'), covar=tensor([0.0464, 0.0499, 0.1438, 0.0770, 0.0645, 0.0496, 0.0469, 0.1177], + device='cuda:1'), in_proj_covar=tensor([0.0514, 0.0581, 0.0721, 0.0453, 0.0476, 0.0526, 0.0565, 0.0702], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 18:56:34,295 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8915, 3.8878, 3.7176, 3.3113, 3.6185, 2.8483, 2.7754, 3.8531], + device='cuda:1'), covar=tensor([0.0067, 0.0087, 0.0085, 0.0137, 0.0104, 0.0224, 0.0282, 0.0073], + device='cuda:1'), in_proj_covar=tensor([0.0147, 0.0167, 0.0139, 0.0191, 0.0147, 0.0180, 0.0186, 0.0127], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 18:56:38,807 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89027.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:57:25,676 INFO [train.py:898] (1/4) Epoch 25, batch 1850, loss[loss=0.1557, simple_loss=0.2467, pruned_loss=0.0323, over 18107.00 frames. ], tot_loss[loss=0.1585, simple_loss=0.2494, pruned_loss=0.03385, over 3607629.44 frames. 
], batch size: 62, lr: 4.50e-03, grad_scale: 4.0 +2023-03-09 18:57:28,359 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7204, 2.9826, 2.6663, 3.0843, 3.7328, 3.7258, 3.2797, 3.0775], + device='cuda:1'), covar=tensor([0.0166, 0.0269, 0.0534, 0.0334, 0.0177, 0.0141, 0.0344, 0.0342], + device='cuda:1'), in_proj_covar=tensor([0.0143, 0.0143, 0.0166, 0.0165, 0.0139, 0.0123, 0.0160, 0.0163], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 18:57:42,948 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7557, 4.8011, 4.8877, 4.5931, 4.5264, 4.6012, 4.9367, 4.9655], + device='cuda:1'), covar=tensor([0.0067, 0.0066, 0.0058, 0.0105, 0.0073, 0.0162, 0.0085, 0.0088], + device='cuda:1'), in_proj_covar=tensor([0.0097, 0.0072, 0.0078, 0.0096, 0.0077, 0.0107, 0.0089, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 18:57:49,932 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89088.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:58:03,149 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.649e+02 2.662e+02 3.134e+02 3.843e+02 1.322e+03, threshold=6.269e+02, percent-clipped=2.0 +2023-03-09 18:58:11,911 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8724, 3.6647, 4.8927, 4.3662, 3.3458, 3.0743, 4.3892, 5.1703], + device='cuda:1'), covar=tensor([0.0796, 0.1435, 0.0195, 0.0427, 0.0947, 0.1226, 0.0400, 0.0290], + device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0278, 0.0164, 0.0184, 0.0194, 0.0192, 0.0197, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 18:58:15,116 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7862, 5.1713, 5.1577, 5.1653, 4.6330, 5.0438, 4.5394, 5.0916], + device='cuda:1'), covar=tensor([0.0246, 0.0273, 0.0204, 0.0431, 0.0410, 0.0276, 0.1058, 0.0317], + device='cuda:1'), in_proj_covar=tensor([0.0228, 0.0272, 0.0272, 0.0348, 0.0284, 0.0282, 0.0316, 0.0273], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 18:58:23,378 INFO [train.py:898] (1/4) Epoch 25, batch 1900, loss[loss=0.1955, simple_loss=0.2766, pruned_loss=0.05725, over 12107.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2491, pruned_loss=0.03385, over 3601677.98 frames. ], batch size: 130, lr: 4.50e-03, grad_scale: 4.0 +2023-03-09 18:59:22,368 INFO [train.py:898] (1/4) Epoch 25, batch 1950, loss[loss=0.1871, simple_loss=0.2715, pruned_loss=0.05132, over 18143.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.2508, pruned_loss=0.03449, over 3580711.94 frames. 
], batch size: 62, lr: 4.49e-03, grad_scale: 4.0 +2023-03-09 18:59:25,398 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89169.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:59:49,533 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3608, 5.9380, 5.4205, 5.7472, 5.5417, 5.4065, 6.0200, 6.0090], + device='cuda:1'), covar=tensor([0.1348, 0.0812, 0.0577, 0.0734, 0.1455, 0.0804, 0.0620, 0.0675], + device='cuda:1'), in_proj_covar=tensor([0.0622, 0.0542, 0.0395, 0.0574, 0.0766, 0.0565, 0.0781, 0.0591], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:1') +2023-03-09 18:59:50,864 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5467, 2.3313, 2.5492, 2.5967, 2.9643, 4.6375, 4.4536, 3.0927], + device='cuda:1'), covar=tensor([0.2113, 0.2652, 0.3097, 0.2094, 0.2813, 0.0283, 0.0457, 0.1118], + device='cuda:1'), in_proj_covar=tensor([0.0319, 0.0358, 0.0400, 0.0288, 0.0395, 0.0257, 0.0302, 0.0267], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 19:00:00,687 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.906e+02 2.686e+02 3.109e+02 3.749e+02 7.120e+02, threshold=6.217e+02, percent-clipped=3.0 +2023-03-09 19:00:20,454 INFO [train.py:898] (1/4) Epoch 25, batch 2000, loss[loss=0.1784, simple_loss=0.2735, pruned_loss=0.04169, over 18028.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.2507, pruned_loss=0.03456, over 3586651.39 frames. ], batch size: 65, lr: 4.49e-03, grad_scale: 8.0 +2023-03-09 19:00:36,510 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89230.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:01:18,544 INFO [train.py:898] (1/4) Epoch 25, batch 2050, loss[loss=0.1498, simple_loss=0.2493, pruned_loss=0.02512, over 18346.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2503, pruned_loss=0.03449, over 3583348.85 frames. ], batch size: 55, lr: 4.49e-03, grad_scale: 8.0 +2023-03-09 19:01:42,991 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4117, 5.3778, 5.0290, 5.3515, 5.3369, 4.7462, 5.2506, 5.0161], + device='cuda:1'), covar=tensor([0.0410, 0.0452, 0.1263, 0.0739, 0.0563, 0.0448, 0.0398, 0.1012], + device='cuda:1'), in_proj_covar=tensor([0.0514, 0.0578, 0.0718, 0.0450, 0.0472, 0.0524, 0.0563, 0.0698], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 19:01:57,828 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.761e+02 2.663e+02 3.151e+02 3.828e+02 7.716e+02, threshold=6.301e+02, percent-clipped=2.0 +2023-03-09 19:02:17,080 INFO [train.py:898] (1/4) Epoch 25, batch 2100, loss[loss=0.155, simple_loss=0.2538, pruned_loss=0.02805, over 18583.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2497, pruned_loss=0.03412, over 3591807.47 frames. 
], batch size: 54, lr: 4.49e-03, grad_scale: 8.0 +2023-03-09 19:02:28,123 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6740, 2.3153, 2.6289, 2.6943, 3.1169, 4.8262, 4.7059, 3.4889], + device='cuda:1'), covar=tensor([0.2038, 0.2623, 0.2951, 0.1917, 0.2548, 0.0261, 0.0359, 0.0947], + device='cuda:1'), in_proj_covar=tensor([0.0319, 0.0357, 0.0399, 0.0287, 0.0394, 0.0257, 0.0302, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 19:02:50,044 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.56 vs. limit=5.0 +2023-03-09 19:03:15,258 INFO [train.py:898] (1/4) Epoch 25, batch 2150, loss[loss=0.1534, simple_loss=0.2455, pruned_loss=0.03069, over 18297.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.2501, pruned_loss=0.03418, over 3590119.97 frames. ], batch size: 49, lr: 4.49e-03, grad_scale: 8.0 +2023-03-09 19:03:24,295 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89374.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:03:35,637 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89383.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:03:38,252 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0573, 4.4143, 2.4251, 4.3117, 5.3642, 2.8945, 4.1582, 4.2355], + device='cuda:1'), covar=tensor([0.0197, 0.1072, 0.1619, 0.0603, 0.0116, 0.1071, 0.0578, 0.0620], + device='cuda:1'), in_proj_covar=tensor([0.0177, 0.0277, 0.0207, 0.0201, 0.0136, 0.0187, 0.0221, 0.0230], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 19:03:54,951 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.788e+02 2.675e+02 3.172e+02 3.693e+02 8.174e+02, threshold=6.344e+02, percent-clipped=3.0 +2023-03-09 19:04:15,097 INFO [train.py:898] (1/4) Epoch 25, batch 2200, loss[loss=0.1672, simple_loss=0.2606, pruned_loss=0.03688, over 18570.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2508, pruned_loss=0.03457, over 3568682.21 frames. ], batch size: 54, lr: 4.49e-03, grad_scale: 8.0 +2023-03-09 19:04:25,037 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89425.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:04:36,875 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89435.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:04:41,857 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89439.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 19:04:46,722 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-03-09 19:05:13,787 INFO [train.py:898] (1/4) Epoch 25, batch 2250, loss[loss=0.1677, simple_loss=0.2643, pruned_loss=0.03558, over 18201.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2509, pruned_loss=0.03453, over 3569955.14 frames. 
], batch size: 60, lr: 4.49e-03, grad_scale: 8.0 +2023-03-09 19:05:38,384 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89486.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:05:53,609 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.616e+02 2.577e+02 2.992e+02 3.589e+02 8.023e+02, threshold=5.985e+02, percent-clipped=1.0 +2023-03-09 19:05:53,869 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89500.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 19:06:04,242 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89509.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:06:12,814 INFO [train.py:898] (1/4) Epoch 25, batch 2300, loss[loss=0.1811, simple_loss=0.2705, pruned_loss=0.04582, over 18019.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2504, pruned_loss=0.03442, over 3586270.96 frames. ], batch size: 65, lr: 4.49e-03, grad_scale: 8.0 +2023-03-09 19:06:20,030 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7621, 4.9464, 2.6033, 4.7206, 4.6704, 4.9295, 4.7684, 2.6777], + device='cuda:1'), covar=tensor([0.0256, 0.0068, 0.0844, 0.0114, 0.0086, 0.0081, 0.0092, 0.0953], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0083, 0.0098, 0.0098, 0.0089, 0.0079, 0.0086, 0.0098], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 19:06:22,687 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89525.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:06:47,618 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3366, 5.8982, 5.4485, 5.7528, 5.4773, 5.3481, 5.9516, 5.8928], + device='cuda:1'), covar=tensor([0.1234, 0.0790, 0.0572, 0.0677, 0.1435, 0.0741, 0.0577, 0.0734], + device='cuda:1'), in_proj_covar=tensor([0.0625, 0.0551, 0.0400, 0.0578, 0.0773, 0.0572, 0.0788, 0.0595], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:1') +2023-03-09 19:06:53,377 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7473, 3.0105, 2.5722, 2.9605, 3.7320, 3.6163, 3.2512, 2.9365], + device='cuda:1'), covar=tensor([0.0168, 0.0299, 0.0574, 0.0401, 0.0171, 0.0171, 0.0343, 0.0461], + device='cuda:1'), in_proj_covar=tensor([0.0143, 0.0142, 0.0166, 0.0162, 0.0137, 0.0123, 0.0159, 0.0161], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 19:06:54,516 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8042, 3.1203, 2.6802, 3.0477, 3.8000, 3.7147, 3.3455, 3.0434], + device='cuda:1'), covar=tensor([0.0168, 0.0279, 0.0539, 0.0402, 0.0176, 0.0141, 0.0330, 0.0351], + device='cuda:1'), in_proj_covar=tensor([0.0143, 0.0142, 0.0165, 0.0162, 0.0137, 0.0123, 0.0159, 0.0161], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 19:07:11,312 INFO [train.py:898] (1/4) Epoch 25, batch 2350, loss[loss=0.1657, simple_loss=0.2616, pruned_loss=0.03485, over 16998.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2495, pruned_loss=0.03414, over 3595730.43 frames. 
], batch size: 78, lr: 4.48e-03, grad_scale: 8.0 +2023-03-09 19:07:15,027 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89570.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:07:36,113 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3163, 3.2837, 3.1774, 2.9401, 3.1206, 2.6111, 2.6234, 3.2975], + device='cuda:1'), covar=tensor([0.0083, 0.0115, 0.0094, 0.0145, 0.0116, 0.0191, 0.0220, 0.0081], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0168, 0.0140, 0.0193, 0.0149, 0.0183, 0.0186, 0.0127], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 19:07:38,013 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5642, 2.3746, 2.5622, 2.5597, 3.1531, 4.5874, 4.5361, 3.1727], + device='cuda:1'), covar=tensor([0.2066, 0.2676, 0.3210, 0.2171, 0.2461, 0.0327, 0.0418, 0.1134], + device='cuda:1'), in_proj_covar=tensor([0.0321, 0.0359, 0.0401, 0.0289, 0.0396, 0.0258, 0.0303, 0.0268], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 19:07:43,501 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0330, 5.0600, 5.1659, 4.8635, 4.8726, 4.8894, 5.2205, 5.2547], + device='cuda:1'), covar=tensor([0.0066, 0.0066, 0.0056, 0.0104, 0.0058, 0.0162, 0.0065, 0.0091], + device='cuda:1'), in_proj_covar=tensor([0.0098, 0.0073, 0.0078, 0.0097, 0.0078, 0.0108, 0.0090, 0.0089], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 19:07:51,625 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.602e+02 2.634e+02 3.010e+02 3.571e+02 1.034e+03, threshold=6.019e+02, percent-clipped=2.0 +2023-03-09 19:08:09,864 INFO [train.py:898] (1/4) Epoch 25, batch 2400, loss[loss=0.1529, simple_loss=0.2353, pruned_loss=0.03523, over 18403.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2505, pruned_loss=0.03475, over 3584895.72 frames. ], batch size: 43, lr: 4.48e-03, grad_scale: 8.0 +2023-03-09 19:09:07,612 INFO [train.py:898] (1/4) Epoch 25, batch 2450, loss[loss=0.1544, simple_loss=0.2445, pruned_loss=0.03213, over 18367.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2501, pruned_loss=0.03465, over 3588879.69 frames. ], batch size: 46, lr: 4.48e-03, grad_scale: 4.0 +2023-03-09 19:09:23,484 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89680.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:09:26,962 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89683.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:09:49,621 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.687e+02 2.547e+02 3.016e+02 3.415e+02 5.579e+02, threshold=6.031e+02, percent-clipped=0.0 +2023-03-09 19:10:02,391 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9083, 4.8200, 4.9873, 4.6377, 4.6958, 4.8260, 5.0771, 4.9791], + device='cuda:1'), covar=tensor([0.0094, 0.0115, 0.0106, 0.0162, 0.0102, 0.0224, 0.0106, 0.0134], + device='cuda:1'), in_proj_covar=tensor([0.0099, 0.0074, 0.0079, 0.0098, 0.0078, 0.0108, 0.0091, 0.0089], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 19:10:06,502 INFO [train.py:898] (1/4) Epoch 25, batch 2500, loss[loss=0.1725, simple_loss=0.2625, pruned_loss=0.04126, over 18326.00 frames. 
], tot_loss[loss=0.1593, simple_loss=0.2497, pruned_loss=0.03449, over 3599260.47 frames. ], batch size: 54, lr: 4.48e-03, grad_scale: 4.0 +2023-03-09 19:10:21,694 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89730.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:10:22,814 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89731.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:10:34,987 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89741.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:11:04,385 INFO [train.py:898] (1/4) Epoch 25, batch 2550, loss[loss=0.1461, simple_loss=0.2323, pruned_loss=0.02992, over 18414.00 frames. ], tot_loss[loss=0.159, simple_loss=0.2493, pruned_loss=0.03441, over 3594031.53 frames. ], batch size: 48, lr: 4.48e-03, grad_scale: 4.0 +2023-03-09 19:11:20,625 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89781.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:11:37,075 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89795.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 19:11:45,424 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.598e+02 2.460e+02 2.989e+02 3.597e+02 5.800e+02, threshold=5.978e+02, percent-clipped=0.0 +2023-03-09 19:12:03,017 INFO [train.py:898] (1/4) Epoch 25, batch 2600, loss[loss=0.1485, simple_loss=0.2338, pruned_loss=0.03159, over 18564.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2496, pruned_loss=0.03472, over 3582133.42 frames. ], batch size: 45, lr: 4.48e-03, grad_scale: 4.0 +2023-03-09 19:12:12,956 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89825.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:12:19,309 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-03-09 19:12:59,239 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89865.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:12:59,380 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89865.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:13:01,344 INFO [train.py:898] (1/4) Epoch 25, batch 2650, loss[loss=0.1345, simple_loss=0.2106, pruned_loss=0.0292, over 18524.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2494, pruned_loss=0.03478, over 3578434.64 frames. 
], batch size: 44, lr: 4.48e-03, grad_scale: 4.0 +2023-03-09 19:13:08,338 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89873.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:13:35,491 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0376, 3.8036, 5.1939, 3.1549, 4.5575, 2.6778, 3.1408, 1.9466], + device='cuda:1'), covar=tensor([0.1085, 0.0931, 0.0186, 0.0858, 0.0463, 0.2576, 0.2632, 0.2236], + device='cuda:1'), in_proj_covar=tensor([0.0227, 0.0249, 0.0215, 0.0204, 0.0263, 0.0277, 0.0331, 0.0244], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 19:13:41,748 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.822e+02 2.660e+02 3.253e+02 3.912e+02 1.582e+03, threshold=6.506e+02, percent-clipped=3.0 +2023-03-09 19:13:56,659 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6790, 3.7120, 3.4977, 3.2038, 3.4298, 2.8092, 2.8861, 3.7146], + device='cuda:1'), covar=tensor([0.0068, 0.0084, 0.0091, 0.0143, 0.0095, 0.0198, 0.0198, 0.0070], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0169, 0.0142, 0.0193, 0.0150, 0.0184, 0.0188, 0.0128], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 19:13:59,662 INFO [train.py:898] (1/4) Epoch 25, batch 2700, loss[loss=0.1582, simple_loss=0.2485, pruned_loss=0.034, over 18255.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2494, pruned_loss=0.03455, over 3577753.83 frames. ], batch size: 47, lr: 4.48e-03, grad_scale: 4.0 +2023-03-09 19:14:06,690 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1985, 5.2077, 5.4948, 5.4993, 5.2151, 5.9743, 5.6841, 5.2038], + device='cuda:1'), covar=tensor([0.1156, 0.0743, 0.0910, 0.0720, 0.1330, 0.0740, 0.0617, 0.1688], + device='cuda:1'), in_proj_covar=tensor([0.0369, 0.0299, 0.0325, 0.0327, 0.0339, 0.0440, 0.0292, 0.0430], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 19:14:09,987 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89926.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 19:14:41,678 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5149, 2.8366, 2.5385, 2.8396, 3.5455, 3.4607, 3.0700, 2.7487], + device='cuda:1'), covar=tensor([0.0173, 0.0272, 0.0550, 0.0410, 0.0175, 0.0185, 0.0374, 0.0439], + device='cuda:1'), in_proj_covar=tensor([0.0142, 0.0142, 0.0165, 0.0163, 0.0138, 0.0123, 0.0158, 0.0161], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 19:14:42,702 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6230, 2.4346, 4.4150, 3.9194, 2.2750, 4.5546, 3.8930, 2.6822], + device='cuda:1'), covar=tensor([0.0475, 0.2111, 0.0288, 0.0354, 0.2259, 0.0256, 0.0605, 0.1336], + device='cuda:1'), in_proj_covar=tensor([0.0220, 0.0246, 0.0229, 0.0171, 0.0228, 0.0220, 0.0259, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 19:14:58,044 INFO [train.py:898] (1/4) Epoch 25, batch 2750, loss[loss=0.1609, simple_loss=0.2572, pruned_loss=0.03227, over 17145.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2501, pruned_loss=0.03465, over 3565854.24 frames. 
], batch size: 78, lr: 4.47e-03, grad_scale: 4.0 +2023-03-09 19:15:43,118 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.861e+02 2.579e+02 2.972e+02 3.453e+02 7.499e+02, threshold=5.944e+02, percent-clipped=1.0 +2023-03-09 19:16:00,184 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7080, 3.1915, 4.5471, 3.8300, 3.0152, 4.7371, 3.9912, 3.1919], + device='cuda:1'), covar=tensor([0.0525, 0.1280, 0.0279, 0.0470, 0.1369, 0.0219, 0.0589, 0.0844], + device='cuda:1'), in_proj_covar=tensor([0.0220, 0.0246, 0.0229, 0.0171, 0.0229, 0.0220, 0.0258, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 19:16:00,866 INFO [train.py:898] (1/4) Epoch 25, batch 2800, loss[loss=0.1544, simple_loss=0.2387, pruned_loss=0.03503, over 18526.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2505, pruned_loss=0.03493, over 3557410.24 frames. ], batch size: 49, lr: 4.47e-03, grad_scale: 8.0 +2023-03-09 19:16:02,279 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6089, 4.6630, 4.7406, 4.4511, 4.4785, 4.4360, 4.7701, 4.8228], + device='cuda:1'), covar=tensor([0.0088, 0.0075, 0.0072, 0.0130, 0.0072, 0.0203, 0.0092, 0.0103], + device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0074, 0.0079, 0.0098, 0.0078, 0.0109, 0.0091, 0.0090], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 19:16:15,920 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90030.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:16:22,598 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90036.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:16:49,478 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7634, 4.4314, 4.4341, 3.2388, 3.5959, 3.3777, 2.6213, 2.4952], + device='cuda:1'), covar=tensor([0.0248, 0.0174, 0.0102, 0.0361, 0.0379, 0.0257, 0.0768, 0.0878], + device='cuda:1'), in_proj_covar=tensor([0.0075, 0.0063, 0.0068, 0.0071, 0.0093, 0.0071, 0.0080, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 19:16:57,954 INFO [train.py:898] (1/4) Epoch 25, batch 2850, loss[loss=0.1499, simple_loss=0.24, pruned_loss=0.02989, over 17261.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2501, pruned_loss=0.03478, over 3563476.92 frames. ], batch size: 38, lr: 4.47e-03, grad_scale: 8.0 +2023-03-09 19:17:11,247 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90078.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:17:14,782 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90081.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:17:31,048 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90095.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 19:17:38,642 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.912e+02 2.603e+02 3.094e+02 3.729e+02 6.556e+02, threshold=6.188e+02, percent-clipped=2.0 +2023-03-09 19:17:56,098 INFO [train.py:898] (1/4) Epoch 25, batch 2900, loss[loss=0.1815, simple_loss=0.2758, pruned_loss=0.04359, over 16342.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2501, pruned_loss=0.03465, over 3573883.00 frames. 
], batch size: 94, lr: 4.47e-03, grad_scale: 8.0 +2023-03-09 19:18:10,694 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90129.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:18:12,020 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90130.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:18:26,752 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90143.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 19:18:52,954 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90165.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:18:54,881 INFO [train.py:898] (1/4) Epoch 25, batch 2950, loss[loss=0.1839, simple_loss=0.2702, pruned_loss=0.04884, over 17765.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2505, pruned_loss=0.03479, over 3558038.37 frames. ], batch size: 70, lr: 4.47e-03, grad_scale: 8.0 +2023-03-09 19:19:23,422 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90191.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:19:36,174 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.680e+02 2.539e+02 2.924e+02 3.398e+02 8.084e+02, threshold=5.847e+02, percent-clipped=3.0 +2023-03-09 19:19:48,869 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90213.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:19:53,268 INFO [train.py:898] (1/4) Epoch 25, batch 3000, loss[loss=0.1684, simple_loss=0.2616, pruned_loss=0.03759, over 18200.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2507, pruned_loss=0.0349, over 3553278.90 frames. ], batch size: 60, lr: 4.47e-03, grad_scale: 8.0 +2023-03-09 19:19:53,268 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 19:20:05,343 INFO [train.py:932] (1/4) Epoch 25, validation: loss=0.1501, simple_loss=0.2485, pruned_loss=0.02584, over 944034.00 frames. +2023-03-09 19:20:05,344 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 19:20:09,989 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90221.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 19:20:22,509 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90232.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:21:03,798 INFO [train.py:898] (1/4) Epoch 25, batch 3050, loss[loss=0.1444, simple_loss=0.2364, pruned_loss=0.02627, over 18380.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2505, pruned_loss=0.03486, over 3561636.74 frames. ], batch size: 50, lr: 4.47e-03, grad_scale: 8.0 +2023-03-09 19:21:34,110 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90293.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:21:44,551 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.813e+02 2.699e+02 3.154e+02 3.887e+02 1.496e+03, threshold=6.309e+02, percent-clipped=8.0 +2023-03-09 19:22:02,749 INFO [train.py:898] (1/4) Epoch 25, batch 3100, loss[loss=0.1431, simple_loss=0.2231, pruned_loss=0.03152, over 18476.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2497, pruned_loss=0.0345, over 3577166.82 frames. ], batch size: 43, lr: 4.47e-03, grad_scale: 8.0 +2023-03-09 19:22:21,031 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.43 vs. 
limit=2.0 +2023-03-09 19:22:24,150 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90336.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:22:42,296 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90351.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:23:00,970 INFO [train.py:898] (1/4) Epoch 25, batch 3150, loss[loss=0.1672, simple_loss=0.2632, pruned_loss=0.03558, over 17046.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2489, pruned_loss=0.03424, over 3576002.53 frames. ], batch size: 78, lr: 4.46e-03, grad_scale: 4.0 +2023-03-09 19:23:20,279 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90384.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:23:42,934 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.817e+02 2.713e+02 3.340e+02 4.151e+02 5.769e+02, threshold=6.681e+02, percent-clipped=0.0 +2023-03-09 19:23:53,094 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90412.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:23:59,427 INFO [train.py:898] (1/4) Epoch 25, batch 3200, loss[loss=0.1617, simple_loss=0.2614, pruned_loss=0.03099, over 18476.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2483, pruned_loss=0.0341, over 3576587.86 frames. ], batch size: 51, lr: 4.46e-03, grad_scale: 8.0 +2023-03-09 19:24:28,401 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9180, 4.2019, 4.1495, 4.2248, 3.8232, 4.1159, 3.8042, 4.1446], + device='cuda:1'), covar=tensor([0.0291, 0.0353, 0.0272, 0.0494, 0.0362, 0.0277, 0.0856, 0.0340], + device='cuda:1'), in_proj_covar=tensor([0.0227, 0.0272, 0.0270, 0.0346, 0.0282, 0.0279, 0.0313, 0.0272], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 19:24:48,351 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90459.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:24:57,502 INFO [train.py:898] (1/4) Epoch 25, batch 3250, loss[loss=0.1477, simple_loss=0.2318, pruned_loss=0.03177, over 18337.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2472, pruned_loss=0.03376, over 3579555.98 frames. ], batch size: 46, lr: 4.46e-03, grad_scale: 8.0 +2023-03-09 19:25:08,399 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7990, 5.2879, 5.2407, 5.3635, 4.6890, 5.2136, 4.2359, 5.2140], + device='cuda:1'), covar=tensor([0.0303, 0.0358, 0.0269, 0.0406, 0.0431, 0.0276, 0.1631, 0.0346], + device='cuda:1'), in_proj_covar=tensor([0.0226, 0.0272, 0.0269, 0.0345, 0.0281, 0.0278, 0.0311, 0.0272], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 19:25:19,172 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90486.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:25:31,155 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90496.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:25:38,659 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.740e+02 2.752e+02 3.132e+02 3.659e+02 1.386e+03, threshold=6.263e+02, percent-clipped=2.0 +2023-03-09 19:25:54,830 INFO [train.py:898] (1/4) Epoch 25, batch 3300, loss[loss=0.1513, simple_loss=0.2359, pruned_loss=0.03332, over 18495.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.2478, pruned_loss=0.03399, over 3589767.48 frames. 
], batch size: 47, lr: 4.46e-03, grad_scale: 8.0 +2023-03-09 19:25:59,693 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90520.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:26:00,804 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90521.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 19:26:32,634 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8718, 4.5496, 4.5749, 3.4779, 3.7392, 3.5722, 2.9483, 2.8130], + device='cuda:1'), covar=tensor([0.0219, 0.0159, 0.0083, 0.0289, 0.0326, 0.0213, 0.0637, 0.0748], + device='cuda:1'), in_proj_covar=tensor([0.0074, 0.0063, 0.0067, 0.0070, 0.0092, 0.0070, 0.0079, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 19:26:42,205 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90557.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:26:49,191 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-09 19:26:52,887 INFO [train.py:898] (1/4) Epoch 25, batch 3350, loss[loss=0.1448, simple_loss=0.2322, pruned_loss=0.0287, over 18521.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2474, pruned_loss=0.03388, over 3582760.69 frames. ], batch size: 44, lr: 4.46e-03, grad_scale: 8.0 +2023-03-09 19:26:55,361 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90569.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:27:17,358 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90588.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:27:34,779 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.869e+02 2.603e+02 3.023e+02 3.526e+02 7.217e+02, threshold=6.047e+02, percent-clipped=2.0 +2023-03-09 19:27:40,575 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 19:27:50,757 INFO [train.py:898] (1/4) Epoch 25, batch 3400, loss[loss=0.1524, simple_loss=0.2439, pruned_loss=0.03046, over 18527.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.2481, pruned_loss=0.03403, over 3583062.44 frames. ], batch size: 49, lr: 4.46e-03, grad_scale: 8.0 +2023-03-09 19:28:01,360 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8280, 3.6863, 4.9931, 2.8973, 4.3320, 2.6979, 3.0867, 1.8890], + device='cuda:1'), covar=tensor([0.1202, 0.0963, 0.0167, 0.0987, 0.0509, 0.2434, 0.2605, 0.2220], + device='cuda:1'), in_proj_covar=tensor([0.0228, 0.0251, 0.0217, 0.0206, 0.0264, 0.0279, 0.0333, 0.0244], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 19:28:49,671 INFO [train.py:898] (1/4) Epoch 25, batch 3450, loss[loss=0.172, simple_loss=0.2633, pruned_loss=0.04037, over 18447.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2495, pruned_loss=0.03418, over 3570026.18 frames. 
], batch size: 59, lr: 4.46e-03, grad_scale: 8.0 +2023-03-09 19:29:08,403 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5652, 3.1170, 3.8027, 3.5676, 3.0484, 2.9325, 3.5717, 3.9480], + device='cuda:1'), covar=tensor([0.0765, 0.1135, 0.0310, 0.0457, 0.0846, 0.1036, 0.0441, 0.0389], + device='cuda:1'), in_proj_covar=tensor([0.0153, 0.0282, 0.0166, 0.0185, 0.0196, 0.0194, 0.0199, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 19:29:20,820 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4474, 2.8278, 2.5880, 2.8597, 3.5197, 3.3880, 3.1710, 2.8191], + device='cuda:1'), covar=tensor([0.0208, 0.0297, 0.0558, 0.0369, 0.0244, 0.0223, 0.0382, 0.0390], + device='cuda:1'), in_proj_covar=tensor([0.0142, 0.0141, 0.0163, 0.0161, 0.0138, 0.0122, 0.0158, 0.0161], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 19:29:31,656 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.836e+02 2.461e+02 2.925e+02 3.622e+02 7.224e+02, threshold=5.850e+02, percent-clipped=2.0 +2023-03-09 19:29:37,220 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90707.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:29:48,680 INFO [train.py:898] (1/4) Epoch 25, batch 3500, loss[loss=0.137, simple_loss=0.2235, pruned_loss=0.0252, over 18257.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.25, pruned_loss=0.0344, over 3566121.65 frames. ], batch size: 45, lr: 4.46e-03, grad_scale: 8.0 +2023-03-09 19:30:44,126 INFO [train.py:898] (1/4) Epoch 25, batch 3550, loss[loss=0.1797, simple_loss=0.2714, pruned_loss=0.044, over 17601.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2493, pruned_loss=0.03412, over 3570921.88 frames. ], batch size: 70, lr: 4.45e-03, grad_scale: 8.0 +2023-03-09 19:31:00,426 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-09 19:31:04,151 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90786.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:31:22,416 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.855e+02 2.570e+02 3.030e+02 3.622e+02 8.486e+02, threshold=6.060e+02, percent-clipped=3.0 +2023-03-09 19:31:34,937 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 19:31:35,413 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90815.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:31:37,536 INFO [train.py:898] (1/4) Epoch 25, batch 3600, loss[loss=0.1584, simple_loss=0.256, pruned_loss=0.03037, over 18495.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2503, pruned_loss=0.03459, over 3571214.43 frames. ], batch size: 51, lr: 4.45e-03, grad_scale: 8.0 +2023-03-09 19:31:56,432 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90834.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:32:11,940 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90849.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:32:40,257 INFO [train.py:898] (1/4) Epoch 26, batch 0, loss[loss=0.1454, simple_loss=0.2302, pruned_loss=0.0303, over 18256.00 frames. ], tot_loss[loss=0.1454, simple_loss=0.2302, pruned_loss=0.0303, over 18256.00 frames. 
], batch size: 45, lr: 4.36e-03, grad_scale: 8.0 +2023-03-09 19:32:40,257 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 19:32:48,418 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7723, 3.2542, 4.3982, 3.6054, 2.9000, 4.6096, 4.0022, 3.1099], + device='cuda:1'), covar=tensor([0.0478, 0.1252, 0.0295, 0.0543, 0.1549, 0.0237, 0.0545, 0.0945], + device='cuda:1'), in_proj_covar=tensor([0.0214, 0.0242, 0.0225, 0.0169, 0.0225, 0.0216, 0.0253, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 19:32:52,307 INFO [train.py:932] (1/4) Epoch 26, validation: loss=0.1501, simple_loss=0.2487, pruned_loss=0.02573, over 944034.00 frames. +2023-03-09 19:32:52,308 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 19:32:53,492 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90852.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:33:00,037 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6708, 2.9563, 2.7945, 2.9691, 3.7538, 3.6961, 3.3528, 2.9150], + device='cuda:1'), covar=tensor([0.0202, 0.0283, 0.0499, 0.0402, 0.0185, 0.0158, 0.0354, 0.0396], + device='cuda:1'), in_proj_covar=tensor([0.0142, 0.0141, 0.0163, 0.0161, 0.0137, 0.0121, 0.0158, 0.0160], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 19:33:35,244 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90888.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:33:49,734 INFO [train.py:898] (1/4) Epoch 26, batch 50, loss[loss=0.131, simple_loss=0.2201, pruned_loss=0.02098, over 18297.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.2499, pruned_loss=0.03496, over 817225.53 frames. ], batch size: 49, lr: 4.36e-03, grad_scale: 8.0 +2023-03-09 19:33:52,048 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.759e+02 2.622e+02 3.196e+02 4.102e+02 7.514e+02, threshold=6.391e+02, percent-clipped=4.0 +2023-03-09 19:34:01,635 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90910.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:34:31,655 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90936.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:34:44,421 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7591, 2.5148, 2.7230, 2.7867, 3.3228, 4.9309, 4.9335, 3.4823], + device='cuda:1'), covar=tensor([0.2007, 0.2500, 0.3086, 0.1965, 0.2302, 0.0248, 0.0329, 0.1006], + device='cuda:1'), in_proj_covar=tensor([0.0318, 0.0356, 0.0400, 0.0286, 0.0392, 0.0256, 0.0298, 0.0266], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 19:34:48,575 INFO [train.py:898] (1/4) Epoch 26, batch 100, loss[loss=0.1506, simple_loss=0.2366, pruned_loss=0.03227, over 18536.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2496, pruned_loss=0.03457, over 1430861.82 frames. ], batch size: 49, lr: 4.36e-03, grad_scale: 8.0 +2023-03-09 19:35:47,458 INFO [train.py:898] (1/4) Epoch 26, batch 150, loss[loss=0.1523, simple_loss=0.2463, pruned_loss=0.02919, over 18390.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2475, pruned_loss=0.0337, over 1906641.36 frames. 
], batch size: 50, lr: 4.36e-03, grad_scale: 8.0 +2023-03-09 19:35:49,741 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.912e+02 2.712e+02 3.182e+02 3.692e+02 5.749e+02, threshold=6.364e+02, percent-clipped=0.0 +2023-03-09 19:35:54,645 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91007.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:36:19,665 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4625, 5.3741, 5.7489, 5.8472, 5.3238, 6.2726, 5.9380, 5.6167], + device='cuda:1'), covar=tensor([0.0924, 0.0639, 0.0788, 0.0665, 0.1337, 0.0679, 0.0619, 0.1641], + device='cuda:1'), in_proj_covar=tensor([0.0366, 0.0297, 0.0323, 0.0325, 0.0337, 0.0437, 0.0292, 0.0431], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 19:36:28,124 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-03-09 19:36:46,108 INFO [train.py:898] (1/4) Epoch 26, batch 200, loss[loss=0.1659, simple_loss=0.2535, pruned_loss=0.03914, over 18524.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2485, pruned_loss=0.03356, over 2278889.49 frames. ], batch size: 47, lr: 4.36e-03, grad_scale: 8.0 +2023-03-09 19:36:49,179 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-09 19:36:50,886 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91055.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:37:31,621 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7938, 4.4776, 4.4771, 3.2751, 3.6868, 3.4356, 2.7426, 2.5028], + device='cuda:1'), covar=tensor([0.0218, 0.0145, 0.0092, 0.0361, 0.0339, 0.0254, 0.0724, 0.0860], + device='cuda:1'), in_proj_covar=tensor([0.0073, 0.0062, 0.0067, 0.0070, 0.0091, 0.0069, 0.0078, 0.0084], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 19:37:43,759 INFO [train.py:898] (1/4) Epoch 26, batch 250, loss[loss=0.2044, simple_loss=0.287, pruned_loss=0.06089, over 12538.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2492, pruned_loss=0.03408, over 2550001.58 frames. ], batch size: 130, lr: 4.36e-03, grad_scale: 8.0 +2023-03-09 19:37:45,995 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.600e+02 2.590e+02 3.099e+02 3.572e+02 5.022e+02, threshold=6.197e+02, percent-clipped=0.0 +2023-03-09 19:37:59,548 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91115.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:38:35,879 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-09 19:38:41,416 INFO [train.py:898] (1/4) Epoch 26, batch 300, loss[loss=0.1351, simple_loss=0.2177, pruned_loss=0.02627, over 18518.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2491, pruned_loss=0.03412, over 2778756.71 frames. 
], batch size: 44, lr: 4.36e-03, grad_scale: 8.0 +2023-03-09 19:38:42,758 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91152.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:38:55,107 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91163.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:39:06,662 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9195, 5.0180, 5.0212, 4.7525, 4.7699, 4.7238, 5.0864, 5.1376], + device='cuda:1'), covar=tensor([0.0074, 0.0061, 0.0062, 0.0116, 0.0069, 0.0168, 0.0081, 0.0096], + device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0074, 0.0079, 0.0098, 0.0078, 0.0108, 0.0090, 0.0089], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 19:39:39,111 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91200.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:39:40,111 INFO [train.py:898] (1/4) Epoch 26, batch 350, loss[loss=0.1466, simple_loss=0.2277, pruned_loss=0.03277, over 17656.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.2485, pruned_loss=0.03383, over 2959397.06 frames. ], batch size: 39, lr: 4.36e-03, grad_scale: 8.0 +2023-03-09 19:39:42,442 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.808e+02 2.557e+02 3.001e+02 3.591e+02 6.784e+02, threshold=6.003e+02, percent-clipped=1.0 +2023-03-09 19:39:44,904 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91205.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:40:15,341 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7551, 3.6153, 2.3942, 4.5508, 3.2438, 4.3883, 2.7818, 4.2026], + device='cuda:1'), covar=tensor([0.0702, 0.0803, 0.1544, 0.0526, 0.0849, 0.0360, 0.1124, 0.0366], + device='cuda:1'), in_proj_covar=tensor([0.0223, 0.0232, 0.0195, 0.0296, 0.0197, 0.0272, 0.0208, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 19:40:38,703 INFO [train.py:898] (1/4) Epoch 26, batch 400, loss[loss=0.1528, simple_loss=0.2476, pruned_loss=0.02899, over 18236.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.2482, pruned_loss=0.03377, over 3102590.82 frames. ], batch size: 60, lr: 4.35e-03, grad_scale: 8.0 +2023-03-09 19:41:18,927 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91285.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:41:37,885 INFO [train.py:898] (1/4) Epoch 26, batch 450, loss[loss=0.1804, simple_loss=0.272, pruned_loss=0.04446, over 18349.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2481, pruned_loss=0.03361, over 3209340.43 frames. 
], batch size: 56, lr: 4.35e-03, grad_scale: 8.0 +2023-03-09 19:41:40,034 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.596e+02 2.481e+02 2.873e+02 3.390e+02 6.237e+02, threshold=5.746e+02, percent-clipped=1.0 +2023-03-09 19:41:51,788 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6708, 3.6585, 4.9358, 4.3071, 3.2505, 2.9319, 4.3425, 5.1698], + device='cuda:1'), covar=tensor([0.0832, 0.1464, 0.0210, 0.0410, 0.1017, 0.1296, 0.0418, 0.0193], + device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0280, 0.0165, 0.0185, 0.0194, 0.0193, 0.0199, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 19:42:09,738 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.53 vs. limit=2.0 +2023-03-09 19:42:10,690 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8686, 4.6457, 4.6809, 3.5145, 3.7780, 3.5978, 2.7193, 2.8607], + device='cuda:1'), covar=tensor([0.0220, 0.0118, 0.0070, 0.0298, 0.0354, 0.0220, 0.0701, 0.0710], + device='cuda:1'), in_proj_covar=tensor([0.0074, 0.0063, 0.0067, 0.0070, 0.0092, 0.0070, 0.0079, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 19:42:29,815 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91346.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:42:35,722 INFO [train.py:898] (1/4) Epoch 26, batch 500, loss[loss=0.1609, simple_loss=0.253, pruned_loss=0.03439, over 18464.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.2486, pruned_loss=0.03362, over 3306996.93 frames. ], batch size: 53, lr: 4.35e-03, grad_scale: 8.0 +2023-03-09 19:43:32,649 INFO [train.py:898] (1/4) Epoch 26, batch 550, loss[loss=0.1608, simple_loss=0.2575, pruned_loss=0.03203, over 18308.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2493, pruned_loss=0.0342, over 3356488.13 frames. ], batch size: 54, lr: 4.35e-03, grad_scale: 8.0 +2023-03-09 19:43:35,350 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.676e+02 2.412e+02 2.843e+02 3.584e+02 8.267e+02, threshold=5.686e+02, percent-clipped=4.0 +2023-03-09 19:43:52,925 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-03-09 19:44:30,263 INFO [train.py:898] (1/4) Epoch 26, batch 600, loss[loss=0.1332, simple_loss=0.2171, pruned_loss=0.02461, over 18267.00 frames. ], tot_loss[loss=0.1585, simple_loss=0.2493, pruned_loss=0.0339, over 3401825.82 frames. ], batch size: 45, lr: 4.35e-03, grad_scale: 8.0 +2023-03-09 19:45:11,610 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0181, 5.4643, 2.9004, 5.3494, 5.2257, 5.5017, 5.2858, 2.8972], + device='cuda:1'), covar=tensor([0.0219, 0.0077, 0.0753, 0.0069, 0.0072, 0.0070, 0.0104, 0.0908], + device='cuda:1'), in_proj_covar=tensor([0.0091, 0.0083, 0.0097, 0.0098, 0.0089, 0.0079, 0.0086, 0.0098], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 19:45:29,307 INFO [train.py:898] (1/4) Epoch 26, batch 650, loss[loss=0.1786, simple_loss=0.2634, pruned_loss=0.04687, over 18286.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2477, pruned_loss=0.03365, over 3437432.90 frames. 
], batch size: 57, lr: 4.35e-03, grad_scale: 8.0 +2023-03-09 19:45:32,528 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.747e+02 2.481e+02 2.888e+02 3.491e+02 6.758e+02, threshold=5.775e+02, percent-clipped=2.0 +2023-03-09 19:45:33,427 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-03-09 19:45:34,021 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91505.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:46:16,600 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91542.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 19:46:27,363 INFO [train.py:898] (1/4) Epoch 26, batch 700, loss[loss=0.1651, simple_loss=0.2536, pruned_loss=0.03831, over 18474.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.2484, pruned_loss=0.03387, over 3469963.57 frames. ], batch size: 51, lr: 4.35e-03, grad_scale: 4.0 +2023-03-09 19:46:29,757 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91553.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:46:43,425 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4933, 5.9960, 5.5333, 5.7732, 5.5526, 5.4187, 6.0609, 6.0172], + device='cuda:1'), covar=tensor([0.1075, 0.0774, 0.0496, 0.0731, 0.1498, 0.0726, 0.0633, 0.0735], + device='cuda:1'), in_proj_covar=tensor([0.0628, 0.0556, 0.0404, 0.0579, 0.0777, 0.0575, 0.0791, 0.0603], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:1') +2023-03-09 19:47:26,221 INFO [train.py:898] (1/4) Epoch 26, batch 750, loss[loss=0.1689, simple_loss=0.2588, pruned_loss=0.03951, over 16152.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2492, pruned_loss=0.03404, over 3497822.62 frames. ], batch size: 94, lr: 4.35e-03, grad_scale: 4.0 +2023-03-09 19:47:29,380 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91603.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 19:47:30,094 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.568e+02 2.592e+02 2.993e+02 3.569e+02 6.310e+02, threshold=5.987e+02, percent-clipped=2.0 +2023-03-09 19:48:12,529 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91641.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:48:23,824 INFO [train.py:898] (1/4) Epoch 26, batch 800, loss[loss=0.1714, simple_loss=0.2654, pruned_loss=0.03868, over 18384.00 frames. ], tot_loss[loss=0.1585, simple_loss=0.249, pruned_loss=0.03405, over 3525672.30 frames. ], batch size: 56, lr: 4.35e-03, grad_scale: 8.0 +2023-03-09 19:48:34,553 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7602, 3.6112, 4.9737, 3.0558, 4.3829, 2.5348, 3.1361, 1.7520], + device='cuda:1'), covar=tensor([0.1336, 0.0983, 0.0215, 0.0931, 0.0530, 0.2688, 0.2649, 0.2357], + device='cuda:1'), in_proj_covar=tensor([0.0228, 0.0250, 0.0218, 0.0206, 0.0263, 0.0277, 0.0335, 0.0242], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 19:49:22,520 INFO [train.py:898] (1/4) Epoch 26, batch 850, loss[loss=0.1481, simple_loss=0.2366, pruned_loss=0.02982, over 18346.00 frames. ], tot_loss[loss=0.1585, simple_loss=0.2491, pruned_loss=0.03399, over 3542048.55 frames. 
], batch size: 46, lr: 4.34e-03, grad_scale: 8.0 +2023-03-09 19:49:25,716 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.771e+02 2.531e+02 3.031e+02 3.445e+02 6.326e+02, threshold=6.062e+02, percent-clipped=1.0 +2023-03-09 19:50:20,617 INFO [train.py:898] (1/4) Epoch 26, batch 900, loss[loss=0.1669, simple_loss=0.2561, pruned_loss=0.03889, over 18194.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.2485, pruned_loss=0.03369, over 3554585.75 frames. ], batch size: 60, lr: 4.34e-03, grad_scale: 8.0 +2023-03-09 19:51:14,098 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-03-09 19:51:16,402 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-03-09 19:51:19,511 INFO [train.py:898] (1/4) Epoch 26, batch 950, loss[loss=0.1542, simple_loss=0.2452, pruned_loss=0.03161, over 16114.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2479, pruned_loss=0.03345, over 3563215.57 frames. ], batch size: 94, lr: 4.34e-03, grad_scale: 8.0 +2023-03-09 19:51:22,552 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.812e+02 2.439e+02 3.074e+02 3.438e+02 1.468e+03, threshold=6.149e+02, percent-clipped=4.0 +2023-03-09 19:52:17,325 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2664, 5.2762, 5.5872, 5.6859, 5.2105, 6.1924, 5.7807, 5.4353], + device='cuda:1'), covar=tensor([0.1202, 0.0640, 0.0745, 0.0730, 0.1467, 0.0673, 0.0665, 0.1606], + device='cuda:1'), in_proj_covar=tensor([0.0363, 0.0296, 0.0322, 0.0325, 0.0337, 0.0435, 0.0291, 0.0427], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 19:52:18,266 INFO [train.py:898] (1/4) Epoch 26, batch 1000, loss[loss=0.1522, simple_loss=0.2369, pruned_loss=0.03378, over 18246.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2489, pruned_loss=0.03383, over 3564971.04 frames. ], batch size: 45, lr: 4.34e-03, grad_scale: 8.0 +2023-03-09 19:53:05,501 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91891.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:53:13,123 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91898.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 19:53:16,255 INFO [train.py:898] (1/4) Epoch 26, batch 1050, loss[loss=0.1611, simple_loss=0.2561, pruned_loss=0.03305, over 17732.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2485, pruned_loss=0.0339, over 3585146.03 frames. ], batch size: 70, lr: 4.34e-03, grad_scale: 8.0 +2023-03-09 19:53:19,766 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.941e+02 2.632e+02 3.014e+02 3.529e+02 5.169e+02, threshold=6.027e+02, percent-clipped=0.0 +2023-03-09 19:53:38,714 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-09 19:54:03,916 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91941.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:54:14,902 INFO [train.py:898] (1/4) Epoch 26, batch 1100, loss[loss=0.1541, simple_loss=0.2484, pruned_loss=0.02991, over 17975.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.2483, pruned_loss=0.03392, over 3582784.53 frames. 
], batch size: 65, lr: 4.34e-03, grad_scale: 8.0 +2023-03-09 19:54:16,404 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91952.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:54:19,672 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91955.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:54:50,830 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91981.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 19:54:59,756 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91989.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:55:17,952 INFO [train.py:898] (1/4) Epoch 26, batch 1150, loss[loss=0.1655, simple_loss=0.2606, pruned_loss=0.03523, over 18453.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2489, pruned_loss=0.03422, over 3574121.87 frames. ], batch size: 59, lr: 4.34e-03, grad_scale: 8.0 +2023-03-09 19:55:21,344 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.947e+02 2.558e+02 3.039e+02 3.805e+02 7.483e+02, threshold=6.077e+02, percent-clipped=1.0 +2023-03-09 19:55:34,958 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92016.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:56:06,556 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92042.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 19:56:16,300 INFO [train.py:898] (1/4) Epoch 26, batch 1200, loss[loss=0.1595, simple_loss=0.2585, pruned_loss=0.03028, over 18341.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2478, pruned_loss=0.03363, over 3588381.08 frames. ], batch size: 55, lr: 4.34e-03, grad_scale: 8.0 +2023-03-09 19:57:14,328 INFO [train.py:898] (1/4) Epoch 26, batch 1250, loss[loss=0.1916, simple_loss=0.2747, pruned_loss=0.05419, over 12205.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2484, pruned_loss=0.03365, over 3575948.52 frames. ], batch size: 130, lr: 4.33e-03, grad_scale: 8.0 +2023-03-09 19:57:17,634 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.939e+02 2.685e+02 3.194e+02 3.689e+02 7.845e+02, threshold=6.387e+02, percent-clipped=2.0 +2023-03-09 19:58:12,742 INFO [train.py:898] (1/4) Epoch 26, batch 1300, loss[loss=0.1964, simple_loss=0.2857, pruned_loss=0.05358, over 18125.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2479, pruned_loss=0.03359, over 3583793.96 frames. 
], batch size: 62, lr: 4.33e-03, grad_scale: 8.0 +2023-03-09 19:58:19,718 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8104, 4.4467, 4.5142, 3.4363, 3.7184, 3.4390, 2.7447, 2.5696], + device='cuda:1'), covar=tensor([0.0217, 0.0156, 0.0085, 0.0297, 0.0346, 0.0240, 0.0672, 0.0826], + device='cuda:1'), in_proj_covar=tensor([0.0074, 0.0063, 0.0068, 0.0070, 0.0092, 0.0071, 0.0079, 0.0086], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 19:58:42,698 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9865, 3.3405, 4.6904, 3.8972, 3.1932, 4.9303, 4.2033, 3.4337], + device='cuda:1'), covar=tensor([0.0420, 0.1294, 0.0229, 0.0453, 0.1320, 0.0179, 0.0489, 0.0766], + device='cuda:1'), in_proj_covar=tensor([0.0218, 0.0244, 0.0230, 0.0171, 0.0227, 0.0220, 0.0258, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 19:58:46,219 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5143, 2.1678, 2.4520, 2.5149, 2.8008, 4.7538, 4.6080, 3.3506], + device='cuda:1'), covar=tensor([0.2469, 0.3469, 0.3795, 0.2485, 0.3839, 0.0320, 0.0471, 0.1118], + device='cuda:1'), in_proj_covar=tensor([0.0321, 0.0358, 0.0403, 0.0288, 0.0394, 0.0259, 0.0301, 0.0268], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 19:59:07,893 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92198.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 19:59:11,335 INFO [train.py:898] (1/4) Epoch 26, batch 1350, loss[loss=0.1379, simple_loss=0.232, pruned_loss=0.02187, over 18281.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.247, pruned_loss=0.0331, over 3599157.14 frames. ], batch size: 49, lr: 4.33e-03, grad_scale: 8.0 +2023-03-09 19:59:14,646 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.584e+02 2.419e+02 2.885e+02 3.522e+02 6.118e+02, threshold=5.770e+02, percent-clipped=0.0 +2023-03-09 19:59:26,241 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6153, 3.4065, 4.5052, 4.0740, 3.2143, 2.8254, 3.9887, 4.6965], + device='cuda:1'), covar=tensor([0.0872, 0.1425, 0.0272, 0.0436, 0.0988, 0.1287, 0.0466, 0.0398], + device='cuda:1'), in_proj_covar=tensor([0.0153, 0.0283, 0.0168, 0.0187, 0.0197, 0.0196, 0.0201, 0.0209], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 19:59:31,623 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4623, 3.4224, 2.0609, 4.2647, 2.9055, 4.0525, 2.5514, 3.8114], + device='cuda:1'), covar=tensor([0.0686, 0.0846, 0.1585, 0.0492, 0.0856, 0.0331, 0.1123, 0.0437], + device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0232, 0.0196, 0.0297, 0.0197, 0.0272, 0.0208, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 20:00:03,380 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92246.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 20:00:05,013 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92247.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:00:09,204 INFO [train.py:898] (1/4) Epoch 26, batch 1400, loss[loss=0.1591, simple_loss=0.2534, pruned_loss=0.03242, over 18626.00 frames. 
], tot_loss[loss=0.1571, simple_loss=0.2476, pruned_loss=0.03326, over 3601935.34 frames. ], batch size: 52, lr: 4.33e-03, grad_scale: 8.0 +2023-03-09 20:00:21,831 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4508, 2.7512, 4.0093, 3.4405, 2.5893, 4.1979, 3.7336, 2.7187], + device='cuda:1'), covar=tensor([0.0543, 0.1604, 0.0378, 0.0507, 0.1640, 0.0274, 0.0626, 0.1067], + device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0246, 0.0231, 0.0172, 0.0228, 0.0221, 0.0259, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 20:01:06,841 INFO [train.py:898] (1/4) Epoch 26, batch 1450, loss[loss=0.1785, simple_loss=0.2672, pruned_loss=0.04489, over 16111.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2473, pruned_loss=0.03315, over 3609269.75 frames. ], batch size: 94, lr: 4.33e-03, grad_scale: 8.0 +2023-03-09 20:01:10,175 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.801e+02 2.582e+02 3.137e+02 3.833e+02 9.171e+02, threshold=6.274e+02, percent-clipped=8.0 +2023-03-09 20:01:14,626 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8444, 4.1341, 2.3520, 4.2468, 5.2395, 2.7084, 4.0811, 4.0413], + device='cuda:1'), covar=tensor([0.0261, 0.1133, 0.1665, 0.0554, 0.0085, 0.1138, 0.0537, 0.0673], + device='cuda:1'), in_proj_covar=tensor([0.0180, 0.0279, 0.0207, 0.0201, 0.0138, 0.0186, 0.0222, 0.0228], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 20:01:18,982 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92311.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:01:48,291 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92337.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 20:02:04,832 INFO [train.py:898] (1/4) Epoch 26, batch 1500, loss[loss=0.1482, simple_loss=0.2347, pruned_loss=0.03087, over 18266.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.2472, pruned_loss=0.03323, over 3615045.53 frames. ], batch size: 45, lr: 4.33e-03, grad_scale: 8.0 +2023-03-09 20:03:03,798 INFO [train.py:898] (1/4) Epoch 26, batch 1550, loss[loss=0.162, simple_loss=0.25, pruned_loss=0.03697, over 18532.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2477, pruned_loss=0.03364, over 3598552.14 frames. ], batch size: 49, lr: 4.33e-03, grad_scale: 8.0 +2023-03-09 20:03:07,217 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.867e+02 2.584e+02 2.933e+02 3.549e+02 6.992e+02, threshold=5.866e+02, percent-clipped=1.0 +2023-03-09 20:04:01,756 INFO [train.py:898] (1/4) Epoch 26, batch 1600, loss[loss=0.1253, simple_loss=0.2136, pruned_loss=0.01851, over 18493.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2485, pruned_loss=0.03355, over 3608974.09 frames. ], batch size: 44, lr: 4.33e-03, grad_scale: 8.0 +2023-03-09 20:04:33,947 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7316, 2.9221, 4.4464, 3.8749, 2.8726, 4.6599, 4.1139, 2.8636], + device='cuda:1'), covar=tensor([0.0468, 0.1498, 0.0281, 0.0425, 0.1541, 0.0218, 0.0468, 0.1051], + device='cuda:1'), in_proj_covar=tensor([0.0219, 0.0244, 0.0229, 0.0171, 0.0226, 0.0219, 0.0256, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 20:04:38,707 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. 
limit=2.0 +2023-03-09 20:04:59,855 INFO [train.py:898] (1/4) Epoch 26, batch 1650, loss[loss=0.1378, simple_loss=0.2172, pruned_loss=0.02919, over 18503.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2481, pruned_loss=0.03339, over 3601231.13 frames. ], batch size: 44, lr: 4.33e-03, grad_scale: 8.0 +2023-03-09 20:05:02,959 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.649e+02 2.477e+02 2.880e+02 3.587e+02 5.533e+02, threshold=5.760e+02, percent-clipped=0.0 +2023-03-09 20:05:53,363 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92547.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:05:58,141 INFO [train.py:898] (1/4) Epoch 26, batch 1700, loss[loss=0.177, simple_loss=0.2687, pruned_loss=0.04264, over 18241.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2484, pruned_loss=0.03343, over 3605925.19 frames. ], batch size: 60, lr: 4.32e-03, grad_scale: 8.0 +2023-03-09 20:06:48,784 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92595.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:06:55,615 INFO [train.py:898] (1/4) Epoch 26, batch 1750, loss[loss=0.1501, simple_loss=0.2421, pruned_loss=0.02903, over 18395.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2489, pruned_loss=0.03385, over 3575897.43 frames. ], batch size: 52, lr: 4.32e-03, grad_scale: 8.0 +2023-03-09 20:06:59,646 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.605e+02 2.546e+02 2.994e+02 3.589e+02 6.308e+02, threshold=5.987e+02, percent-clipped=1.0 +2023-03-09 20:07:00,135 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8457, 3.5457, 4.9523, 2.9609, 4.3157, 2.6128, 3.0409, 1.7794], + device='cuda:1'), covar=tensor([0.1258, 0.1006, 0.0191, 0.0986, 0.0527, 0.2605, 0.2736, 0.2325], + device='cuda:1'), in_proj_covar=tensor([0.0228, 0.0251, 0.0219, 0.0206, 0.0266, 0.0277, 0.0336, 0.0244], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 20:07:08,074 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92611.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:07:25,062 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92625.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:07:38,459 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92637.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 20:07:54,310 INFO [train.py:898] (1/4) Epoch 26, batch 1800, loss[loss=0.1431, simple_loss=0.232, pruned_loss=0.02709, over 18156.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2481, pruned_loss=0.03372, over 3567793.38 frames. ], batch size: 44, lr: 4.32e-03, grad_scale: 8.0 +2023-03-09 20:08:04,018 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92659.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:08:35,013 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92685.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 20:08:36,181 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92686.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:08:52,536 INFO [train.py:898] (1/4) Epoch 26, batch 1850, loss[loss=0.1562, simple_loss=0.2526, pruned_loss=0.02994, over 18397.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.2483, pruned_loss=0.03374, over 3576205.67 frames. 
], batch size: 52, lr: 4.32e-03, grad_scale: 8.0 +2023-03-09 20:08:55,754 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.027e+02 2.840e+02 3.330e+02 3.901e+02 1.111e+03, threshold=6.660e+02, percent-clipped=3.0 +2023-03-09 20:09:16,038 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92720.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:09:51,311 INFO [train.py:898] (1/4) Epoch 26, batch 1900, loss[loss=0.1779, simple_loss=0.2634, pruned_loss=0.04624, over 12633.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2485, pruned_loss=0.03369, over 3570766.31 frames. ], batch size: 129, lr: 4.32e-03, grad_scale: 8.0 +2023-03-09 20:10:28,247 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92781.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:10:50,691 INFO [train.py:898] (1/4) Epoch 26, batch 1950, loss[loss=0.1659, simple_loss=0.2565, pruned_loss=0.03764, over 17786.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2481, pruned_loss=0.03347, over 3574483.03 frames. ], batch size: 70, lr: 4.32e-03, grad_scale: 8.0 +2023-03-09 20:10:54,087 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.627e+02 2.403e+02 2.804e+02 3.512e+02 6.323e+02, threshold=5.608e+02, percent-clipped=0.0 +2023-03-09 20:11:26,539 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92831.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:11:40,643 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. limit=2.0 +2023-03-09 20:11:49,094 INFO [train.py:898] (1/4) Epoch 26, batch 2000, loss[loss=0.1427, simple_loss=0.2275, pruned_loss=0.02896, over 18392.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2481, pruned_loss=0.0335, over 3577929.87 frames. ], batch size: 42, lr: 4.32e-03, grad_scale: 8.0 +2023-03-09 20:12:38,399 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92892.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:12:46,016 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92899.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:12:47,828 INFO [train.py:898] (1/4) Epoch 26, batch 2050, loss[loss=0.1727, simple_loss=0.2651, pruned_loss=0.04014, over 18378.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2487, pruned_loss=0.03397, over 3573799.84 frames. ], batch size: 56, lr: 4.32e-03, grad_scale: 8.0 +2023-03-09 20:12:51,215 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.874e+02 2.574e+02 3.005e+02 3.405e+02 1.130e+03, threshold=6.011e+02, percent-clipped=2.0 +2023-03-09 20:13:23,189 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92931.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:13:45,946 INFO [train.py:898] (1/4) Epoch 26, batch 2100, loss[loss=0.1688, simple_loss=0.2615, pruned_loss=0.03809, over 18293.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2488, pruned_loss=0.03404, over 3563631.06 frames. 
], batch size: 57, lr: 4.31e-03, grad_scale: 8.0 +2023-03-09 20:13:56,547 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92960.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:14:13,908 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92975.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:14:21,749 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92981.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:14:34,843 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92992.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:14:44,671 INFO [train.py:898] (1/4) Epoch 26, batch 2150, loss[loss=0.1659, simple_loss=0.2613, pruned_loss=0.03522, over 16462.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2488, pruned_loss=0.03389, over 3579557.58 frames. ], batch size: 94, lr: 4.31e-03, grad_scale: 8.0 +2023-03-09 20:14:48,050 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.894e+02 2.711e+02 3.209e+02 3.707e+02 5.473e+02, threshold=6.417e+02, percent-clipped=0.0 +2023-03-09 20:14:55,187 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93010.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:15:26,103 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93036.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:15:26,147 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93036.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:15:43,265 INFO [train.py:898] (1/4) Epoch 26, batch 2200, loss[loss=0.2139, simple_loss=0.2922, pruned_loss=0.06776, over 12449.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2492, pruned_loss=0.03397, over 3576962.61 frames. ], batch size: 129, lr: 4.31e-03, grad_scale: 8.0 +2023-03-09 20:15:48,088 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0066, 5.0608, 5.1229, 4.8459, 4.8598, 4.8797, 5.1966, 5.1652], + device='cuda:1'), covar=tensor([0.0068, 0.0063, 0.0057, 0.0104, 0.0065, 0.0150, 0.0074, 0.0104], + device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0074, 0.0079, 0.0098, 0.0079, 0.0108, 0.0091, 0.0090], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 20:16:06,484 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93071.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:16:12,768 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93076.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:16:15,384 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.42 vs. limit=2.0 +2023-03-09 20:16:38,240 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93097.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 20:16:42,344 INFO [train.py:898] (1/4) Epoch 26, batch 2250, loss[loss=0.1613, simple_loss=0.2611, pruned_loss=0.03071, over 17963.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2493, pruned_loss=0.03405, over 3568743.30 frames. 
], batch size: 65, lr: 4.31e-03, grad_scale: 8.0 +2023-03-09 20:16:45,627 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.732e+02 2.458e+02 2.849e+02 3.441e+02 5.512e+02, threshold=5.698e+02, percent-clipped=0.0 +2023-03-09 20:16:58,269 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93115.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:17:20,380 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0176, 4.2029, 2.6857, 4.1356, 5.3055, 2.7264, 3.9550, 4.1635], + device='cuda:1'), covar=tensor([0.0179, 0.1127, 0.1496, 0.0641, 0.0077, 0.1143, 0.0656, 0.0662], + device='cuda:1'), in_proj_covar=tensor([0.0179, 0.0276, 0.0206, 0.0199, 0.0138, 0.0186, 0.0221, 0.0228], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 20:17:41,073 INFO [train.py:898] (1/4) Epoch 26, batch 2300, loss[loss=0.1668, simple_loss=0.2593, pruned_loss=0.03716, over 18356.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2498, pruned_loss=0.03424, over 3559110.81 frames. ], batch size: 56, lr: 4.31e-03, grad_scale: 8.0 +2023-03-09 20:18:09,598 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93176.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:18:22,388 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93187.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:18:33,118 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1914, 5.2404, 5.4106, 5.4642, 5.1296, 5.9980, 5.6302, 5.1914], + device='cuda:1'), covar=tensor([0.1160, 0.0677, 0.0818, 0.0913, 0.1421, 0.0712, 0.0706, 0.1709], + device='cuda:1'), in_proj_covar=tensor([0.0369, 0.0304, 0.0326, 0.0330, 0.0343, 0.0441, 0.0296, 0.0432], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 20:18:40,066 INFO [train.py:898] (1/4) Epoch 26, batch 2350, loss[loss=0.1335, simple_loss=0.2159, pruned_loss=0.02556, over 18388.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.2489, pruned_loss=0.0337, over 3566297.52 frames. ], batch size: 42, lr: 4.31e-03, grad_scale: 8.0 +2023-03-09 20:18:43,317 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.656e+02 2.468e+02 2.905e+02 3.361e+02 5.906e+02, threshold=5.811e+02, percent-clipped=1.0 +2023-03-09 20:19:38,035 INFO [train.py:898] (1/4) Epoch 26, batch 2400, loss[loss=0.1587, simple_loss=0.2565, pruned_loss=0.03049, over 16202.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.2487, pruned_loss=0.03353, over 3572893.96 frames. ], batch size: 94, lr: 4.31e-03, grad_scale: 8.0 +2023-03-09 20:19:42,637 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93255.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:19:54,496 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.48 vs. 
limit=5.0 +2023-03-09 20:19:55,292 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6483, 2.2855, 2.5323, 2.6372, 3.0586, 4.5777, 4.4131, 3.1132], + device='cuda:1'), covar=tensor([0.2062, 0.2707, 0.2955, 0.2017, 0.2632, 0.0300, 0.0470, 0.1171], + device='cuda:1'), in_proj_covar=tensor([0.0324, 0.0361, 0.0404, 0.0287, 0.0395, 0.0262, 0.0301, 0.0270], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 20:20:11,742 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93281.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:20:19,012 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93287.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:20:34,729 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.00 vs. limit=5.0 +2023-03-09 20:20:36,270 INFO [train.py:898] (1/4) Epoch 26, batch 2450, loss[loss=0.1485, simple_loss=0.2369, pruned_loss=0.03002, over 18499.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2477, pruned_loss=0.0332, over 3577575.16 frames. ], batch size: 47, lr: 4.31e-03, grad_scale: 8.0 +2023-03-09 20:20:39,772 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.978e+02 2.528e+02 2.957e+02 3.512e+02 5.428e+02, threshold=5.913e+02, percent-clipped=0.0 +2023-03-09 20:20:54,127 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.94 vs. limit=5.0 +2023-03-09 20:21:08,321 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93329.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:21:10,630 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93331.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:21:11,901 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2226, 5.2069, 4.8296, 5.1593, 5.1550, 4.5691, 5.0396, 4.8241], + device='cuda:1'), covar=tensor([0.0523, 0.0512, 0.1587, 0.0791, 0.0566, 0.0466, 0.0527, 0.1091], + device='cuda:1'), in_proj_covar=tensor([0.0513, 0.0578, 0.0724, 0.0450, 0.0473, 0.0523, 0.0561, 0.0695], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 20:21:33,885 INFO [train.py:898] (1/4) Epoch 26, batch 2500, loss[loss=0.1438, simple_loss=0.2295, pruned_loss=0.02906, over 18500.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2474, pruned_loss=0.03299, over 3584022.56 frames. 
], batch size: 44, lr: 4.31e-03, grad_scale: 8.0 +2023-03-09 20:21:52,365 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93366.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:22:00,232 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4892, 5.4567, 5.0831, 5.4162, 5.4238, 4.8360, 5.2837, 5.0645], + device='cuda:1'), covar=tensor([0.0502, 0.0480, 0.1473, 0.0817, 0.0601, 0.0397, 0.0522, 0.1052], + device='cuda:1'), in_proj_covar=tensor([0.0514, 0.0579, 0.0724, 0.0451, 0.0473, 0.0525, 0.0563, 0.0696], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 20:22:03,736 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93376.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:22:22,159 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93392.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 20:22:32,979 INFO [train.py:898] (1/4) Epoch 26, batch 2550, loss[loss=0.1587, simple_loss=0.2553, pruned_loss=0.03111, over 18304.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2479, pruned_loss=0.03307, over 3570933.03 frames. ], batch size: 54, lr: 4.30e-03, grad_scale: 8.0 +2023-03-09 20:22:36,875 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.774e+02 2.484e+02 3.027e+02 3.757e+02 6.388e+02, threshold=6.054e+02, percent-clipped=1.0 +2023-03-09 20:23:00,411 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93424.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:23:31,986 INFO [train.py:898] (1/4) Epoch 26, batch 2600, loss[loss=0.1604, simple_loss=0.2528, pruned_loss=0.03405, over 16293.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2472, pruned_loss=0.0328, over 3574959.88 frames. ], batch size: 94, lr: 4.30e-03, grad_scale: 8.0 +2023-03-09 20:23:42,747 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7977, 3.6267, 5.0315, 2.7737, 4.4067, 2.5216, 2.9221, 1.6465], + device='cuda:1'), covar=tensor([0.1263, 0.0971, 0.0172, 0.1040, 0.0505, 0.2734, 0.2707, 0.2426], + device='cuda:1'), in_proj_covar=tensor([0.0226, 0.0248, 0.0218, 0.0204, 0.0263, 0.0274, 0.0333, 0.0241], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 20:23:55,855 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93471.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:23:57,584 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-09 20:24:14,461 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93487.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:24:30,091 INFO [train.py:898] (1/4) Epoch 26, batch 2650, loss[loss=0.1501, simple_loss=0.2423, pruned_loss=0.02892, over 18299.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2477, pruned_loss=0.03293, over 3585151.74 frames. 
], batch size: 49, lr: 4.30e-03, grad_scale: 8.0 +2023-03-09 20:24:34,072 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.541e+02 2.517e+02 2.981e+02 3.680e+02 1.070e+03, threshold=5.961e+02, percent-clipped=2.0 +2023-03-09 20:25:09,831 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93535.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:25:27,949 INFO [train.py:898] (1/4) Epoch 26, batch 2700, loss[loss=0.1334, simple_loss=0.2131, pruned_loss=0.02682, over 18510.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2471, pruned_loss=0.03286, over 3597498.08 frames. ], batch size: 44, lr: 4.30e-03, grad_scale: 16.0 +2023-03-09 20:25:32,741 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93555.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:25:38,166 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7279, 3.5262, 4.8825, 4.2574, 3.2184, 2.8844, 4.2753, 5.0667], + device='cuda:1'), covar=tensor([0.0847, 0.1552, 0.0221, 0.0450, 0.1051, 0.1320, 0.0432, 0.0334], + device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0279, 0.0167, 0.0184, 0.0193, 0.0195, 0.0198, 0.0208], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 20:26:03,770 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6021, 2.3666, 2.5652, 2.6375, 3.2012, 4.7596, 4.8050, 3.2492], + device='cuda:1'), covar=tensor([0.2173, 0.2729, 0.2930, 0.2002, 0.2439, 0.0297, 0.0343, 0.1135], + device='cuda:1'), in_proj_covar=tensor([0.0323, 0.0359, 0.0404, 0.0286, 0.0393, 0.0262, 0.0300, 0.0270], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 20:26:10,322 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93587.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:26:11,660 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6790, 3.4966, 2.3441, 4.5369, 3.2089, 4.2845, 2.6352, 3.9758], + device='cuda:1'), covar=tensor([0.0658, 0.0782, 0.1405, 0.0420, 0.0813, 0.0335, 0.1119, 0.0447], + device='cuda:1'), in_proj_covar=tensor([0.0222, 0.0231, 0.0195, 0.0294, 0.0197, 0.0271, 0.0206, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 20:26:18,702 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.39 vs. limit=5.0 +2023-03-09 20:26:26,588 INFO [train.py:898] (1/4) Epoch 26, batch 2750, loss[loss=0.1692, simple_loss=0.2623, pruned_loss=0.03803, over 18311.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2467, pruned_loss=0.03286, over 3589756.92 frames. 
], batch size: 57, lr: 4.30e-03, grad_scale: 16.0 +2023-03-09 20:26:29,000 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93603.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:26:29,919 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.721e+02 2.428e+02 2.838e+02 3.417e+02 1.067e+03, threshold=5.676e+02, percent-clipped=2.0 +2023-03-09 20:26:58,965 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4057, 5.9251, 5.5178, 5.7414, 5.5285, 5.3779, 5.9580, 5.9125], + device='cuda:1'), covar=tensor([0.1127, 0.0782, 0.0483, 0.0679, 0.1359, 0.0724, 0.0638, 0.0755], + device='cuda:1'), in_proj_covar=tensor([0.0628, 0.0554, 0.0398, 0.0574, 0.0773, 0.0571, 0.0785, 0.0598], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:1') +2023-03-09 20:27:02,327 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93631.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:27:06,746 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93635.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:27:25,263 INFO [train.py:898] (1/4) Epoch 26, batch 2800, loss[loss=0.1718, simple_loss=0.2662, pruned_loss=0.03872, over 18077.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2477, pruned_loss=0.03357, over 3570208.27 frames. ], batch size: 62, lr: 4.30e-03, grad_scale: 16.0 +2023-03-09 20:27:43,715 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93666.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:27:58,591 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93679.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:28:13,469 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93692.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:28:23,638 INFO [train.py:898] (1/4) Epoch 26, batch 2850, loss[loss=0.1368, simple_loss=0.231, pruned_loss=0.02136, over 18391.00 frames. ], tot_loss[loss=0.157, simple_loss=0.247, pruned_loss=0.03345, over 3576285.78 frames. ], batch size: 50, lr: 4.30e-03, grad_scale: 16.0 +2023-03-09 20:28:27,050 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.715e+02 2.471e+02 3.010e+02 3.579e+02 6.367e+02, threshold=6.020e+02, percent-clipped=3.0 +2023-03-09 20:28:38,552 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93714.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:28:54,381 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-09 20:29:00,504 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8810, 4.6213, 4.6522, 3.5526, 3.9125, 3.6711, 2.7827, 2.7832], + device='cuda:1'), covar=tensor([0.0241, 0.0156, 0.0085, 0.0298, 0.0337, 0.0228, 0.0700, 0.0746], + device='cuda:1'), in_proj_covar=tensor([0.0074, 0.0064, 0.0068, 0.0071, 0.0092, 0.0070, 0.0079, 0.0085], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 20:29:07,599 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. 
limit=2.0 +2023-03-09 20:29:09,310 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93740.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:29:21,127 INFO [train.py:898] (1/4) Epoch 26, batch 2900, loss[loss=0.1592, simple_loss=0.259, pruned_loss=0.02971, over 18624.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2476, pruned_loss=0.0336, over 3573116.14 frames. ], batch size: 52, lr: 4.30e-03, grad_scale: 16.0 +2023-03-09 20:29:21,370 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4184, 5.3730, 5.6840, 5.7937, 5.2676, 6.2717, 5.9651, 5.5391], + device='cuda:1'), covar=tensor([0.1122, 0.0695, 0.0772, 0.0807, 0.1754, 0.0707, 0.0679, 0.1823], + device='cuda:1'), in_proj_covar=tensor([0.0366, 0.0302, 0.0322, 0.0330, 0.0341, 0.0437, 0.0293, 0.0430], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 20:29:37,201 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.76 vs. limit=2.0 +2023-03-09 20:29:44,548 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93770.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:29:45,665 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93771.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:29:46,052 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.55 vs. limit=2.0 +2023-03-09 20:30:19,938 INFO [train.py:898] (1/4) Epoch 26, batch 2950, loss[loss=0.156, simple_loss=0.2518, pruned_loss=0.0301, over 16035.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2481, pruned_loss=0.0336, over 3576502.90 frames. ], batch size: 94, lr: 4.30e-03, grad_scale: 16.0 +2023-03-09 20:30:23,986 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.766e+02 2.413e+02 2.765e+02 3.459e+02 6.770e+02, threshold=5.531e+02, percent-clipped=1.0 +2023-03-09 20:30:41,671 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93819.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:30:56,226 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93831.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:31:01,014 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-09 20:31:18,477 INFO [train.py:898] (1/4) Epoch 26, batch 3000, loss[loss=0.1675, simple_loss=0.2612, pruned_loss=0.03693, over 18348.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2482, pruned_loss=0.03355, over 3576641.95 frames. ], batch size: 55, lr: 4.29e-03, grad_scale: 16.0 +2023-03-09 20:31:18,477 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 20:31:30,354 INFO [train.py:932] (1/4) Epoch 26, validation: loss=0.15, simple_loss=0.2481, pruned_loss=0.02599, over 944034.00 frames. +2023-03-09 20:31:30,355 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 20:32:28,922 INFO [train.py:898] (1/4) Epoch 26, batch 3050, loss[loss=0.1494, simple_loss=0.2323, pruned_loss=0.03327, over 17744.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2489, pruned_loss=0.03388, over 3576899.63 frames. 
], batch size: 39, lr: 4.29e-03, grad_scale: 16.0 +2023-03-09 20:32:30,378 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3386, 3.2603, 3.2386, 2.9716, 3.1987, 2.6192, 2.6357, 3.3507], + device='cuda:1'), covar=tensor([0.0087, 0.0107, 0.0089, 0.0130, 0.0100, 0.0200, 0.0212, 0.0074], + device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0172, 0.0145, 0.0194, 0.0152, 0.0186, 0.0190, 0.0131], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 20:32:32,222 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.657e+02 2.555e+02 3.073e+02 3.527e+02 8.787e+02, threshold=6.146e+02, percent-clipped=2.0 +2023-03-09 20:32:45,232 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6511, 3.5720, 3.5095, 2.9322, 3.3395, 2.5991, 2.7245, 3.4893], + device='cuda:1'), covar=tensor([0.0087, 0.0112, 0.0109, 0.0196, 0.0142, 0.0296, 0.0282, 0.0106], + device='cuda:1'), in_proj_covar=tensor([0.0152, 0.0172, 0.0145, 0.0194, 0.0152, 0.0186, 0.0190, 0.0131], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 20:33:27,971 INFO [train.py:898] (1/4) Epoch 26, batch 3100, loss[loss=0.1617, simple_loss=0.2573, pruned_loss=0.03304, over 18491.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2483, pruned_loss=0.03353, over 3584454.11 frames. ], batch size: 51, lr: 4.29e-03, grad_scale: 16.0 +2023-03-09 20:34:03,793 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93981.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:34:24,047 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0504, 5.4314, 2.8338, 5.2440, 5.1568, 5.4375, 5.2716, 2.8823], + device='cuda:1'), covar=tensor([0.0200, 0.0062, 0.0742, 0.0066, 0.0068, 0.0068, 0.0079, 0.0881], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0084, 0.0098, 0.0099, 0.0089, 0.0079, 0.0086, 0.0097], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 20:34:31,192 INFO [train.py:898] (1/4) Epoch 26, batch 3150, loss[loss=0.1555, simple_loss=0.2469, pruned_loss=0.03202, over 18129.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2481, pruned_loss=0.03338, over 3583409.64 frames. ], batch size: 62, lr: 4.29e-03, grad_scale: 16.0 +2023-03-09 20:34:34,547 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.522e+02 2.445e+02 2.866e+02 3.470e+02 6.892e+02, threshold=5.732e+02, percent-clipped=1.0 +2023-03-09 20:34:55,129 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8511, 3.8446, 3.6971, 3.3587, 3.6599, 3.1029, 3.0551, 3.9228], + device='cuda:1'), covar=tensor([0.0063, 0.0091, 0.0080, 0.0111, 0.0086, 0.0156, 0.0184, 0.0055], + device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0171, 0.0143, 0.0192, 0.0151, 0.0185, 0.0189, 0.0130], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 20:35:19,189 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94042.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:35:29,134 INFO [train.py:898] (1/4) Epoch 26, batch 3200, loss[loss=0.1424, simple_loss=0.2353, pruned_loss=0.02479, over 18259.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2483, pruned_loss=0.03332, over 3595799.37 frames. 
], batch size: 47, lr: 4.29e-03, grad_scale: 16.0 +2023-03-09 20:36:27,601 INFO [train.py:898] (1/4) Epoch 26, batch 3250, loss[loss=0.1679, simple_loss=0.2635, pruned_loss=0.03616, over 18376.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2487, pruned_loss=0.03343, over 3588486.28 frames. ], batch size: 56, lr: 4.29e-03, grad_scale: 16.0 +2023-03-09 20:36:31,044 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.794e+02 2.553e+02 3.024e+02 3.724e+02 8.014e+02, threshold=6.047e+02, percent-clipped=2.0 +2023-03-09 20:36:50,828 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-09 20:36:57,598 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94126.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:37:26,333 INFO [train.py:898] (1/4) Epoch 26, batch 3300, loss[loss=0.1457, simple_loss=0.2308, pruned_loss=0.03032, over 18592.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.249, pruned_loss=0.03371, over 3596874.54 frames. ], batch size: 45, lr: 4.29e-03, grad_scale: 16.0 +2023-03-09 20:38:24,385 INFO [train.py:898] (1/4) Epoch 26, batch 3350, loss[loss=0.1589, simple_loss=0.2544, pruned_loss=0.03171, over 18498.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2487, pruned_loss=0.03337, over 3603212.85 frames. ], batch size: 51, lr: 4.29e-03, grad_scale: 8.0 +2023-03-09 20:38:28,979 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.721e+02 2.569e+02 3.071e+02 3.653e+02 6.732e+02, threshold=6.142e+02, percent-clipped=1.0 +2023-03-09 20:38:50,135 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=94223.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 20:39:23,052 INFO [train.py:898] (1/4) Epoch 26, batch 3400, loss[loss=0.1512, simple_loss=0.2337, pruned_loss=0.03438, over 18430.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2487, pruned_loss=0.03333, over 3603412.02 frames. 
], batch size: 43, lr: 4.29e-03, grad_scale: 8.0 +2023-03-09 20:39:34,808 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2053, 5.1614, 4.8111, 5.1416, 5.1365, 4.5691, 5.0035, 4.7966], + device='cuda:1'), covar=tensor([0.0461, 0.0516, 0.1396, 0.0773, 0.0576, 0.0440, 0.0469, 0.1055], + device='cuda:1'), in_proj_covar=tensor([0.0514, 0.0583, 0.0729, 0.0454, 0.0477, 0.0532, 0.0564, 0.0700], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 20:40:00,175 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5432, 6.0969, 5.6840, 5.9106, 5.7789, 5.5381, 6.2082, 6.1277], + device='cuda:1'), covar=tensor([0.1260, 0.0826, 0.0432, 0.0725, 0.1279, 0.0737, 0.0556, 0.0754], + device='cuda:1'), in_proj_covar=tensor([0.0632, 0.0561, 0.0404, 0.0582, 0.0781, 0.0577, 0.0795, 0.0603], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:1') +2023-03-09 20:40:02,512 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94284.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 20:40:15,480 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8544, 5.0352, 2.5772, 4.9275, 4.8339, 5.0961, 4.9404, 2.7485], + device='cuda:1'), covar=tensor([0.0232, 0.0070, 0.0846, 0.0084, 0.0072, 0.0065, 0.0081, 0.0951], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0084, 0.0098, 0.0099, 0.0089, 0.0079, 0.0086, 0.0098], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 20:40:20,255 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 20:40:21,757 INFO [train.py:898] (1/4) Epoch 26, batch 3450, loss[loss=0.1422, simple_loss=0.2375, pruned_loss=0.02349, over 18386.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2481, pruned_loss=0.03327, over 3587227.51 frames. ], batch size: 50, lr: 4.28e-03, grad_scale: 8.0 +2023-03-09 20:40:26,279 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.946e+02 2.477e+02 2.921e+02 3.603e+02 6.240e+02, threshold=5.842e+02, percent-clipped=1.0 +2023-03-09 20:41:04,282 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94337.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:41:17,116 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=94348.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:41:20,134 INFO [train.py:898] (1/4) Epoch 26, batch 3500, loss[loss=0.1402, simple_loss=0.2285, pruned_loss=0.02592, over 18394.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2478, pruned_loss=0.03335, over 3584736.77 frames. 
], batch size: 48, lr: 4.28e-03, grad_scale: 8.0 +2023-03-09 20:42:01,477 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5922, 5.0726, 5.0647, 5.1878, 4.4813, 5.0098, 4.3429, 4.9779], + device='cuda:1'), covar=tensor([0.0322, 0.0424, 0.0301, 0.0464, 0.0491, 0.0284, 0.1386, 0.0392], + device='cuda:1'), in_proj_covar=tensor([0.0236, 0.0278, 0.0279, 0.0355, 0.0287, 0.0287, 0.0322, 0.0279], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 20:42:05,548 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9155, 4.9455, 4.9778, 4.6665, 4.7091, 4.7908, 5.0355, 5.0456], + device='cuda:1'), covar=tensor([0.0073, 0.0094, 0.0078, 0.0150, 0.0073, 0.0192, 0.0098, 0.0114], + device='cuda:1'), in_proj_covar=tensor([0.0099, 0.0074, 0.0079, 0.0099, 0.0078, 0.0108, 0.0091, 0.0091], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 20:42:15,619 INFO [train.py:898] (1/4) Epoch 26, batch 3550, loss[loss=0.1612, simple_loss=0.2545, pruned_loss=0.03395, over 17058.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2486, pruned_loss=0.03371, over 3569419.37 frames. ], batch size: 78, lr: 4.28e-03, grad_scale: 8.0 +2023-03-09 20:42:20,515 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.890e+02 2.549e+02 2.986e+02 3.546e+02 5.816e+02, threshold=5.972e+02, percent-clipped=0.0 +2023-03-09 20:42:25,129 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94409.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 20:42:43,015 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94426.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:43:09,311 INFO [train.py:898] (1/4) Epoch 26, batch 3600, loss[loss=0.1617, simple_loss=0.25, pruned_loss=0.03672, over 18302.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2477, pruned_loss=0.03364, over 3569112.76 frames. ], batch size: 57, lr: 4.28e-03, grad_scale: 8.0 +2023-03-09 20:43:34,192 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94474.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:44:11,702 INFO [train.py:898] (1/4) Epoch 27, batch 0, loss[loss=0.1426, simple_loss=0.2289, pruned_loss=0.02812, over 17794.00 frames. ], tot_loss[loss=0.1426, simple_loss=0.2289, pruned_loss=0.02812, over 17794.00 frames. ], batch size: 39, lr: 4.20e-03, grad_scale: 8.0 +2023-03-09 20:44:11,702 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 20:44:23,641 INFO [train.py:932] (1/4) Epoch 27, validation: loss=0.1494, simple_loss=0.2481, pruned_loss=0.02532, over 944034.00 frames. +2023-03-09 20:44:23,642 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 20:44:49,665 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.900e+02 2.525e+02 3.009e+02 3.747e+02 9.938e+02, threshold=6.019e+02, percent-clipped=2.0 +2023-03-09 20:45:22,317 INFO [train.py:898] (1/4) Epoch 27, batch 50, loss[loss=0.1506, simple_loss=0.2469, pruned_loss=0.02712, over 18571.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2474, pruned_loss=0.0334, over 801307.12 frames. 
], batch size: 54, lr: 4.20e-03, grad_scale: 8.0 +2023-03-09 20:46:05,127 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8508, 4.1337, 2.6886, 4.1102, 5.1541, 2.8010, 3.8780, 3.9152], + device='cuda:1'), covar=tensor([0.0238, 0.1347, 0.1542, 0.0633, 0.0135, 0.1154, 0.0703, 0.0832], + device='cuda:1'), in_proj_covar=tensor([0.0182, 0.0278, 0.0208, 0.0201, 0.0140, 0.0187, 0.0223, 0.0231], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 20:46:09,826 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-09 20:46:13,896 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94579.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 20:46:20,481 INFO [train.py:898] (1/4) Epoch 27, batch 100, loss[loss=0.1362, simple_loss=0.2199, pruned_loss=0.02622, over 18166.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2455, pruned_loss=0.03274, over 1426316.94 frames. ], batch size: 44, lr: 4.20e-03, grad_scale: 8.0 +2023-03-09 20:46:28,077 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-09 20:46:31,661 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-09 20:46:46,525 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.709e+02 2.657e+02 3.166e+02 3.640e+02 6.599e+02, threshold=6.333e+02, percent-clipped=4.0 +2023-03-09 20:46:48,033 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9376, 5.2286, 2.5205, 5.1360, 4.9989, 5.2837, 5.0558, 2.5452], + device='cuda:1'), covar=tensor([0.0230, 0.0069, 0.0882, 0.0074, 0.0076, 0.0072, 0.0086, 0.1062], + device='cuda:1'), in_proj_covar=tensor([0.0092, 0.0084, 0.0098, 0.0099, 0.0089, 0.0079, 0.0086, 0.0098], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-09 20:47:02,398 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 20:47:19,769 INFO [train.py:898] (1/4) Epoch 27, batch 150, loss[loss=0.1655, simple_loss=0.2569, pruned_loss=0.03702, over 18470.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2457, pruned_loss=0.03278, over 1907225.52 frames. ], batch size: 59, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:47:22,311 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94637.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:48:17,944 INFO [train.py:898] (1/4) Epoch 27, batch 200, loss[loss=0.1527, simple_loss=0.2406, pruned_loss=0.03237, over 18491.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.246, pruned_loss=0.0328, over 2276860.70 frames. ], batch size: 47, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:48:18,081 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94685.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:48:39,865 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94704.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 20:48:41,886 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.913e+02 2.647e+02 3.071e+02 3.764e+02 1.112e+03, threshold=6.143e+02, percent-clipped=3.0 +2023-03-09 20:49:16,137 INFO [train.py:898] (1/4) Epoch 27, batch 250, loss[loss=0.1513, simple_loss=0.2391, pruned_loss=0.03176, over 18492.00 frames. 
], tot_loss[loss=0.1565, simple_loss=0.2464, pruned_loss=0.03329, over 2571554.37 frames. ], batch size: 51, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:49:16,471 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4523, 5.4093, 5.0097, 5.3307, 5.3573, 4.6764, 5.2065, 4.9514], + device='cuda:1'), covar=tensor([0.0434, 0.0455, 0.1373, 0.0869, 0.0644, 0.0475, 0.0443, 0.1207], + device='cuda:1'), in_proj_covar=tensor([0.0509, 0.0574, 0.0717, 0.0446, 0.0472, 0.0523, 0.0558, 0.0689], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 20:50:14,304 INFO [train.py:898] (1/4) Epoch 27, batch 300, loss[loss=0.1447, simple_loss=0.2325, pruned_loss=0.02842, over 18480.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2467, pruned_loss=0.03312, over 2808145.52 frames. ], batch size: 47, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:50:38,014 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.940e+02 2.501e+02 2.879e+02 3.354e+02 8.790e+02, threshold=5.757e+02, percent-clipped=2.0 +2023-03-09 20:51:12,769 INFO [train.py:898] (1/4) Epoch 27, batch 350, loss[loss=0.1537, simple_loss=0.2415, pruned_loss=0.03289, over 18255.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2466, pruned_loss=0.0334, over 2980922.48 frames. ], batch size: 45, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:51:17,736 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7428, 3.6168, 2.3828, 4.5092, 3.2630, 4.3867, 2.7579, 4.0952], + device='cuda:1'), covar=tensor([0.0622, 0.0843, 0.1485, 0.0541, 0.0858, 0.0393, 0.1158, 0.0420], + device='cuda:1'), in_proj_covar=tensor([0.0222, 0.0231, 0.0194, 0.0294, 0.0197, 0.0269, 0.0206, 0.0206], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 20:52:02,470 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-09 20:52:04,421 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94879.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 20:52:11,470 INFO [train.py:898] (1/4) Epoch 27, batch 400, loss[loss=0.167, simple_loss=0.2603, pruned_loss=0.0369, over 18298.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.2471, pruned_loss=0.03341, over 3121769.37 frames. ], batch size: 49, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:52:34,625 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9239, 4.2020, 4.1401, 4.2414, 3.8385, 4.1389, 3.8341, 4.1493], + device='cuda:1'), covar=tensor([0.0275, 0.0340, 0.0279, 0.0519, 0.0362, 0.0267, 0.0868, 0.0347], + device='cuda:1'), in_proj_covar=tensor([0.0235, 0.0278, 0.0280, 0.0356, 0.0287, 0.0287, 0.0321, 0.0281], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 20:52:35,457 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.622e+02 2.561e+02 2.993e+02 3.675e+02 9.257e+02, threshold=5.986e+02, percent-clipped=2.0 +2023-03-09 20:53:00,408 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. limit=2.0 +2023-03-09 20:53:00,993 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94927.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 20:53:10,446 INFO [train.py:898] (1/4) Epoch 27, batch 450, loss[loss=0.1439, simple_loss=0.2414, pruned_loss=0.02325, over 18492.00 frames. 
], tot_loss[loss=0.1567, simple_loss=0.2469, pruned_loss=0.0333, over 3212206.06 frames. ], batch size: 51, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:53:13,091 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8031, 3.1313, 2.4986, 3.0236, 3.8159, 3.7544, 3.2224, 3.0932], + device='cuda:1'), covar=tensor([0.0178, 0.0258, 0.0649, 0.0407, 0.0159, 0.0147, 0.0368, 0.0353], + device='cuda:1'), in_proj_covar=tensor([0.0148, 0.0146, 0.0171, 0.0167, 0.0144, 0.0129, 0.0164, 0.0168], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 20:53:33,824 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-09 20:54:08,448 INFO [train.py:898] (1/4) Epoch 27, batch 500, loss[loss=0.1542, simple_loss=0.2423, pruned_loss=0.03309, over 18416.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2471, pruned_loss=0.03304, over 3307027.72 frames. ], batch size: 48, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:54:31,045 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95004.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:54:33,118 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.646e+02 2.379e+02 2.893e+02 3.506e+02 5.482e+02, threshold=5.786e+02, percent-clipped=0.0 +2023-03-09 20:55:06,349 INFO [train.py:898] (1/4) Epoch 27, batch 550, loss[loss=0.1489, simple_loss=0.2424, pruned_loss=0.02772, over 18259.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2472, pruned_loss=0.03305, over 3380577.96 frames. ], batch size: 47, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:55:26,976 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95052.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:56:04,274 INFO [train.py:898] (1/4) Epoch 27, batch 600, loss[loss=0.1559, simple_loss=0.2562, pruned_loss=0.02781, over 18551.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2471, pruned_loss=0.03299, over 3421376.82 frames. ], batch size: 54, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:56:10,458 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-09 20:56:28,266 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-09 20:56:28,735 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95105.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 20:56:29,339 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.684e+02 2.616e+02 3.098e+02 3.741e+02 7.084e+02, threshold=6.196e+02, percent-clipped=4.0 +2023-03-09 20:56:50,698 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-09 20:57:02,878 INFO [train.py:898] (1/4) Epoch 27, batch 650, loss[loss=0.1397, simple_loss=0.2301, pruned_loss=0.02467, over 18406.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.247, pruned_loss=0.03308, over 3460130.12 frames. 
], batch size: 48, lr: 4.18e-03, grad_scale: 8.0 +2023-03-09 20:57:08,280 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4930, 6.0624, 5.6715, 5.8619, 5.6678, 5.4985, 6.1019, 6.0508], + device='cuda:1'), covar=tensor([0.1248, 0.0753, 0.0444, 0.0638, 0.1335, 0.0702, 0.0604, 0.0684], + device='cuda:1'), in_proj_covar=tensor([0.0633, 0.0557, 0.0402, 0.0577, 0.0778, 0.0575, 0.0790, 0.0601], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:1') +2023-03-09 20:57:40,168 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95166.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 20:58:02,040 INFO [train.py:898] (1/4) Epoch 27, batch 700, loss[loss=0.1528, simple_loss=0.2388, pruned_loss=0.03342, over 18508.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2469, pruned_loss=0.03296, over 3495894.79 frames. ], batch size: 47, lr: 4.18e-03, grad_scale: 8.0 +2023-03-09 20:58:11,753 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8270, 2.5274, 2.7573, 2.8756, 3.3058, 4.9751, 4.9743, 3.4035], + device='cuda:1'), covar=tensor([0.1992, 0.2490, 0.3063, 0.1863, 0.2455, 0.0255, 0.0297, 0.1104], + device='cuda:1'), in_proj_covar=tensor([0.0327, 0.0362, 0.0408, 0.0290, 0.0398, 0.0265, 0.0305, 0.0271], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 20:58:28,135 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.730e+02 2.528e+02 2.858e+02 3.402e+02 6.027e+02, threshold=5.717e+02, percent-clipped=1.0 +2023-03-09 20:58:46,274 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9076, 4.1378, 2.5723, 4.0257, 5.2857, 2.7329, 3.6441, 3.7762], + device='cuda:1'), covar=tensor([0.0190, 0.1404, 0.1309, 0.0538, 0.0070, 0.0993, 0.0657, 0.0826], + device='cuda:1'), in_proj_covar=tensor([0.0180, 0.0275, 0.0205, 0.0198, 0.0138, 0.0185, 0.0219, 0.0228], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 20:58:55,419 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6887, 2.4759, 2.7207, 2.7448, 3.4431, 5.0078, 4.9220, 3.5360], + device='cuda:1'), covar=tensor([0.2078, 0.2572, 0.3064, 0.1933, 0.2262, 0.0234, 0.0353, 0.1005], + device='cuda:1'), in_proj_covar=tensor([0.0327, 0.0362, 0.0408, 0.0290, 0.0398, 0.0265, 0.0304, 0.0271], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 20:59:01,259 INFO [train.py:898] (1/4) Epoch 27, batch 750, loss[loss=0.1605, simple_loss=0.2582, pruned_loss=0.03138, over 18305.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2471, pruned_loss=0.03297, over 3526871.56 frames. ], batch size: 57, lr: 4.18e-03, grad_scale: 8.0 +2023-03-09 20:59:12,181 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2105, 5.2379, 5.5600, 5.5126, 5.1928, 6.0435, 5.6843, 5.2535], + device='cuda:1'), covar=tensor([0.1117, 0.0662, 0.0797, 0.0784, 0.1259, 0.0648, 0.0686, 0.1663], + device='cuda:1'), in_proj_covar=tensor([0.0375, 0.0306, 0.0330, 0.0335, 0.0343, 0.0447, 0.0298, 0.0436], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 20:59:12,737 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. 
limit=2.0 +2023-03-09 20:59:31,745 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95260.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:59:37,264 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1597, 5.2028, 5.2423, 4.9876, 5.0292, 4.9818, 5.3196, 5.3402], + device='cuda:1'), covar=tensor([0.0059, 0.0058, 0.0053, 0.0103, 0.0049, 0.0157, 0.0062, 0.0070], + device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0074, 0.0080, 0.0100, 0.0079, 0.0108, 0.0091, 0.0091], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 20:59:40,855 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95268.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:00:00,183 INFO [train.py:898] (1/4) Epoch 27, batch 800, loss[loss=0.1545, simple_loss=0.2511, pruned_loss=0.02895, over 18492.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2469, pruned_loss=0.03277, over 3545002.78 frames. ], batch size: 51, lr: 4.18e-03, grad_scale: 8.0 +2023-03-09 21:00:25,349 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.793e+02 2.511e+02 2.986e+02 3.454e+02 7.557e+02, threshold=5.973e+02, percent-clipped=4.0 +2023-03-09 21:00:43,456 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95321.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:00:52,370 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8503, 5.3546, 5.3632, 5.3557, 4.8603, 5.3011, 4.6068, 5.1722], + device='cuda:1'), covar=tensor([0.0294, 0.0309, 0.0216, 0.0426, 0.0397, 0.0238, 0.1190, 0.0384], + device='cuda:1'), in_proj_covar=tensor([0.0236, 0.0277, 0.0279, 0.0357, 0.0287, 0.0286, 0.0321, 0.0281], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 21:00:52,435 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95329.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:00:58,803 INFO [train.py:898] (1/4) Epoch 27, batch 850, loss[loss=0.1667, simple_loss=0.2619, pruned_loss=0.03572, over 17263.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2474, pruned_loss=0.03311, over 3543044.29 frames. ], batch size: 78, lr: 4.18e-03, grad_scale: 8.0 +2023-03-09 21:01:04,269 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95339.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:01:57,809 INFO [train.py:898] (1/4) Epoch 27, batch 900, loss[loss=0.1738, simple_loss=0.2642, pruned_loss=0.04168, over 18306.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2476, pruned_loss=0.03319, over 3554369.02 frames. 
], batch size: 54, lr: 4.18e-03, grad_scale: 8.0 +2023-03-09 21:02:10,604 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5704, 2.3346, 2.5107, 2.5599, 2.8979, 4.1987, 4.1412, 2.9819], + device='cuda:1'), covar=tensor([0.2219, 0.2832, 0.3169, 0.2162, 0.2597, 0.0427, 0.0479, 0.1206], + device='cuda:1'), in_proj_covar=tensor([0.0327, 0.0362, 0.0408, 0.0290, 0.0397, 0.0265, 0.0305, 0.0270], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 21:02:16,682 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95400.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:02:24,175 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.646e+02 2.410e+02 2.846e+02 3.556e+02 5.953e+02, threshold=5.693e+02, percent-clipped=0.0 +2023-03-09 21:02:57,591 INFO [train.py:898] (1/4) Epoch 27, batch 950, loss[loss=0.1733, simple_loss=0.2598, pruned_loss=0.04339, over 17775.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2477, pruned_loss=0.03323, over 3545347.26 frames. ], batch size: 70, lr: 4.18e-03, grad_scale: 8.0 +2023-03-09 21:03:28,552 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95461.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 21:03:55,818 INFO [train.py:898] (1/4) Epoch 27, batch 1000, loss[loss=0.1373, simple_loss=0.2176, pruned_loss=0.02846, over 17257.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2476, pruned_loss=0.03335, over 3558853.47 frames. ], batch size: 38, lr: 4.18e-03, grad_scale: 8.0 +2023-03-09 21:04:19,641 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.888e+02 2.625e+02 2.944e+02 3.804e+02 7.534e+02, threshold=5.888e+02, percent-clipped=3.0 +2023-03-09 21:04:40,530 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3685, 5.9066, 5.5089, 5.6852, 5.5140, 5.3113, 5.9384, 5.9237], + device='cuda:1'), covar=tensor([0.1217, 0.0695, 0.0505, 0.0711, 0.1389, 0.0705, 0.0590, 0.0640], + device='cuda:1'), in_proj_covar=tensor([0.0630, 0.0557, 0.0402, 0.0577, 0.0775, 0.0578, 0.0787, 0.0599], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:1') +2023-03-09 21:04:53,989 INFO [train.py:898] (1/4) Epoch 27, batch 1050, loss[loss=0.1742, simple_loss=0.2708, pruned_loss=0.03879, over 18476.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2478, pruned_loss=0.03353, over 3568288.46 frames. ], batch size: 59, lr: 4.18e-03, grad_scale: 8.0 +2023-03-09 21:05:19,284 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-09 21:05:53,035 INFO [train.py:898] (1/4) Epoch 27, batch 1100, loss[loss=0.1626, simple_loss=0.2583, pruned_loss=0.0334, over 18396.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.248, pruned_loss=0.03354, over 3571338.26 frames. 
], batch size: 52, lr: 4.17e-03, grad_scale: 8.0 +2023-03-09 21:06:17,710 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 2.398e+02 2.884e+02 3.392e+02 9.491e+02, threshold=5.768e+02, percent-clipped=3.0 +2023-03-09 21:06:30,353 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95616.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:06:31,641 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95617.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:06:39,704 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95624.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:06:43,703 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4534, 5.4325, 5.0995, 5.3093, 5.3908, 4.7559, 5.2867, 5.0319], + device='cuda:1'), covar=tensor([0.0428, 0.0413, 0.1214, 0.0897, 0.0535, 0.0418, 0.0437, 0.1092], + device='cuda:1'), in_proj_covar=tensor([0.0515, 0.0584, 0.0725, 0.0450, 0.0473, 0.0530, 0.0564, 0.0701], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 21:06:52,366 INFO [train.py:898] (1/4) Epoch 27, batch 1150, loss[loss=0.1475, simple_loss=0.2414, pruned_loss=0.02683, over 18479.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2491, pruned_loss=0.03378, over 3575025.87 frames. ], batch size: 47, lr: 4.17e-03, grad_scale: 4.0 +2023-03-09 21:07:43,501 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95678.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:07:51,486 INFO [train.py:898] (1/4) Epoch 27, batch 1200, loss[loss=0.16, simple_loss=0.2547, pruned_loss=0.03262, over 18495.00 frames. ], tot_loss[loss=0.1585, simple_loss=0.2493, pruned_loss=0.03388, over 3565564.84 frames. ], batch size: 47, lr: 4.17e-03, grad_scale: 8.0 +2023-03-09 21:08:01,804 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4299, 6.0434, 5.5886, 5.7928, 5.6227, 5.4268, 6.0537, 6.0519], + device='cuda:1'), covar=tensor([0.1218, 0.0692, 0.0501, 0.0732, 0.1373, 0.0684, 0.0586, 0.0624], + device='cuda:1'), in_proj_covar=tensor([0.0634, 0.0559, 0.0403, 0.0580, 0.0780, 0.0582, 0.0790, 0.0604], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:1') +2023-03-09 21:08:02,864 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95695.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:08:06,095 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-09 21:08:16,298 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.614e+02 2.547e+02 2.881e+02 3.580e+02 8.977e+02, threshold=5.762e+02, percent-clipped=2.0 +2023-03-09 21:08:50,124 INFO [train.py:898] (1/4) Epoch 27, batch 1250, loss[loss=0.154, simple_loss=0.2508, pruned_loss=0.02858, over 17189.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2493, pruned_loss=0.03374, over 3572149.29 frames. 
], batch size: 78, lr: 4.17e-03, grad_scale: 8.0 +2023-03-09 21:09:02,203 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4969, 3.4377, 3.3319, 2.9217, 3.1852, 2.6418, 2.6854, 3.3420], + device='cuda:1'), covar=tensor([0.0090, 0.0127, 0.0117, 0.0182, 0.0146, 0.0265, 0.0287, 0.0119], + device='cuda:1'), in_proj_covar=tensor([0.0156, 0.0174, 0.0147, 0.0198, 0.0156, 0.0188, 0.0192, 0.0133], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 21:09:20,633 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95761.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 21:09:49,127 INFO [train.py:898] (1/4) Epoch 27, batch 1300, loss[loss=0.1465, simple_loss=0.2352, pruned_loss=0.02885, over 18277.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2492, pruned_loss=0.03377, over 3573553.80 frames. ], batch size: 47, lr: 4.17e-03, grad_scale: 8.0 +2023-03-09 21:09:49,583 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5956, 3.0961, 3.8441, 3.5826, 3.0773, 2.9652, 3.5692, 3.9649], + device='cuda:1'), covar=tensor([0.0786, 0.1247, 0.0332, 0.0471, 0.0864, 0.1034, 0.0478, 0.0355], + device='cuda:1'), in_proj_covar=tensor([0.0155, 0.0282, 0.0170, 0.0186, 0.0196, 0.0198, 0.0200, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 21:09:52,393 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6873, 2.7206, 4.3707, 3.9200, 2.2209, 4.6682, 3.8408, 2.7975], + device='cuda:1'), covar=tensor([0.0481, 0.1831, 0.0349, 0.0351, 0.2322, 0.0252, 0.0651, 0.1270], + device='cuda:1'), in_proj_covar=tensor([0.0221, 0.0245, 0.0232, 0.0172, 0.0228, 0.0219, 0.0258, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 21:10:15,102 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.836e+02 2.534e+02 2.954e+02 3.886e+02 9.660e+02, threshold=5.908e+02, percent-clipped=7.0 +2023-03-09 21:10:17,504 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95809.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 21:10:21,571 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95812.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 21:10:48,199 INFO [train.py:898] (1/4) Epoch 27, batch 1350, loss[loss=0.1718, simple_loss=0.268, pruned_loss=0.03784, over 18468.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2497, pruned_loss=0.03392, over 3566319.62 frames. 
], batch size: 59, lr: 4.17e-03, grad_scale: 8.0 +2023-03-09 21:11:02,592 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95847.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:11:10,527 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9694, 4.6971, 4.7037, 3.6215, 3.9711, 3.7548, 3.0104, 2.8241], + device='cuda:1'), covar=tensor([0.0245, 0.0139, 0.0083, 0.0304, 0.0355, 0.0227, 0.0667, 0.0849], + device='cuda:1'), in_proj_covar=tensor([0.0076, 0.0065, 0.0069, 0.0073, 0.0095, 0.0072, 0.0081, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 21:11:17,040 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9440, 3.6375, 5.0210, 3.0438, 4.3537, 2.6519, 3.1180, 1.7330], + device='cuda:1'), covar=tensor([0.1211, 0.0957, 0.0174, 0.0934, 0.0515, 0.2525, 0.2563, 0.2405], + device='cuda:1'), in_proj_covar=tensor([0.0231, 0.0254, 0.0226, 0.0209, 0.0268, 0.0280, 0.0337, 0.0246], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 21:11:25,371 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0544, 4.3333, 2.5549, 4.1536, 5.3435, 2.7149, 3.9837, 4.1420], + device='cuda:1'), covar=tensor([0.0186, 0.1126, 0.1592, 0.0623, 0.0090, 0.1167, 0.0617, 0.0687], + device='cuda:1'), in_proj_covar=tensor([0.0183, 0.0281, 0.0209, 0.0201, 0.0141, 0.0188, 0.0222, 0.0232], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 21:11:32,151 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95873.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 21:11:45,713 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9709, 3.7414, 5.1442, 2.8816, 4.4899, 2.5913, 3.1429, 1.6866], + device='cuda:1'), covar=tensor([0.1213, 0.0988, 0.0163, 0.1047, 0.0511, 0.2719, 0.2756, 0.2439], + device='cuda:1'), in_proj_covar=tensor([0.0232, 0.0255, 0.0227, 0.0210, 0.0270, 0.0282, 0.0338, 0.0248], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 21:11:46,333 INFO [train.py:898] (1/4) Epoch 27, batch 1400, loss[loss=0.1526, simple_loss=0.2476, pruned_loss=0.02884, over 18401.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.2488, pruned_loss=0.03356, over 3583396.79 frames. ], batch size: 52, lr: 4.17e-03, grad_scale: 8.0 +2023-03-09 21:12:11,662 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.788e+02 2.502e+02 2.884e+02 3.426e+02 1.018e+03, threshold=5.768e+02, percent-clipped=3.0 +2023-03-09 21:12:13,247 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95908.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 21:12:22,268 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95916.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:12:26,843 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.68 vs. limit=5.0 +2023-03-09 21:12:32,081 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95924.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:12:45,207 INFO [train.py:898] (1/4) Epoch 27, batch 1450, loss[loss=0.1633, simple_loss=0.2598, pruned_loss=0.03341, over 18349.00 frames. 
], tot_loss[loss=0.1572, simple_loss=0.248, pruned_loss=0.0332, over 3587018.66 frames. ], batch size: 55, lr: 4.17e-03, grad_scale: 8.0 +2023-03-09 21:13:19,366 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95964.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:13:28,448 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95972.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:13:30,275 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95973.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:13:38,806 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95980.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:13:44,685 INFO [train.py:898] (1/4) Epoch 27, batch 1500, loss[loss=0.1451, simple_loss=0.2353, pruned_loss=0.02741, over 18266.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2484, pruned_loss=0.03336, over 3581813.94 frames. ], batch size: 47, lr: 4.17e-03, grad_scale: 8.0 +2023-03-09 21:13:45,817 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-09 21:13:56,496 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95995.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:14:15,334 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.678e+02 2.536e+02 3.010e+02 3.569e+02 8.325e+02, threshold=6.021e+02, percent-clipped=2.0 +2023-03-09 21:14:23,834 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6259, 2.4241, 2.5515, 2.6101, 3.1864, 4.6834, 4.6248, 3.0797], + device='cuda:1'), covar=tensor([0.2150, 0.2698, 0.3400, 0.2203, 0.2589, 0.0330, 0.0409, 0.1269], + device='cuda:1'), in_proj_covar=tensor([0.0328, 0.0364, 0.0410, 0.0291, 0.0399, 0.0266, 0.0305, 0.0273], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:1') +2023-03-09 21:14:47,896 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3935, 5.3397, 5.0325, 5.2916, 5.3150, 4.6474, 5.2395, 4.9767], + device='cuda:1'), covar=tensor([0.0449, 0.0484, 0.1365, 0.0873, 0.0557, 0.0468, 0.0457, 0.1039], + device='cuda:1'), in_proj_covar=tensor([0.0518, 0.0587, 0.0729, 0.0453, 0.0475, 0.0533, 0.0567, 0.0702], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 21:14:48,688 INFO [train.py:898] (1/4) Epoch 27, batch 1550, loss[loss=0.1662, simple_loss=0.2662, pruned_loss=0.0331, over 18588.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2483, pruned_loss=0.03303, over 3586861.63 frames. ], batch size: 54, lr: 4.16e-03, grad_scale: 8.0 +2023-03-09 21:14:52,389 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=5.04 vs. limit=5.0 +2023-03-09 21:14:56,575 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96041.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:14:58,713 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96043.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:15:42,222 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96080.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:15:47,440 INFO [train.py:898] (1/4) Epoch 27, batch 1600, loss[loss=0.1662, simple_loss=0.2653, pruned_loss=0.0335, over 18630.00 frames. 
], tot_loss[loss=0.1569, simple_loss=0.2483, pruned_loss=0.03273, over 3588949.96 frames. ], batch size: 52, lr: 4.16e-03, grad_scale: 8.0 +2023-03-09 21:16:13,688 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.626e+02 2.554e+02 2.952e+02 3.654e+02 6.643e+02, threshold=5.903e+02, percent-clipped=2.0 +2023-03-09 21:16:38,521 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2526, 5.2722, 5.5686, 5.6253, 5.2911, 6.1109, 5.7809, 5.3843], + device='cuda:1'), covar=tensor([0.1263, 0.0649, 0.0705, 0.0710, 0.1339, 0.0668, 0.0608, 0.1712], + device='cuda:1'), in_proj_covar=tensor([0.0371, 0.0303, 0.0328, 0.0330, 0.0338, 0.0442, 0.0295, 0.0433], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 21:16:44,003 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-03-09 21:16:45,546 INFO [train.py:898] (1/4) Epoch 27, batch 1650, loss[loss=0.1512, simple_loss=0.2464, pruned_loss=0.02803, over 18406.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2485, pruned_loss=0.03272, over 3587825.49 frames. ], batch size: 52, lr: 4.16e-03, grad_scale: 8.0 +2023-03-09 21:16:53,558 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96141.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:17:21,279 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96165.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:17:21,721 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.68 vs. limit=2.0 +2023-03-09 21:17:24,604 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96168.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 21:17:44,173 INFO [train.py:898] (1/4) Epoch 27, batch 1700, loss[loss=0.1601, simple_loss=0.2517, pruned_loss=0.03426, over 18270.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2489, pruned_loss=0.03302, over 3601631.95 frames. ], batch size: 57, lr: 4.16e-03, grad_scale: 8.0 +2023-03-09 21:18:06,259 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96203.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 21:18:10,512 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.955e+02 2.460e+02 3.228e+02 3.841e+02 7.050e+02, threshold=6.456e+02, percent-clipped=7.0 +2023-03-09 21:18:32,828 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96226.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:18:42,662 INFO [train.py:898] (1/4) Epoch 27, batch 1750, loss[loss=0.1547, simple_loss=0.2499, pruned_loss=0.02977, over 16018.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2497, pruned_loss=0.03343, over 3590915.10 frames. 
], batch size: 94, lr: 4.16e-03, grad_scale: 8.0 +2023-03-09 21:19:27,922 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96273.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:19:32,689 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8367, 4.8646, 4.9463, 4.6686, 4.7053, 4.6894, 5.0341, 5.0146], + device='cuda:1'), covar=tensor([0.0082, 0.0085, 0.0068, 0.0125, 0.0066, 0.0163, 0.0079, 0.0106], + device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0074, 0.0080, 0.0100, 0.0079, 0.0107, 0.0091, 0.0091], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 21:19:41,557 INFO [train.py:898] (1/4) Epoch 27, batch 1800, loss[loss=0.1679, simple_loss=0.2544, pruned_loss=0.04064, over 18258.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2497, pruned_loss=0.03344, over 3593828.63 frames. ], batch size: 47, lr: 4.16e-03, grad_scale: 8.0 +2023-03-09 21:20:07,993 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.809e+02 2.546e+02 2.957e+02 3.630e+02 5.615e+02, threshold=5.915e+02, percent-clipped=0.0 +2023-03-09 21:20:24,642 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96321.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:20:28,163 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96324.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:20:40,257 INFO [train.py:898] (1/4) Epoch 27, batch 1850, loss[loss=0.1548, simple_loss=0.2299, pruned_loss=0.03981, over 18416.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2493, pruned_loss=0.03361, over 3592142.98 frames. ], batch size: 42, lr: 4.16e-03, grad_scale: 8.0 +2023-03-09 21:20:41,712 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96336.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:21:26,214 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0361, 2.5685, 2.3531, 2.4414, 3.0574, 2.9798, 2.7550, 2.5223], + device='cuda:1'), covar=tensor([0.0244, 0.0244, 0.0536, 0.0439, 0.0236, 0.0220, 0.0423, 0.0396], + device='cuda:1'), in_proj_covar=tensor([0.0145, 0.0146, 0.0168, 0.0166, 0.0141, 0.0128, 0.0161, 0.0164], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 21:21:37,296 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0086, 4.7863, 5.0784, 4.7367, 4.7377, 5.0151, 5.1491, 4.9998], + device='cuda:1'), covar=tensor([0.0091, 0.0118, 0.0092, 0.0149, 0.0102, 0.0146, 0.0101, 0.0158], + device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0075, 0.0080, 0.0100, 0.0079, 0.0107, 0.0091, 0.0091], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 21:21:37,985 INFO [train.py:898] (1/4) Epoch 27, batch 1900, loss[loss=0.159, simple_loss=0.2444, pruned_loss=0.03681, over 18493.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2489, pruned_loss=0.03373, over 3588735.67 frames. 
], batch size: 47, lr: 4.16e-03, grad_scale: 8.0 +2023-03-09 21:21:38,388 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96385.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:21:44,133 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96390.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:22:04,759 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.806e+02 2.747e+02 3.313e+02 4.135e+02 7.560e+02, threshold=6.625e+02, percent-clipped=3.0 +2023-03-09 21:22:36,426 INFO [train.py:898] (1/4) Epoch 27, batch 1950, loss[loss=0.1613, simple_loss=0.2582, pruned_loss=0.03223, over 18337.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2485, pruned_loss=0.03357, over 3596469.57 frames. ], batch size: 56, lr: 4.16e-03, grad_scale: 8.0 +2023-03-09 21:22:37,775 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96436.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:22:55,487 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96451.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:23:15,703 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96468.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 21:23:35,122 INFO [train.py:898] (1/4) Epoch 27, batch 2000, loss[loss=0.1447, simple_loss=0.2341, pruned_loss=0.0277, over 18493.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2481, pruned_loss=0.03375, over 3584332.03 frames. ], batch size: 47, lr: 4.15e-03, grad_scale: 8.0 +2023-03-09 21:23:56,298 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96503.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:24:00,974 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.734e+02 2.669e+02 3.146e+02 3.749e+02 6.001e+02, threshold=6.292e+02, percent-clipped=0.0 +2023-03-09 21:24:11,841 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96516.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 21:24:17,439 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96521.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:24:33,558 INFO [train.py:898] (1/4) Epoch 27, batch 2050, loss[loss=0.1627, simple_loss=0.2566, pruned_loss=0.03436, over 18604.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2477, pruned_loss=0.03343, over 3578414.59 frames. ], batch size: 52, lr: 4.15e-03, grad_scale: 8.0 +2023-03-09 21:24:51,979 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96551.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:25:10,929 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2209, 5.2962, 5.5444, 5.5371, 5.2042, 6.0587, 5.7408, 5.2541], + device='cuda:1'), covar=tensor([0.1328, 0.0643, 0.0815, 0.0842, 0.1503, 0.0755, 0.0674, 0.1766], + device='cuda:1'), in_proj_covar=tensor([0.0375, 0.0304, 0.0330, 0.0333, 0.0342, 0.0445, 0.0298, 0.0436], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 21:25:32,769 INFO [train.py:898] (1/4) Epoch 27, batch 2100, loss[loss=0.1506, simple_loss=0.2302, pruned_loss=0.03547, over 18146.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2464, pruned_loss=0.03316, over 3582242.78 frames. 
], batch size: 44, lr: 4.15e-03, grad_scale: 8.0 +2023-03-09 21:25:58,863 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.510e+02 2.412e+02 2.976e+02 3.502e+02 7.599e+02, threshold=5.952e+02, percent-clipped=1.0 +2023-03-09 21:26:32,720 INFO [train.py:898] (1/4) Epoch 27, batch 2150, loss[loss=0.1706, simple_loss=0.2624, pruned_loss=0.03934, over 18362.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2465, pruned_loss=0.03298, over 3586211.87 frames. ], batch size: 56, lr: 4.15e-03, grad_scale: 8.0 +2023-03-09 21:26:34,138 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96636.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:27:25,074 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4406, 3.3661, 2.1902, 4.2225, 3.0093, 3.9110, 2.5595, 3.5997], + device='cuda:1'), covar=tensor([0.0674, 0.0828, 0.1454, 0.0444, 0.0769, 0.0369, 0.1145, 0.0508], + device='cuda:1'), in_proj_covar=tensor([0.0223, 0.0231, 0.0196, 0.0295, 0.0196, 0.0272, 0.0205, 0.0207], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 21:27:26,052 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96680.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:27:30,969 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96684.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:27:31,870 INFO [train.py:898] (1/4) Epoch 27, batch 2200, loss[loss=0.1722, simple_loss=0.266, pruned_loss=0.03917, over 18303.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2473, pruned_loss=0.03297, over 3592365.17 frames. ], batch size: 54, lr: 4.15e-03, grad_scale: 8.0 +2023-03-09 21:27:56,876 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 2.105e+02 2.631e+02 3.073e+02 3.700e+02 7.959e+02, threshold=6.147e+02, percent-clipped=1.0 +2023-03-09 21:28:29,885 INFO [train.py:898] (1/4) Epoch 27, batch 2250, loss[loss=0.1487, simple_loss=0.2429, pruned_loss=0.02723, over 18624.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2478, pruned_loss=0.03303, over 3584441.59 frames. ], batch size: 52, lr: 4.15e-03, grad_scale: 8.0 +2023-03-09 21:28:31,906 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96736.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:28:43,207 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96746.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:29:18,813 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96776.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:29:27,591 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96784.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:29:27,648 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5086, 5.3901, 5.7588, 5.8677, 5.3751, 6.3009, 5.9719, 5.4720], + device='cuda:1'), covar=tensor([0.1058, 0.0659, 0.0739, 0.0725, 0.1318, 0.0708, 0.0588, 0.1658], + device='cuda:1'), in_proj_covar=tensor([0.0372, 0.0303, 0.0330, 0.0332, 0.0342, 0.0446, 0.0298, 0.0435], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 21:29:28,577 INFO [train.py:898] (1/4) Epoch 27, batch 2300, loss[loss=0.1336, simple_loss=0.2107, pruned_loss=0.02826, over 18386.00 frames. 
], tot_loss[loss=0.1571, simple_loss=0.2476, pruned_loss=0.03331, over 3560696.36 frames. ], batch size: 42, lr: 4.15e-03, grad_scale: 8.0 +2023-03-09 21:29:53,973 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.778e+02 2.521e+02 2.973e+02 3.966e+02 7.107e+02, threshold=5.946e+02, percent-clipped=4.0 +2023-03-09 21:30:03,101 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-09 21:30:11,748 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96821.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:30:12,059 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-03-09 21:30:27,080 INFO [train.py:898] (1/4) Epoch 27, batch 2350, loss[loss=0.1462, simple_loss=0.2296, pruned_loss=0.03145, over 18412.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2478, pruned_loss=0.03357, over 3576412.41 frames. ], batch size: 43, lr: 4.15e-03, grad_scale: 8.0 +2023-03-09 21:30:29,639 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96837.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:31:07,527 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96869.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:31:26,978 INFO [train.py:898] (1/4) Epoch 27, batch 2400, loss[loss=0.1833, simple_loss=0.2678, pruned_loss=0.04943, over 13355.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2474, pruned_loss=0.03351, over 3576595.92 frames. ], batch size: 130, lr: 4.15e-03, grad_scale: 8.0 +2023-03-09 21:31:46,342 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-03-09 21:31:52,343 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.581e+02 2.380e+02 2.700e+02 3.447e+02 6.459e+02, threshold=5.400e+02, percent-clipped=1.0 +2023-03-09 21:32:25,336 INFO [train.py:898] (1/4) Epoch 27, batch 2450, loss[loss=0.1409, simple_loss=0.2312, pruned_loss=0.02535, over 18258.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2477, pruned_loss=0.0331, over 3578613.04 frames. ], batch size: 47, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:33:18,684 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96980.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:33:22,363 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-09 21:33:24,785 INFO [train.py:898] (1/4) Epoch 27, batch 2500, loss[loss=0.1658, simple_loss=0.271, pruned_loss=0.03029, over 18361.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2481, pruned_loss=0.03298, over 3572431.48 frames. ], batch size: 55, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:33:41,726 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. 
limit=2.0 +2023-03-09 21:33:42,712 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97000.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:33:50,762 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.816e+02 2.553e+02 2.843e+02 3.348e+02 7.332e+02, threshold=5.685e+02, percent-clipped=3.0 +2023-03-09 21:34:15,099 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97028.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:34:17,003 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5764, 2.4583, 2.5707, 2.6608, 3.1098, 4.7503, 4.6319, 2.9927], + device='cuda:1'), covar=tensor([0.2213, 0.2591, 0.3235, 0.2068, 0.2616, 0.0266, 0.0391, 0.1241], + device='cuda:1'), in_proj_covar=tensor([0.0330, 0.0365, 0.0412, 0.0293, 0.0400, 0.0267, 0.0305, 0.0274], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 21:34:23,790 INFO [train.py:898] (1/4) Epoch 27, batch 2550, loss[loss=0.1439, simple_loss=0.234, pruned_loss=0.02686, over 18362.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2486, pruned_loss=0.03337, over 3572425.97 frames. ], batch size: 46, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:34:36,698 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97046.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:34:54,330 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97061.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:34:55,440 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2188, 5.2489, 5.3631, 5.0385, 5.0860, 5.0715, 5.4580, 5.4005], + device='cuda:1'), covar=tensor([0.0069, 0.0067, 0.0049, 0.0110, 0.0063, 0.0165, 0.0068, 0.0088], + device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0075, 0.0080, 0.0100, 0.0080, 0.0109, 0.0091, 0.0092], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 21:35:22,674 INFO [train.py:898] (1/4) Epoch 27, batch 2600, loss[loss=0.1515, simple_loss=0.2416, pruned_loss=0.03069, over 18428.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2484, pruned_loss=0.03335, over 3571553.22 frames. ], batch size: 48, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:35:32,737 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97094.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:35:47,783 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.742e+02 2.535e+02 2.936e+02 3.647e+02 8.103e+02, threshold=5.871e+02, percent-clipped=2.0 +2023-03-09 21:36:16,473 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97132.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:36:19,894 INFO [train.py:898] (1/4) Epoch 27, batch 2650, loss[loss=0.1568, simple_loss=0.2419, pruned_loss=0.03587, over 18349.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2494, pruned_loss=0.03364, over 3584181.55 frames. ], batch size: 42, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:37:18,525 INFO [train.py:898] (1/4) Epoch 27, batch 2700, loss[loss=0.1563, simple_loss=0.2432, pruned_loss=0.03467, over 18167.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2495, pruned_loss=0.03367, over 3572868.01 frames. 
], batch size: 44, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:37:38,539 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9227, 4.1844, 2.4838, 4.2171, 5.2935, 3.0140, 3.7923, 3.7986], + device='cuda:1'), covar=tensor([0.0227, 0.1378, 0.1662, 0.0657, 0.0103, 0.1081, 0.0760, 0.1012], + device='cuda:1'), in_proj_covar=tensor([0.0184, 0.0283, 0.0211, 0.0201, 0.0142, 0.0188, 0.0224, 0.0233], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 21:37:38,962 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-03-09 21:37:44,982 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.852e+02 2.663e+02 3.178e+02 4.002e+02 6.925e+02, threshold=6.355e+02, percent-clipped=3.0 +2023-03-09 21:38:17,455 INFO [train.py:898] (1/4) Epoch 27, batch 2750, loss[loss=0.1438, simple_loss=0.2389, pruned_loss=0.02438, over 17167.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2488, pruned_loss=0.03336, over 3589201.87 frames. ], batch size: 78, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:38:47,028 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97260.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:39:13,891 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-09 21:39:16,418 INFO [train.py:898] (1/4) Epoch 27, batch 2800, loss[loss=0.1795, simple_loss=0.2707, pruned_loss=0.04413, over 18295.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2489, pruned_loss=0.0334, over 3592604.55 frames. ], batch size: 57, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:39:44,375 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.673e+02 2.484e+02 2.831e+02 3.589e+02 1.148e+03, threshold=5.662e+02, percent-clipped=3.0 +2023-03-09 21:40:00,187 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97321.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:40:01,795 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-03-09 21:40:15,675 INFO [train.py:898] (1/4) Epoch 27, batch 2850, loss[loss=0.1474, simple_loss=0.2427, pruned_loss=0.02607, over 18371.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2483, pruned_loss=0.03294, over 3591233.41 frames. ], batch size: 55, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:40:24,475 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97342.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:40:30,820 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97347.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:40:41,759 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97356.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:41:03,099 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97374.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:41:15,305 INFO [train.py:898] (1/4) Epoch 27, batch 2900, loss[loss=0.1786, simple_loss=0.2684, pruned_loss=0.0444, over 18312.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2482, pruned_loss=0.03319, over 3567020.82 frames. 
], batch size: 57, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:41:38,550 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97403.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 21:41:43,554 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.636e+02 2.502e+02 2.896e+02 3.589e+02 7.058e+02, threshold=5.793e+02, percent-clipped=2.0 +2023-03-09 21:41:43,949 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97408.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:42:11,801 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97432.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:42:14,994 INFO [train.py:898] (1/4) Epoch 27, batch 2950, loss[loss=0.1639, simple_loss=0.2554, pruned_loss=0.03621, over 18491.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2479, pruned_loss=0.03318, over 3569833.84 frames. ], batch size: 53, lr: 4.13e-03, grad_scale: 4.0 +2023-03-09 21:42:15,352 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97435.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:43:06,325 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0961, 2.5910, 2.3654, 2.5419, 3.2562, 3.1598, 2.8950, 2.6185], + device='cuda:1'), covar=tensor([0.0222, 0.0252, 0.0510, 0.0420, 0.0219, 0.0188, 0.0403, 0.0364], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0146, 0.0168, 0.0166, 0.0143, 0.0128, 0.0162, 0.0165], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 21:43:08,293 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97480.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:43:13,705 INFO [train.py:898] (1/4) Epoch 27, batch 3000, loss[loss=0.1731, simple_loss=0.2664, pruned_loss=0.03993, over 16222.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2485, pruned_loss=0.03348, over 3556680.55 frames. ], batch size: 94, lr: 4.13e-03, grad_scale: 4.0 +2023-03-09 21:43:13,706 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 21:43:26,376 INFO [train.py:932] (1/4) Epoch 27, validation: loss=0.1498, simple_loss=0.2479, pruned_loss=0.02584, over 944034.00 frames. 
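The loss tuples that train.py prints decompose consistently throughout this log: the leading loss value equals 0.5 * simple_loss + pruned_loss, and tot_loss is the running average of that quantity over the frame count shown. For the Epoch 27 validation entry just above, 0.5 * 0.2479 + 0.02584 = 0.1498. A one-line spot-check of that relation (the 0.5 weight is read off the logged numbers themselves, not taken from the training code):

# Spot-check: the printed loss matches 0.5 * simple_loss + pruned_loss,
# using the Epoch 27 validation entry logged above.
simple_loss, pruned_loss = 0.2479, 0.02584
print(f"{0.5 * simple_loss + pruned_loss:.4f}")  # -> 0.1498
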
+2023-03-09 21:43:26,377 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 21:43:30,497 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9433, 4.1794, 2.3282, 3.9939, 5.2498, 2.7657, 3.7589, 4.0885], + device='cuda:1'), covar=tensor([0.0173, 0.1162, 0.1714, 0.0705, 0.0094, 0.1148, 0.0705, 0.0684], + device='cuda:1'), in_proj_covar=tensor([0.0185, 0.0283, 0.0212, 0.0203, 0.0144, 0.0188, 0.0225, 0.0233], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 21:43:54,641 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.734e+02 2.624e+02 3.078e+02 3.801e+02 7.902e+02, threshold=6.156e+02, percent-clipped=2.0 +2023-03-09 21:44:09,210 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6165, 3.4033, 2.3035, 4.4235, 3.0345, 4.1841, 2.5630, 3.9618], + device='cuda:1'), covar=tensor([0.0713, 0.0926, 0.1537, 0.0481, 0.0920, 0.0343, 0.1276, 0.0418], + device='cuda:1'), in_proj_covar=tensor([0.0227, 0.0235, 0.0198, 0.0299, 0.0200, 0.0274, 0.0208, 0.0210], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 21:44:25,233 INFO [train.py:898] (1/4) Epoch 27, batch 3050, loss[loss=0.1548, simple_loss=0.2499, pruned_loss=0.02988, over 18105.00 frames. ], tot_loss[loss=0.1563, simple_loss=0.2468, pruned_loss=0.03296, over 3572372.31 frames. ], batch size: 62, lr: 4.13e-03, grad_scale: 4.0 +2023-03-09 21:44:49,627 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97555.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:45:24,074 INFO [train.py:898] (1/4) Epoch 27, batch 3100, loss[loss=0.1522, simple_loss=0.2404, pruned_loss=0.03197, over 18388.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2467, pruned_loss=0.03285, over 3574688.33 frames. ], batch size: 52, lr: 4.13e-03, grad_scale: 4.0 +2023-03-09 21:45:39,365 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. limit=2.0 +2023-03-09 21:45:52,559 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.712e+02 2.595e+02 3.045e+02 3.637e+02 1.953e+03, threshold=6.090e+02, percent-clipped=1.0 +2023-03-09 21:46:00,621 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97616.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:46:00,863 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97616.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:46:22,622 INFO [train.py:898] (1/4) Epoch 27, batch 3150, loss[loss=0.1607, simple_loss=0.2587, pruned_loss=0.03138, over 18486.00 frames. ], tot_loss[loss=0.1557, simple_loss=0.2462, pruned_loss=0.0326, over 3584494.81 frames. 
], batch size: 53, lr: 4.13e-03, grad_scale: 4.0 +2023-03-09 21:46:47,678 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97656.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:46:50,036 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3637, 3.3077, 2.0574, 4.1494, 2.8136, 3.8489, 2.4536, 3.5446], + device='cuda:1'), covar=tensor([0.0680, 0.0850, 0.1623, 0.0473, 0.0885, 0.0280, 0.1195, 0.0529], + device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0234, 0.0197, 0.0299, 0.0199, 0.0273, 0.0208, 0.0209], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 21:47:09,202 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97674.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 21:47:20,615 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-09 21:47:21,019 INFO [train.py:898] (1/4) Epoch 27, batch 3200, loss[loss=0.1378, simple_loss=0.2222, pruned_loss=0.02675, over 18512.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2461, pruned_loss=0.03251, over 3587426.20 frames. ], batch size: 44, lr: 4.13e-03, grad_scale: 8.0 +2023-03-09 21:47:36,645 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97698.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 21:47:43,537 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97703.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:47:44,756 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97704.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:47:50,082 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.671e+02 2.606e+02 3.062e+02 3.639e+02 1.226e+03, threshold=6.124e+02, percent-clipped=5.0 +2023-03-09 21:48:14,255 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-03-09 21:48:14,874 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97730.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:48:20,212 INFO [train.py:898] (1/4) Epoch 27, batch 3250, loss[loss=0.1604, simple_loss=0.2566, pruned_loss=0.03207, over 15868.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2467, pruned_loss=0.03255, over 3590046.26 frames. ], batch size: 94, lr: 4.13e-03, grad_scale: 8.0 +2023-03-09 21:48:20,643 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97735.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 21:49:19,381 INFO [train.py:898] (1/4) Epoch 27, batch 3300, loss[loss=0.1814, simple_loss=0.2708, pruned_loss=0.04601, over 18282.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2456, pruned_loss=0.03254, over 3591742.13 frames. 
], batch size: 60, lr: 4.13e-03, grad_scale: 8.0 +2023-03-09 21:49:47,829 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8862, 5.3482, 5.3254, 5.3181, 4.7879, 5.2461, 4.7165, 5.2371], + device='cuda:1'), covar=tensor([0.0255, 0.0288, 0.0212, 0.0468, 0.0423, 0.0239, 0.1126, 0.0338], + device='cuda:1'), in_proj_covar=tensor([0.0236, 0.0279, 0.0280, 0.0358, 0.0288, 0.0290, 0.0322, 0.0282], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 21:49:48,578 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.710e+02 2.346e+02 2.851e+02 3.327e+02 7.225e+02, threshold=5.702e+02, percent-clipped=2.0 +2023-03-09 21:50:18,957 INFO [train.py:898] (1/4) Epoch 27, batch 3350, loss[loss=0.1575, simple_loss=0.254, pruned_loss=0.03049, over 16157.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2459, pruned_loss=0.0325, over 3602833.74 frames. ], batch size: 94, lr: 4.13e-03, grad_scale: 8.0 +2023-03-09 21:51:17,697 INFO [train.py:898] (1/4) Epoch 27, batch 3400, loss[loss=0.1821, simple_loss=0.2663, pruned_loss=0.04894, over 18392.00 frames. ], tot_loss[loss=0.1557, simple_loss=0.2462, pruned_loss=0.03262, over 3605598.92 frames. ], batch size: 52, lr: 4.12e-03, grad_scale: 8.0 +2023-03-09 21:51:21,315 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97888.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:51:45,798 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.740e+02 2.527e+02 3.006e+02 3.880e+02 7.423e+02, threshold=6.013e+02, percent-clipped=1.0 +2023-03-09 21:51:48,307 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97911.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:51:54,525 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97916.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:52:16,385 INFO [train.py:898] (1/4) Epoch 27, batch 3450, loss[loss=0.1483, simple_loss=0.2385, pruned_loss=0.02905, over 18497.00 frames. ], tot_loss[loss=0.156, simple_loss=0.2467, pruned_loss=0.03264, over 3603222.27 frames. ], batch size: 51, lr: 4.12e-03, grad_scale: 8.0 +2023-03-09 21:52:21,214 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-03-09 21:52:33,449 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97949.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:52:47,998 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5517, 2.8538, 2.4563, 2.8318, 3.6393, 3.5459, 3.1210, 2.7926], + device='cuda:1'), covar=tensor([0.0218, 0.0306, 0.0594, 0.0451, 0.0193, 0.0173, 0.0442, 0.0466], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0146, 0.0168, 0.0166, 0.0142, 0.0128, 0.0162, 0.0165], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 21:52:51,310 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97964.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:53:15,518 INFO [train.py:898] (1/4) Epoch 27, batch 3500, loss[loss=0.1813, simple_loss=0.2698, pruned_loss=0.04643, over 18113.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2465, pruned_loss=0.03268, over 3596858.89 frames. ], batch size: 62, lr: 4.12e-03, grad_scale: 8.0 +2023-03-09 21:53:30,748 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. 
limit=2.0 +2023-03-09 21:53:31,232 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97998.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:53:41,426 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98003.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:53:48,590 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.729e+02 2.463e+02 3.009e+02 3.694e+02 8.089e+02, threshold=6.019e+02, percent-clipped=2.0 +2023-03-09 21:54:12,088 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98030.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 21:54:12,115 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98030.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:54:17,272 INFO [train.py:898] (1/4) Epoch 27, batch 3550, loss[loss=0.1494, simple_loss=0.2405, pruned_loss=0.0291, over 18388.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2461, pruned_loss=0.03244, over 3597071.92 frames. ], batch size: 50, lr: 4.12e-03, grad_scale: 8.0 +2023-03-09 21:54:29,870 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98046.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:54:35,249 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98051.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:54:46,146 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6101, 2.9003, 2.6039, 2.8916, 3.7114, 3.6102, 3.1433, 2.9387], + device='cuda:1'), covar=tensor([0.0198, 0.0294, 0.0532, 0.0415, 0.0177, 0.0150, 0.0370, 0.0395], + device='cuda:1'), in_proj_covar=tensor([0.0146, 0.0147, 0.0168, 0.0167, 0.0143, 0.0129, 0.0163, 0.0166], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 21:55:04,914 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98078.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:55:12,351 INFO [train.py:898] (1/4) Epoch 27, batch 3600, loss[loss=0.143, simple_loss=0.2332, pruned_loss=0.02641, over 18374.00 frames. ], tot_loss[loss=0.1557, simple_loss=0.2465, pruned_loss=0.03247, over 3595678.49 frames. ], batch size: 46, lr: 4.12e-03, grad_scale: 8.0 +2023-03-09 21:55:35,530 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98106.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:55:38,135 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.847e+02 2.404e+02 2.873e+02 3.414e+02 5.036e+02, threshold=5.745e+02, percent-clipped=0.0 +2023-03-09 21:56:16,247 INFO [train.py:898] (1/4) Epoch 28, batch 0, loss[loss=0.1598, simple_loss=0.2488, pruned_loss=0.03536, over 18616.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2488, pruned_loss=0.03536, over 18616.00 frames. ], batch size: 52, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 21:56:16,248 INFO [train.py:923] (1/4) Computing validation loss +2023-03-09 21:56:28,180 INFO [train.py:932] (1/4) Epoch 28, validation: loss=0.1499, simple_loss=0.2483, pruned_loss=0.02581, over 944034.00 frames. 
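The optim.py entries report, at each logging interval, five quantiles (min, 25%, 50%, 75%, max) of the recent gradient norms, and in every such entry the printed threshold is Clipping_scale times the middle value (e.g. 2.0 * 3.045e+02 = 6.090e+02 in the Epoch 27 batch 3100 entry above): gradients are clipped at twice the running median norm, with percent-clipped presumably the share of batches since the last report that exceeded it. A minimal sketch of such a scheme, inferred from the logged numbers alone; the class and parameter names here are illustrative, not the icefall optimizer's API:

import torch
from collections import deque

class MedianGradClipper:
    # Illustrative: clip the global gradient norm at clipping_scale times
    # the running median of recent norms, and expose the five quantiles
    # (min/25%/50%/75%/max) that the log lines above print.
    def __init__(self, clipping_scale=2.0, window=200):
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)  # recent global grad norms

    def clip_(self, parameters):
        grads = [p.grad for p in parameters if p.grad is not None]
        total_norm = float(torch.norm(torch.stack([g.norm(2) for g in grads])))
        self.norms.append(total_norm)
        hist = sorted(self.norms)
        quartiles = [hist[int(q * (len(hist) - 1))]
                     for q in (0.0, 0.25, 0.5, 0.75, 1.0)]
        threshold = self.clipping_scale * quartiles[2]  # 2.0 x median
        if total_norm > threshold:  # rescale all grads below the threshold
            for g in grads:
                g.mul_(threshold / (total_norm + 1e-6))
        return quartiles, threshold
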
+2023-03-09 21:56:28,181 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-09 21:57:24,354 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98167.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:57:26,202 INFO [train.py:898] (1/4) Epoch 28, batch 50, loss[loss=0.141, simple_loss=0.2237, pruned_loss=0.02919, over 18388.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2454, pruned_loss=0.032, over 817467.90 frames. ], batch size: 42, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 21:58:15,011 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.839e+02 2.500e+02 2.819e+02 3.475e+02 6.212e+02, threshold=5.638e+02, percent-clipped=1.0 +2023-03-09 21:58:17,537 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98211.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:58:26,488 INFO [train.py:898] (1/4) Epoch 28, batch 100, loss[loss=0.138, simple_loss=0.2234, pruned_loss=0.02627, over 18353.00 frames. ], tot_loss[loss=0.155, simple_loss=0.2461, pruned_loss=0.03193, over 1439328.54 frames. ], batch size: 42, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 21:58:34,798 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5383, 6.0407, 5.6798, 5.8147, 5.6246, 5.4690, 6.1230, 6.0599], + device='cuda:1'), covar=tensor([0.1158, 0.0796, 0.0444, 0.0728, 0.1486, 0.0722, 0.0563, 0.0747], + device='cuda:1'), in_proj_covar=tensor([0.0635, 0.0559, 0.0399, 0.0581, 0.0778, 0.0575, 0.0788, 0.0607], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:1') +2023-03-09 21:58:41,226 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6837, 2.3006, 2.6813, 2.6754, 3.2364, 4.9427, 4.7405, 3.3997], + device='cuda:1'), covar=tensor([0.2058, 0.2674, 0.3206, 0.2096, 0.2496, 0.0236, 0.0388, 0.1073], + device='cuda:1'), in_proj_covar=tensor([0.0329, 0.0364, 0.0410, 0.0292, 0.0397, 0.0268, 0.0302, 0.0272], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 21:58:53,009 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0406, 4.2642, 2.5435, 4.1118, 5.3680, 2.8979, 3.9424, 4.0717], + device='cuda:1'), covar=tensor([0.0211, 0.1260, 0.1691, 0.0670, 0.0099, 0.1199, 0.0715, 0.0737], + device='cuda:1'), in_proj_covar=tensor([0.0185, 0.0282, 0.0210, 0.0202, 0.0143, 0.0187, 0.0223, 0.0232], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 21:58:55,974 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98244.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:59:14,188 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98259.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:59:25,571 INFO [train.py:898] (1/4) Epoch 28, batch 150, loss[loss=0.1441, simple_loss=0.2349, pruned_loss=0.02665, over 18348.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2463, pruned_loss=0.03228, over 1917361.34 frames. 
], batch size: 46, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 21:59:37,589 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7650, 3.7129, 5.1044, 4.5954, 3.4253, 3.2083, 4.5713, 5.3853], + device='cuda:1'), covar=tensor([0.0856, 0.1689, 0.0238, 0.0387, 0.0963, 0.1170, 0.0376, 0.0217], + device='cuda:1'), in_proj_covar=tensor([0.0154, 0.0284, 0.0172, 0.0188, 0.0198, 0.0198, 0.0201, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 22:00:12,026 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3031, 5.2778, 4.8987, 5.2386, 5.2297, 4.5925, 5.1014, 4.8997], + device='cuda:1'), covar=tensor([0.0451, 0.0453, 0.1300, 0.0755, 0.0579, 0.0476, 0.0482, 0.1033], + device='cuda:1'), in_proj_covar=tensor([0.0516, 0.0589, 0.0728, 0.0455, 0.0474, 0.0538, 0.0572, 0.0701], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 22:00:13,389 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8589, 3.4536, 5.1069, 2.9736, 4.4432, 2.6377, 3.0598, 1.9158], + device='cuda:1'), covar=tensor([0.1235, 0.1083, 0.0142, 0.0989, 0.0499, 0.2665, 0.2689, 0.2216], + device='cuda:1'), in_proj_covar=tensor([0.0231, 0.0254, 0.0228, 0.0209, 0.0265, 0.0280, 0.0337, 0.0246], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 22:00:14,047 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.664e+02 2.460e+02 2.957e+02 3.466e+02 5.415e+02, threshold=5.915e+02, percent-clipped=0.0 +2023-03-09 22:00:25,421 INFO [train.py:898] (1/4) Epoch 28, batch 200, loss[loss=0.1337, simple_loss=0.221, pruned_loss=0.02321, over 18493.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2465, pruned_loss=0.03225, over 2290138.87 frames. ], batch size: 47, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 22:00:38,058 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98330.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 22:00:52,413 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7457, 3.6669, 5.0575, 4.4483, 3.5187, 3.0883, 4.4915, 5.3295], + device='cuda:1'), covar=tensor([0.0834, 0.1561, 0.0202, 0.0424, 0.0882, 0.1201, 0.0403, 0.0225], + device='cuda:1'), in_proj_covar=tensor([0.0153, 0.0284, 0.0172, 0.0187, 0.0197, 0.0198, 0.0201, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 22:01:24,229 INFO [train.py:898] (1/4) Epoch 28, batch 250, loss[loss=0.1821, simple_loss=0.2783, pruned_loss=0.04292, over 18225.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2474, pruned_loss=0.03244, over 2581036.07 frames. 
], batch size: 60, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 22:01:30,445 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8928, 4.1845, 2.4769, 3.9811, 5.1672, 2.6282, 3.7533, 4.1253], + device='cuda:1'), covar=tensor([0.0197, 0.1253, 0.1666, 0.0650, 0.0110, 0.1244, 0.0730, 0.0637], + device='cuda:1'), in_proj_covar=tensor([0.0183, 0.0280, 0.0209, 0.0201, 0.0143, 0.0186, 0.0222, 0.0231], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 22:01:34,699 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98378.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 22:01:43,815 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9610, 3.4264, 4.6979, 3.9019, 3.0861, 4.9243, 4.1429, 3.2927], + device='cuda:1'), covar=tensor([0.0482, 0.1209, 0.0232, 0.0476, 0.1353, 0.0190, 0.0552, 0.0829], + device='cuda:1'), in_proj_covar=tensor([0.0223, 0.0246, 0.0233, 0.0174, 0.0229, 0.0221, 0.0260, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 22:02:11,171 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.633e+02 2.515e+02 2.972e+02 3.446e+02 6.764e+02, threshold=5.944e+02, percent-clipped=3.0 +2023-03-09 22:02:12,588 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98410.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:02:23,112 INFO [train.py:898] (1/4) Epoch 28, batch 300, loss[loss=0.1607, simple_loss=0.2582, pruned_loss=0.03158, over 18299.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2468, pruned_loss=0.03241, over 2814762.30 frames. ], batch size: 57, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 22:02:32,673 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9564, 3.9673, 4.0478, 3.8649, 3.9350, 3.9119, 4.0406, 4.0846], + device='cuda:1'), covar=tensor([0.0084, 0.0086, 0.0083, 0.0114, 0.0076, 0.0149, 0.0082, 0.0095], + device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0075, 0.0080, 0.0100, 0.0080, 0.0109, 0.0092, 0.0092], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 22:02:56,643 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7644, 2.5193, 2.6759, 2.7993, 3.3972, 4.8909, 4.8668, 3.2736], + device='cuda:1'), covar=tensor([0.1994, 0.2498, 0.2972, 0.1998, 0.2268, 0.0269, 0.0338, 0.1141], + device='cuda:1'), in_proj_covar=tensor([0.0331, 0.0365, 0.0412, 0.0293, 0.0398, 0.0268, 0.0302, 0.0273], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:1') +2023-03-09 22:03:14,400 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98462.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:03:22,723 INFO [train.py:898] (1/4) Epoch 28, batch 350, loss[loss=0.146, simple_loss=0.2392, pruned_loss=0.02641, over 18556.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2474, pruned_loss=0.03274, over 2982734.09 frames. 
], batch size: 54, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 22:03:25,381 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98471.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:04:09,220 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.910e+02 2.547e+02 2.961e+02 3.445e+02 6.973e+02, threshold=5.923e+02, percent-clipped=1.0 +2023-03-09 22:04:21,294 INFO [train.py:898] (1/4) Epoch 28, batch 400, loss[loss=0.1711, simple_loss=0.2693, pruned_loss=0.03644, over 18252.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2479, pruned_loss=0.03253, over 3126575.82 frames. ], batch size: 60, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 22:04:37,378 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-09 22:04:43,822 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6263, 2.9735, 2.6266, 2.8830, 3.7045, 3.5659, 3.2234, 2.9418], + device='cuda:1'), covar=tensor([0.0165, 0.0284, 0.0545, 0.0406, 0.0183, 0.0186, 0.0378, 0.0408], + device='cuda:1'), in_proj_covar=tensor([0.0147, 0.0147, 0.0168, 0.0168, 0.0144, 0.0130, 0.0163, 0.0166], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 22:04:49,543 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98543.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:04:50,565 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98544.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:05:20,073 INFO [train.py:898] (1/4) Epoch 28, batch 450, loss[loss=0.1436, simple_loss=0.2296, pruned_loss=0.02876, over 18259.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2469, pruned_loss=0.03249, over 3225834.16 frames. ], batch size: 47, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 22:05:46,787 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98592.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:06:01,918 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98604.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:06:07,231 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.606e+02 2.466e+02 2.902e+02 3.493e+02 6.837e+02, threshold=5.804e+02, percent-clipped=4.0 +2023-03-09 22:06:13,866 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9616, 4.5348, 4.5862, 3.5593, 3.7837, 3.5639, 2.8554, 2.8928], + device='cuda:1'), covar=tensor([0.0211, 0.0173, 0.0085, 0.0290, 0.0339, 0.0244, 0.0674, 0.0782], + device='cuda:1'), in_proj_covar=tensor([0.0075, 0.0063, 0.0069, 0.0071, 0.0094, 0.0071, 0.0078, 0.0087], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0007, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-09 22:06:19,750 INFO [train.py:898] (1/4) Epoch 28, batch 500, loss[loss=0.1333, simple_loss=0.2202, pruned_loss=0.02316, over 18419.00 frames. ], tot_loss[loss=0.1552, simple_loss=0.2462, pruned_loss=0.03207, over 3309301.46 frames. 
], batch size: 42, lr: 4.03e-03, grad_scale: 8.0 +2023-03-09 22:06:24,719 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6667, 3.0003, 4.2960, 3.5005, 2.6150, 4.5142, 3.8232, 2.8655], + device='cuda:1'), covar=tensor([0.0533, 0.1420, 0.0305, 0.0577, 0.1658, 0.0234, 0.0600, 0.1018], + device='cuda:1'), in_proj_covar=tensor([0.0222, 0.0245, 0.0233, 0.0174, 0.0228, 0.0221, 0.0260, 0.0198], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 22:07:14,943 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.24 vs. limit=5.0 +2023-03-09 22:07:18,532 INFO [train.py:898] (1/4) Epoch 28, batch 550, loss[loss=0.1615, simple_loss=0.2556, pruned_loss=0.03376, over 18571.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2462, pruned_loss=0.03227, over 3365287.97 frames. ], batch size: 54, lr: 4.03e-03, grad_scale: 8.0 +2023-03-09 22:07:38,672 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.68 vs. limit=2.0 +2023-03-09 22:08:05,609 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.504e+02 2.472e+02 2.832e+02 3.276e+02 6.900e+02, threshold=5.664e+02, percent-clipped=1.0 +2023-03-09 22:08:17,664 INFO [train.py:898] (1/4) Epoch 28, batch 600, loss[loss=0.1484, simple_loss=0.2424, pruned_loss=0.02725, over 18343.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2449, pruned_loss=0.03163, over 3423136.25 frames. ], batch size: 55, lr: 4.03e-03, grad_scale: 8.0 +2023-03-09 22:08:38,850 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4120, 3.4254, 2.1119, 4.2984, 2.9490, 4.0739, 2.3989, 3.7383], + device='cuda:1'), covar=tensor([0.0759, 0.0895, 0.1603, 0.0525, 0.0853, 0.0392, 0.1320, 0.0506], + device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0235, 0.0196, 0.0299, 0.0198, 0.0274, 0.0209, 0.0209], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 22:09:08,708 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98762.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:09:12,990 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98766.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:09:16,298 INFO [train.py:898] (1/4) Epoch 28, batch 650, loss[loss=0.1585, simple_loss=0.2532, pruned_loss=0.03186, over 17006.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.2454, pruned_loss=0.03175, over 3470622.91 frames. ], batch size: 78, lr: 4.03e-03, grad_scale: 8.0 +2023-03-09 22:09:42,187 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-09 22:10:03,543 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.904e+02 2.586e+02 3.038e+02 3.660e+02 8.440e+02, threshold=6.077e+02, percent-clipped=4.0 +2023-03-09 22:10:04,946 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98810.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:10:15,437 INFO [train.py:898] (1/4) Epoch 28, batch 700, loss[loss=0.1531, simple_loss=0.243, pruned_loss=0.03159, over 18401.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2455, pruned_loss=0.03183, over 3493267.62 frames. 
], batch size: 52, lr: 4.03e-03, grad_scale: 8.0 +2023-03-09 22:10:44,949 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4211, 6.0095, 5.6026, 5.8237, 5.6019, 5.4561, 6.0505, 6.0531], + device='cuda:1'), covar=tensor([0.1186, 0.0759, 0.0442, 0.0611, 0.1306, 0.0643, 0.0568, 0.0608], + device='cuda:1'), in_proj_covar=tensor([0.0643, 0.0566, 0.0406, 0.0586, 0.0789, 0.0583, 0.0803, 0.0613], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:1') +2023-03-09 22:11:14,533 INFO [train.py:898] (1/4) Epoch 28, batch 750, loss[loss=0.1711, simple_loss=0.2654, pruned_loss=0.03841, over 18300.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2455, pruned_loss=0.0319, over 3511554.40 frames. ], batch size: 54, lr: 4.03e-03, grad_scale: 8.0 +2023-03-09 22:11:39,942 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98890.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:11:46,329 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98895.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:11:50,704 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98899.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:12:02,001 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.555e+02 2.589e+02 3.008e+02 3.946e+02 6.579e+02, threshold=6.017e+02, percent-clipped=1.0 +2023-03-09 22:12:13,946 INFO [train.py:898] (1/4) Epoch 28, batch 800, loss[loss=0.1773, simple_loss=0.2672, pruned_loss=0.04365, over 17994.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.246, pruned_loss=0.03213, over 3530165.01 frames. ], batch size: 65, lr: 4.03e-03, grad_scale: 8.0 +2023-03-09 22:12:52,308 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98951.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:12:58,102 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98956.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:12:59,642 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. limit=2.0 +2023-03-09 22:13:12,395 INFO [train.py:898] (1/4) Epoch 28, batch 850, loss[loss=0.1567, simple_loss=0.2507, pruned_loss=0.03132, over 17703.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2463, pruned_loss=0.03233, over 3544260.20 frames. ], batch size: 70, lr: 4.03e-03, grad_scale: 8.0 +2023-03-09 22:13:15,055 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98971.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:14:00,115 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.700e+02 2.503e+02 2.990e+02 3.802e+02 7.789e+02, threshold=5.979e+02, percent-clipped=2.0 +2023-03-09 22:14:11,481 INFO [train.py:898] (1/4) Epoch 28, batch 900, loss[loss=0.156, simple_loss=0.2324, pruned_loss=0.03975, over 17688.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2463, pruned_loss=0.0324, over 3559090.27 frames. ], batch size: 39, lr: 4.03e-03, grad_scale: 8.0 +2023-03-09 22:14:14,897 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. 
limit=2.0 +2023-03-09 22:14:27,825 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99032.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:14:49,945 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99051.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:15:07,187 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99066.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:15:10,379 INFO [train.py:898] (1/4) Epoch 28, batch 950, loss[loss=0.1541, simple_loss=0.2529, pruned_loss=0.02765, over 18400.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2459, pruned_loss=0.03252, over 3565995.69 frames. ], batch size: 48, lr: 4.02e-03, grad_scale: 8.0 +2023-03-09 22:15:39,263 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0530, 4.2669, 2.6609, 4.1771, 5.3313, 2.9203, 3.8373, 4.2060], + device='cuda:1'), covar=tensor([0.0199, 0.1379, 0.1657, 0.0683, 0.0125, 0.1201, 0.0731, 0.0629], + device='cuda:1'), in_proj_covar=tensor([0.0184, 0.0280, 0.0209, 0.0201, 0.0143, 0.0185, 0.0222, 0.0231], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 22:15:57,908 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.971e+02 2.497e+02 3.033e+02 3.631e+02 7.579e+02, threshold=6.066e+02, percent-clipped=1.0 +2023-03-09 22:16:01,732 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99112.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:16:03,845 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99114.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:16:09,568 INFO [train.py:898] (1/4) Epoch 28, batch 1000, loss[loss=0.1431, simple_loss=0.2319, pruned_loss=0.02712, over 18332.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.2455, pruned_loss=0.03237, over 3576069.13 frames. ], batch size: 46, lr: 4.02e-03, grad_scale: 8.0 +2023-03-09 22:17:08,447 INFO [train.py:898] (1/4) Epoch 28, batch 1050, loss[loss=0.1792, simple_loss=0.2682, pruned_loss=0.0451, over 18032.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.246, pruned_loss=0.03257, over 3567718.04 frames. ], batch size: 65, lr: 4.02e-03, grad_scale: 8.0 +2023-03-09 22:17:43,473 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99199.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:17:55,235 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.556e+02 2.410e+02 2.895e+02 3.644e+02 7.761e+02, threshold=5.789e+02, percent-clipped=3.0 +2023-03-09 22:18:06,851 INFO [train.py:898] (1/4) Epoch 28, batch 1100, loss[loss=0.1309, simple_loss=0.2137, pruned_loss=0.02406, over 18521.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2463, pruned_loss=0.03277, over 3572945.91 frames. 
], batch size: 44, lr: 4.02e-03, grad_scale: 8.0 +2023-03-09 22:18:18,530 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99229.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:18:39,026 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99246.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:18:40,202 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99247.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:18:44,752 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99251.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:19:05,421 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99268.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:19:06,229 INFO [train.py:898] (1/4) Epoch 28, batch 1150, loss[loss=0.1494, simple_loss=0.2456, pruned_loss=0.02659, over 17063.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2465, pruned_loss=0.03267, over 3581474.15 frames. ], batch size: 78, lr: 4.02e-03, grad_scale: 8.0 +2023-03-09 22:19:31,249 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99290.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:19:53,825 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.536e+02 2.497e+02 2.883e+02 3.542e+02 9.328e+02, threshold=5.765e+02, percent-clipped=1.0 +2023-03-09 22:20:00,946 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8638, 5.3993, 2.7944, 5.2463, 5.1942, 5.4189, 5.2027, 2.6333], + device='cuda:1'), covar=tensor([0.0240, 0.0052, 0.0750, 0.0071, 0.0060, 0.0061, 0.0078, 0.1033], + device='cuda:1'), in_proj_covar=tensor([0.0094, 0.0084, 0.0098, 0.0100, 0.0091, 0.0080, 0.0087, 0.0099], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-09 22:20:05,823 INFO [train.py:898] (1/4) Epoch 28, batch 1200, loss[loss=0.1499, simple_loss=0.2467, pruned_loss=0.02655, over 17195.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.246, pruned_loss=0.03228, over 3586733.54 frames. ], batch size: 78, lr: 4.02e-03, grad_scale: 8.0 +2023-03-09 22:20:08,779 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-03-09 22:20:11,989 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99324.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:20:15,124 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99327.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:20:17,743 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99329.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:20:54,486 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2041, 5.2888, 5.5230, 5.5691, 5.0575, 6.0534, 5.6762, 5.3215], + device='cuda:1'), covar=tensor([0.1107, 0.0592, 0.0852, 0.0748, 0.1288, 0.0734, 0.0622, 0.1675], + device='cuda:1'), in_proj_covar=tensor([0.0373, 0.0304, 0.0330, 0.0333, 0.0340, 0.0442, 0.0296, 0.0433], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 22:21:04,011 INFO [train.py:898] (1/4) Epoch 28, batch 1250, loss[loss=0.1277, simple_loss=0.2129, pruned_loss=0.02124, over 18441.00 frames. ], tot_loss[loss=0.1552, simple_loss=0.2459, pruned_loss=0.03231, over 3603027.68 frames. 
], batch size: 43, lr: 4.02e-03, grad_scale: 8.0 +2023-03-09 22:21:23,329 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99385.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:21:43,162 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.54 vs. limit=5.0 +2023-03-09 22:21:49,412 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99407.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:21:51,417 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.740e+02 2.716e+02 3.087e+02 3.833e+02 7.403e+02, threshold=6.174e+02, percent-clipped=2.0 +2023-03-09 22:22:03,378 INFO [train.py:898] (1/4) Epoch 28, batch 1300, loss[loss=0.1628, simple_loss=0.2535, pruned_loss=0.03607, over 18332.00 frames. ], tot_loss[loss=0.156, simple_loss=0.2468, pruned_loss=0.03261, over 3607006.43 frames. ], batch size: 55, lr: 4.02e-03, grad_scale: 16.0 +2023-03-09 22:22:04,892 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99420.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:22:12,614 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-03-09 22:22:44,377 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8126, 2.9683, 2.6729, 2.9947, 3.8287, 3.7471, 3.2522, 3.0114], + device='cuda:1'), covar=tensor([0.0156, 0.0293, 0.0550, 0.0385, 0.0155, 0.0156, 0.0389, 0.0422], + device='cuda:1'), in_proj_covar=tensor([0.0149, 0.0148, 0.0170, 0.0169, 0.0144, 0.0131, 0.0164, 0.0166], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 22:23:01,821 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99468.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 22:23:02,621 INFO [train.py:898] (1/4) Epoch 28, batch 1350, loss[loss=0.1322, simple_loss=0.2117, pruned_loss=0.02635, over 18423.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2462, pruned_loss=0.03246, over 3610975.97 frames. ], batch size: 43, lr: 4.02e-03, grad_scale: 16.0 +2023-03-09 22:23:03,176 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. limit=2.0 +2023-03-09 22:23:04,534 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-09 22:23:16,673 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-09 22:23:17,480 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99481.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:23:20,106 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.16 vs. limit=5.0 +2023-03-09 22:23:49,954 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.936e+02 2.551e+02 3.034e+02 3.736e+02 7.669e+02, threshold=6.068e+02, percent-clipped=1.0 +2023-03-09 22:24:01,496 INFO [train.py:898] (1/4) Epoch 28, batch 1400, loss[loss=0.1531, simple_loss=0.2484, pruned_loss=0.02889, over 18349.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2465, pruned_loss=0.03281, over 3595672.35 frames. 
], batch size: 55, lr: 4.02e-03, grad_scale: 16.0 +2023-03-09 22:24:13,809 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99529.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 22:24:33,035 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99546.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:24:39,414 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99551.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:24:59,073 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99568.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:24:59,945 INFO [train.py:898] (1/4) Epoch 28, batch 1450, loss[loss=0.13, simple_loss=0.2185, pruned_loss=0.02069, over 18500.00 frames. ], tot_loss[loss=0.1563, simple_loss=0.2469, pruned_loss=0.03286, over 3594137.78 frames. ], batch size: 44, lr: 4.01e-03, grad_scale: 16.0 +2023-03-09 22:25:09,859 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7639, 3.0337, 2.7846, 2.9313, 3.7719, 3.7104, 3.2637, 2.9944], + device='cuda:1'), covar=tensor([0.0182, 0.0264, 0.0498, 0.0426, 0.0183, 0.0150, 0.0391, 0.0415], + device='cuda:1'), in_proj_covar=tensor([0.0149, 0.0148, 0.0170, 0.0169, 0.0145, 0.0131, 0.0163, 0.0167], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 22:25:19,494 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99585.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:25:24,253 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9829, 4.6568, 4.6160, 3.6583, 3.8295, 3.7115, 2.8655, 2.8034], + device='cuda:1'), covar=tensor([0.0231, 0.0144, 0.0094, 0.0281, 0.0395, 0.0204, 0.0634, 0.0758], + device='cuda:1'), in_proj_covar=tensor([0.0076, 0.0064, 0.0070, 0.0073, 0.0095, 0.0072, 0.0079, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 22:25:24,277 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99589.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:25:29,569 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99594.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:25:35,183 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99599.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:25:48,004 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.800e+02 2.454e+02 2.982e+02 3.510e+02 7.061e+02, threshold=5.964e+02, percent-clipped=2.0 +2023-03-09 22:25:59,259 INFO [train.py:898] (1/4) Epoch 28, batch 1500, loss[loss=0.1533, simple_loss=0.2463, pruned_loss=0.0302, over 18508.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2462, pruned_loss=0.03269, over 3594608.60 frames. 
], batch size: 51, lr: 4.01e-03, grad_scale: 16.0 +2023-03-09 22:26:05,313 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99624.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:26:09,190 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99627.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:26:11,619 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99629.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:26:20,499 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0107, 5.5132, 3.1217, 5.3043, 5.2367, 5.5057, 5.3503, 2.9302], + device='cuda:1'), covar=tensor([0.0217, 0.0059, 0.0623, 0.0067, 0.0068, 0.0064, 0.0078, 0.0876], + device='cuda:1'), in_proj_covar=tensor([0.0095, 0.0084, 0.0100, 0.0100, 0.0091, 0.0081, 0.0088, 0.0100], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-09 22:26:35,806 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99650.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:26:58,070 INFO [train.py:898] (1/4) Epoch 28, batch 1550, loss[loss=0.1782, simple_loss=0.2726, pruned_loss=0.04187, over 18627.00 frames. ], tot_loss[loss=0.156, simple_loss=0.2465, pruned_loss=0.03271, over 3592853.62 frames. ], batch size: 52, lr: 4.01e-03, grad_scale: 16.0 +2023-03-09 22:27:05,106 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99675.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:27:11,285 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99680.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:27:11,484 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99680.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:27:11,498 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8137, 3.1436, 2.5099, 3.0172, 3.8660, 3.7114, 3.3858, 3.1736], + device='cuda:1'), covar=tensor([0.0201, 0.0223, 0.0662, 0.0339, 0.0147, 0.0165, 0.0310, 0.0310], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0149, 0.0170, 0.0170, 0.0146, 0.0133, 0.0164, 0.0168], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 22:27:15,094 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-09 22:27:43,219 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99707.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:27:45,105 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.790e+02 2.548e+02 3.050e+02 3.629e+02 6.290e+02, threshold=6.100e+02, percent-clipped=1.0 +2023-03-09 22:27:54,721 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.98 vs. limit=5.0 +2023-03-09 22:27:57,321 INFO [train.py:898] (1/4) Epoch 28, batch 1600, loss[loss=0.1356, simple_loss=0.2255, pruned_loss=0.02284, over 18369.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2467, pruned_loss=0.03274, over 3586963.11 frames. ], batch size: 46, lr: 4.01e-03, grad_scale: 16.0 +2023-03-09 22:28:02,638 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. 
limit=2.0
+2023-03-09 22:28:10,217 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8519, 4.3201, 4.2736, 3.2954, 3.5682, 3.4410, 2.5102, 2.5629],
+ device='cuda:1'), covar=tensor([0.0204, 0.0168, 0.0106, 0.0350, 0.0373, 0.0236, 0.0759, 0.0826],
+ device='cuda:1'), in_proj_covar=tensor([0.0075, 0.0064, 0.0070, 0.0072, 0.0094, 0.0072, 0.0079, 0.0088],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006],
+ device='cuda:1')
+2023-03-09 22:28:22,826 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99741.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:28:39,286 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99755.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:28:56,209 INFO [train.py:898] (1/4) Epoch 28, batch 1650, loss[loss=0.1335, simple_loss=0.2181, pruned_loss=0.02445, over 18470.00 frames. ], tot_loss[loss=0.1557, simple_loss=0.2466, pruned_loss=0.03243, over 3587785.80 frames. ], batch size: 44, lr: 4.01e-03, grad_scale: 16.0
+2023-03-09 22:29:04,447 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99776.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:29:42,826 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.627e+02 2.331e+02 2.829e+02 3.453e+02 7.033e+02, threshold=5.658e+02, percent-clipped=1.0
+2023-03-09 22:29:55,638 INFO [train.py:898] (1/4) Epoch 28, batch 1700, loss[loss=0.1657, simple_loss=0.2582, pruned_loss=0.03654, over 18158.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2464, pruned_loss=0.03213, over 3586913.62 frames. ], batch size: 62, lr: 4.01e-03, grad_scale: 16.0
+2023-03-09 22:30:01,605 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99824.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 22:30:54,420 INFO [train.py:898] (1/4) Epoch 28, batch 1750, loss[loss=0.198, simple_loss=0.2817, pruned_loss=0.0572, over 12982.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2455, pruned_loss=0.03212, over 3582544.39 frames. ], batch size: 129, lr: 4.01e-03, grad_scale: 16.0
+2023-03-09 22:31:12,562 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99885.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:31:31,081 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4981, 3.4777, 2.3624, 4.3723, 3.0204, 4.1315, 2.6772, 3.9320],
+ device='cuda:1'), covar=tensor([0.0758, 0.0824, 0.1414, 0.0518, 0.0891, 0.0340, 0.1099, 0.0441],
+ device='cuda:1'), in_proj_covar=tensor([0.0226, 0.0234, 0.0196, 0.0299, 0.0200, 0.0274, 0.0208, 0.0209],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 22:31:39,959 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.785e+02 2.621e+02 3.029e+02 3.741e+02 5.430e+02, threshold=6.058e+02, percent-clipped=0.0
+2023-03-09 22:31:51,913 INFO [train.py:898] (1/4) Epoch 28, batch 1800, loss[loss=0.1503, simple_loss=0.2438, pruned_loss=0.02843, over 18569.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2465, pruned_loss=0.03253, over 3580139.76 frames. ], batch size: 54, lr: 4.01e-03, grad_scale: 16.0
+2023-03-09 22:31:58,545 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99924.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:31:58,695 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99924.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:32:08,640 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99933.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:32:22,795 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99945.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:32:33,621 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.56 vs. limit=5.0
+2023-03-09 22:32:51,450 INFO [train.py:898] (1/4) Epoch 28, batch 1850, loss[loss=0.1355, simple_loss=0.2161, pruned_loss=0.02748, over 17758.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2461, pruned_loss=0.03223, over 3579444.22 frames. ], batch size: 39, lr: 4.01e-03, grad_scale: 16.0
+2023-03-09 22:32:55,877 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99972.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:33:05,211 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99980.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:33:41,424 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100007.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 22:33:43,357 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.762e+02 2.401e+02 2.680e+02 3.285e+02 5.327e+02, threshold=5.360e+02, percent-clipped=0.0
+2023-03-09 22:33:55,961 INFO [train.py:898] (1/4) Epoch 28, batch 1900, loss[loss=0.1661, simple_loss=0.2675, pruned_loss=0.03234, over 18491.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.246, pruned_loss=0.03246, over 3565019.44 frames. ], batch size: 53, lr: 4.01e-03, grad_scale: 16.0
+2023-03-09 22:34:06,780 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100028.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:34:15,691 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100036.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:34:20,549 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9501, 3.8112, 5.2404, 2.9557, 4.5710, 2.6506, 3.2110, 1.6945],
+ device='cuda:1'), covar=tensor([0.1250, 0.0931, 0.0158, 0.1026, 0.0495, 0.2666, 0.2692, 0.2448],
+ device='cuda:1'), in_proj_covar=tensor([0.0231, 0.0253, 0.0228, 0.0208, 0.0265, 0.0278, 0.0335, 0.0245],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-09 22:34:53,693 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100068.0, num_to_drop=1, layers_to_drop={3}
+2023-03-09 22:34:54,378 INFO [train.py:898] (1/4) Epoch 28, batch 1950, loss[loss=0.1831, simple_loss=0.2798, pruned_loss=0.04322, over 18136.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2454, pruned_loss=0.03218, over 3579563.42 frames. ], batch size: 62, lr: 4.00e-03, grad_scale: 16.0
+2023-03-09 22:35:03,377 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100076.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:35:41,385 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.807e+02 2.428e+02 2.748e+02 3.405e+02 9.642e+02, threshold=5.496e+02, percent-clipped=3.0
+2023-03-09 22:35:53,069 INFO [train.py:898] (1/4) Epoch 28, batch 2000, loss[loss=0.1401, simple_loss=0.2306, pruned_loss=0.02481, over 18354.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.2457, pruned_loss=0.03228, over 3584185.53 frames. ], batch size: 50, lr: 4.00e-03, grad_scale: 16.0
+2023-03-09 22:35:59,731 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100124.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:35:59,818 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100124.0, num_to_drop=1, layers_to_drop={2}
+2023-03-09 22:36:14,332 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8743, 3.0484, 2.8251, 3.0782, 3.8908, 3.8287, 3.3945, 3.1398],
+ device='cuda:1'), covar=tensor([0.0252, 0.0312, 0.0512, 0.0377, 0.0175, 0.0155, 0.0347, 0.0354],
+ device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0149, 0.0170, 0.0170, 0.0145, 0.0132, 0.0163, 0.0167],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 22:36:21,178 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7771, 3.6236, 4.9263, 2.9614, 4.3300, 2.6276, 3.0605, 1.7462],
+ device='cuda:1'), covar=tensor([0.1271, 0.0962, 0.0177, 0.0974, 0.0512, 0.2722, 0.2625, 0.2398],
+ device='cuda:1'), in_proj_covar=tensor([0.0233, 0.0255, 0.0230, 0.0210, 0.0267, 0.0282, 0.0339, 0.0248],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-09 22:36:52,990 INFO [train.py:898] (1/4) Epoch 28, batch 2050, loss[loss=0.1755, simple_loss=0.2657, pruned_loss=0.04267, over 17900.00 frames. ], tot_loss[loss=0.155, simple_loss=0.2458, pruned_loss=0.03204, over 3592737.68 frames. ], batch size: 65, lr: 4.00e-03, grad_scale: 16.0
+2023-03-09 22:36:56,633 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100172.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 22:37:11,060 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6465, 3.4901, 2.1608, 4.4684, 3.2443, 3.9678, 2.5874, 3.8153],
+ device='cuda:1'), covar=tensor([0.0655, 0.0922, 0.1751, 0.0491, 0.0853, 0.0380, 0.1339, 0.0566],
+ device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0233, 0.0196, 0.0298, 0.0198, 0.0273, 0.0207, 0.0208],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 22:37:33,173 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2610, 5.2575, 5.5075, 5.5237, 5.1909, 6.0807, 5.6965, 5.3311],
+ device='cuda:1'), covar=tensor([0.1183, 0.0630, 0.0904, 0.0703, 0.1285, 0.0676, 0.0658, 0.1835],
+ device='cuda:1'), in_proj_covar=tensor([0.0378, 0.0309, 0.0335, 0.0337, 0.0344, 0.0452, 0.0302, 0.0445],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004],
+ device='cuda:1')
+2023-03-09 22:37:39,777 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.832e+02 2.490e+02 2.932e+02 3.409e+02 5.416e+02, threshold=5.865e+02, percent-clipped=0.0
+2023-03-09 22:37:51,112 INFO [train.py:898] (1/4) Epoch 28, batch 2100, loss[loss=0.1466, simple_loss=0.2374, pruned_loss=0.02788, over 18485.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2462, pruned_loss=0.03218, over 3594260.19 frames. ], batch size: 47, lr: 4.00e-03, grad_scale: 16.0
+2023-03-09 22:37:57,844 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100224.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:38:14,629 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100238.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:38:22,649 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100245.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:38:50,260 INFO [train.py:898] (1/4) Epoch 28, batch 2150, loss[loss=0.1518, simple_loss=0.254, pruned_loss=0.02486, over 18400.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2457, pruned_loss=0.03205, over 3601155.98 frames. ], batch size: 52, lr: 4.00e-03, grad_scale: 16.0
+2023-03-09 22:38:54,618 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100272.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:39:00,467 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9411, 5.7785, 5.5097, 5.6758, 5.1893, 5.5517, 5.9464, 5.7562],
+ device='cuda:1'), covar=tensor([0.2516, 0.1223, 0.0778, 0.1171, 0.2691, 0.1200, 0.0943, 0.1144],
+ device='cuda:1'), in_proj_covar=tensor([0.0633, 0.0555, 0.0399, 0.0579, 0.0776, 0.0576, 0.0791, 0.0607],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004],
+ device='cuda:1')
+2023-03-09 22:39:10,575 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6088, 3.1283, 3.8245, 3.5763, 3.0807, 2.9534, 3.5659, 3.9249],
+ device='cuda:1'), covar=tensor([0.0746, 0.1082, 0.0358, 0.0485, 0.0832, 0.1063, 0.0469, 0.0497],
+ device='cuda:1'), in_proj_covar=tensor([0.0155, 0.0286, 0.0174, 0.0188, 0.0199, 0.0199, 0.0204, 0.0216],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 22:39:19,975 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100293.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:39:27,049 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100299.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:39:38,057 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.888e+02 2.494e+02 2.925e+02 3.420e+02 7.055e+02, threshold=5.850e+02, percent-clipped=2.0
+2023-03-09 22:39:50,186 INFO [train.py:898] (1/4) Epoch 28, batch 2200, loss[loss=0.1398, simple_loss=0.2259, pruned_loss=0.0269, over 18121.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.246, pruned_loss=0.03207, over 3602745.75 frames. ], batch size: 44, lr: 4.00e-03, grad_scale: 16.0
+2023-03-09 22:39:59,993 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9925, 5.1436, 5.2839, 5.4460, 4.9611, 5.8832, 5.5425, 5.1473],
+ device='cuda:1'), covar=tensor([0.1179, 0.0649, 0.0887, 0.0707, 0.1312, 0.0690, 0.0654, 0.1759],
+ device='cuda:1'), in_proj_covar=tensor([0.0375, 0.0306, 0.0333, 0.0334, 0.0341, 0.0447, 0.0300, 0.0442],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004],
+ device='cuda:1')
+2023-03-09 22:40:11,029 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100336.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:40:43,177 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100363.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 22:40:49,836 INFO [train.py:898] (1/4) Epoch 28, batch 2250, loss[loss=0.1522, simple_loss=0.2464, pruned_loss=0.02905, over 18620.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2469, pruned_loss=0.03219, over 3591297.89 frames. ], batch size: 52, lr: 4.00e-03, grad_scale: 16.0
+2023-03-09 22:41:07,380 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100384.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:41:29,650 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100402.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:41:37,421 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.960e+02 2.460e+02 2.795e+02 3.389e+02 7.041e+02, threshold=5.590e+02, percent-clipped=1.0
+2023-03-09 22:41:49,304 INFO [train.py:898] (1/4) Epoch 28, batch 2300, loss[loss=0.1758, simple_loss=0.2665, pruned_loss=0.0425, over 18222.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2469, pruned_loss=0.03245, over 3598907.61 frames. ], batch size: 60, lr: 4.00e-03, grad_scale: 16.0
+2023-03-09 22:42:41,820 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100463.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 22:42:48,930 INFO [train.py:898] (1/4) Epoch 28, batch 2350, loss[loss=0.1688, simple_loss=0.2579, pruned_loss=0.03987, over 18353.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2458, pruned_loss=0.03183, over 3608038.05 frames. ], batch size: 56, lr: 4.00e-03, grad_scale: 16.0
+2023-03-09 22:43:34,978 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1156, 3.0818, 3.0079, 2.7558, 2.9773, 2.4995, 2.5779, 3.1274],
+ device='cuda:1'), covar=tensor([0.0088, 0.0118, 0.0110, 0.0151, 0.0119, 0.0209, 0.0229, 0.0087],
+ device='cuda:1'), in_proj_covar=tensor([0.0159, 0.0178, 0.0148, 0.0198, 0.0157, 0.0190, 0.0193, 0.0136],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-09 22:43:36,867 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.732e+02 2.362e+02 2.730e+02 3.461e+02 8.407e+02, threshold=5.459e+02, percent-clipped=2.0
+2023-03-09 22:43:48,264 INFO [train.py:898] (1/4) Epoch 28, batch 2400, loss[loss=0.1506, simple_loss=0.2385, pruned_loss=0.03137, over 18272.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2462, pruned_loss=0.03183, over 3601888.28 frames. ], batch size: 47, lr: 4.00e-03, grad_scale: 16.0
+2023-03-09 22:44:21,860 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.2635, 2.7470, 2.3499, 2.7019, 3.3933, 3.3163, 3.0062, 2.6863],
+ device='cuda:1'), covar=tensor([0.0220, 0.0286, 0.0624, 0.0415, 0.0216, 0.0208, 0.0404, 0.0380],
+ device='cuda:1'), in_proj_covar=tensor([0.0147, 0.0147, 0.0168, 0.0167, 0.0143, 0.0129, 0.0161, 0.0164],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 22:44:46,627 INFO [train.py:898] (1/4) Epoch 28, batch 2450, loss[loss=0.1501, simple_loss=0.2326, pruned_loss=0.03383, over 18484.00 frames. ], tot_loss[loss=0.1552, simple_loss=0.2466, pruned_loss=0.03194, over 3599322.70 frames. ], batch size: 44, lr: 3.99e-03, grad_scale: 16.0
+2023-03-09 22:45:16,707 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100594.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:45:34,576 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.748e+02 2.510e+02 2.947e+02 3.432e+02 5.400e+02, threshold=5.894e+02, percent-clipped=0.0
+2023-03-09 22:45:45,722 INFO [train.py:898] (1/4) Epoch 28, batch 2500, loss[loss=0.188, simple_loss=0.2724, pruned_loss=0.05177, over 12354.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2468, pruned_loss=0.032, over 3588155.68 frames. ], batch size: 130, lr: 3.99e-03, grad_scale: 16.0
+2023-03-09 22:45:46,146 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8023, 3.2204, 4.4918, 3.7605, 2.7754, 4.7552, 3.9723, 3.1220],
+ device='cuda:1'), covar=tensor([0.0498, 0.1282, 0.0276, 0.0503, 0.1553, 0.0207, 0.0584, 0.0859],
+ device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0247, 0.0236, 0.0175, 0.0231, 0.0221, 0.0258, 0.0201],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-09 22:46:39,052 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100663.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 22:46:45,728 INFO [train.py:898] (1/4) Epoch 28, batch 2550, loss[loss=0.147, simple_loss=0.2402, pruned_loss=0.02692, over 18370.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.247, pruned_loss=0.03207, over 3584206.97 frames. ], batch size: 50, lr: 3.99e-03, grad_scale: 16.0
+2023-03-09 22:47:23,698 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4533, 3.3652, 2.1086, 4.2848, 2.9742, 3.9346, 2.3976, 3.7361],
+ device='cuda:1'), covar=tensor([0.0716, 0.0885, 0.1676, 0.0534, 0.0884, 0.0340, 0.1275, 0.0481],
+ device='cuda:1'), in_proj_covar=tensor([0.0224, 0.0233, 0.0195, 0.0297, 0.0198, 0.0273, 0.0207, 0.0208],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 22:47:33,004 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.831e+02 2.422e+02 2.744e+02 3.332e+02 5.019e+02, threshold=5.488e+02, percent-clipped=0.0
+2023-03-09 22:47:36,121 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100711.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 22:47:44,774 INFO [train.py:898] (1/4) Epoch 28, batch 2600, loss[loss=0.152, simple_loss=0.233, pruned_loss=0.03545, over 18428.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2479, pruned_loss=0.03245, over 3580801.63 frames. ], batch size: 42, lr: 3.99e-03, grad_scale: 16.0
+2023-03-09 22:47:58,475 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100730.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 22:48:31,050 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100758.0, num_to_drop=1, layers_to_drop={2}
+2023-03-09 22:48:43,892 INFO [train.py:898] (1/4) Epoch 28, batch 2650, loss[loss=0.1653, simple_loss=0.2553, pruned_loss=0.0377, over 18264.00 frames. ], tot_loss[loss=0.1563, simple_loss=0.2475, pruned_loss=0.03253, over 3585174.31 frames. ], batch size: 57, lr: 3.99e-03, grad_scale: 16.0
+2023-03-09 22:49:09,498 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100791.0, num_to_drop=1, layers_to_drop={3}
+2023-03-09 22:49:31,291 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.698e+02 2.383e+02 2.847e+02 3.406e+02 8.018e+02, threshold=5.694e+02, percent-clipped=5.0
+2023-03-09 22:49:43,074 INFO [train.py:898] (1/4) Epoch 28, batch 2700, loss[loss=0.1687, simple_loss=0.26, pruned_loss=0.03874, over 18361.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2475, pruned_loss=0.03278, over 3583266.17 frames. ], batch size: 56, lr: 3.99e-03, grad_scale: 16.0
+2023-03-09 22:50:41,483 INFO [train.py:898] (1/4) Epoch 28, batch 2750, loss[loss=0.1365, simple_loss=0.2196, pruned_loss=0.02668, over 18413.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2478, pruned_loss=0.03282, over 3582235.76 frames. ], batch size: 43, lr: 3.99e-03, grad_scale: 16.0
+2023-03-09 22:51:05,221 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6901, 3.6076, 2.3871, 4.4956, 3.2102, 4.3374, 2.8474, 4.0562],
+ device='cuda:1'), covar=tensor([0.0625, 0.0807, 0.1406, 0.0488, 0.0814, 0.0280, 0.1054, 0.0428],
+ device='cuda:1'), in_proj_covar=tensor([0.0224, 0.0234, 0.0195, 0.0299, 0.0199, 0.0273, 0.0207, 0.0209],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 22:51:10,540 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100894.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:51:16,740 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0
+2023-03-09 22:51:22,005 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1754, 5.0338, 5.1903, 4.8920, 4.9156, 5.1616, 5.3229, 5.1609],
+ device='cuda:1'), covar=tensor([0.0085, 0.0085, 0.0089, 0.0156, 0.0071, 0.0159, 0.0094, 0.0137],
+ device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0075, 0.0080, 0.0101, 0.0080, 0.0109, 0.0092, 0.0092],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:1')
+2023-03-09 22:51:27,171 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.680e+02 2.345e+02 2.707e+02 3.433e+02 5.623e+02, threshold=5.414e+02, percent-clipped=0.0
+2023-03-09 22:51:40,784 INFO [train.py:898] (1/4) Epoch 28, batch 2800, loss[loss=0.1471, simple_loss=0.2278, pruned_loss=0.03317, over 18156.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2479, pruned_loss=0.03268, over 3576137.42 frames. ], batch size: 44, lr: 3.99e-03, grad_scale: 16.0
+2023-03-09 22:52:07,461 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100942.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:52:22,923 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0
+2023-03-09 22:52:38,800 INFO [train.py:898] (1/4) Epoch 28, batch 2850, loss[loss=0.1503, simple_loss=0.2461, pruned_loss=0.02731, over 16946.00 frames. ], tot_loss[loss=0.1557, simple_loss=0.2469, pruned_loss=0.03228, over 3589509.24 frames. ], batch size: 78, lr: 3.99e-03, grad_scale: 16.0
+2023-03-09 22:53:02,047 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6631, 5.4439, 5.1667, 5.3329, 4.8416, 5.2438, 5.7025, 5.5094],
+ device='cuda:1'), covar=tensor([0.2716, 0.1357, 0.1004, 0.1288, 0.2814, 0.1316, 0.0978, 0.1256],
+ device='cuda:1'), in_proj_covar=tensor([0.0636, 0.0561, 0.0405, 0.0584, 0.0780, 0.0581, 0.0793, 0.0612],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004],
+ device='cuda:1')
+2023-03-09 22:53:26,392 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.467e+02 2.435e+02 2.818e+02 3.350e+02 5.539e+02, threshold=5.635e+02, percent-clipped=3.0
+2023-03-09 22:53:31,514 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0
+2023-03-09 22:53:37,848 INFO [train.py:898] (1/4) Epoch 28, batch 2900, loss[loss=0.1473, simple_loss=0.2318, pruned_loss=0.03143, over 18506.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2468, pruned_loss=0.03224, over 3601530.94 frames. ], batch size: 47, lr: 3.99e-03, grad_scale: 16.0
+2023-03-09 22:54:25,084 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=101058.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:54:37,490 INFO [train.py:898] (1/4) Epoch 28, batch 2950, loss[loss=0.1648, simple_loss=0.2628, pruned_loss=0.03339, over 18113.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.2462, pruned_loss=0.03205, over 3600706.52 frames. ], batch size: 62, lr: 3.98e-03, grad_scale: 16.0
+2023-03-09 22:54:48,725 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0
+2023-03-09 22:54:58,138 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101086.0, num_to_drop=1, layers_to_drop={3}
+2023-03-09 22:55:21,287 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=101106.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:55:22,654 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101107.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:55:24,472 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.540e+02 2.503e+02 3.089e+02 3.620e+02 7.481e+02, threshold=6.178e+02, percent-clipped=1.0
+2023-03-09 22:55:35,815 INFO [train.py:898] (1/4) Epoch 28, batch 3000, loss[loss=0.1447, simple_loss=0.2275, pruned_loss=0.03095, over 18402.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2463, pruned_loss=0.03234, over 3573926.30 frames. ], batch size: 42, lr: 3.98e-03, grad_scale: 16.0
+2023-03-09 22:55:35,815 INFO [train.py:923] (1/4) Computing validation loss
+2023-03-09 22:55:47,891 INFO [train.py:932] (1/4) Epoch 28, validation: loss=0.1496, simple_loss=0.2475, pruned_loss=0.02587, over 944034.00 frames.
+2023-03-09 22:55:47,892 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB
+2023-03-09 22:56:09,375 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0
+2023-03-09 22:56:45,952 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101168.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 22:56:46,677 INFO [train.py:898] (1/4) Epoch 28, batch 3050, loss[loss=0.1371, simple_loss=0.2291, pruned_loss=0.02252, over 18398.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2461, pruned_loss=0.03246, over 3575155.18 frames. ], batch size: 48, lr: 3.98e-03, grad_scale: 16.0
+2023-03-09 22:57:20,953 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2332, 5.2817, 5.5364, 5.5320, 5.1952, 6.0713, 5.6949, 5.4100],
+ device='cuda:1'), covar=tensor([0.1166, 0.0606, 0.0867, 0.0823, 0.1314, 0.0737, 0.0695, 0.1781],
+ device='cuda:1'), in_proj_covar=tensor([0.0377, 0.0309, 0.0336, 0.0337, 0.0343, 0.0452, 0.0303, 0.0444],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004],
+ device='cuda:1')
+2023-03-09 22:57:32,631 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.576e+02 2.344e+02 2.759e+02 3.609e+02 9.420e+02, threshold=5.517e+02, percent-clipped=1.0
+2023-03-09 22:57:45,987 INFO [train.py:898] (1/4) Epoch 28, batch 3100, loss[loss=0.1398, simple_loss=0.2269, pruned_loss=0.02632, over 18507.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2459, pruned_loss=0.03236, over 3575899.02 frames. ], batch size: 47, lr: 3.98e-03, grad_scale: 16.0
+2023-03-09 22:57:48,851 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8890, 4.2560, 2.5421, 4.0201, 5.2348, 2.5651, 3.8115, 4.1392],
+ device='cuda:1'), covar=tensor([0.0235, 0.1188, 0.1748, 0.0716, 0.0104, 0.1516, 0.0760, 0.0726],
+ device='cuda:1'), in_proj_covar=tensor([0.0187, 0.0284, 0.0211, 0.0202, 0.0146, 0.0188, 0.0225, 0.0233],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 22:58:09,355 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7120, 4.1864, 2.3565, 3.9359, 5.1293, 2.5591, 3.7500, 3.9088],
+ device='cuda:1'), covar=tensor([0.0251, 0.1039, 0.1687, 0.0687, 0.0093, 0.1247, 0.0698, 0.0759],
+ device='cuda:1'), in_proj_covar=tensor([0.0187, 0.0283, 0.0210, 0.0201, 0.0145, 0.0187, 0.0223, 0.0231],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 22:58:45,527 INFO [train.py:898] (1/4) Epoch 28, batch 3150, loss[loss=0.1571, simple_loss=0.2526, pruned_loss=0.03076, over 18493.00 frames. ], tot_loss[loss=0.156, simple_loss=0.2464, pruned_loss=0.03282, over 3554476.74 frames. ], batch size: 51, lr: 3.98e-03, grad_scale: 16.0
+2023-03-09 22:59:31,368 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.645e+02 2.594e+02 3.004e+02 3.371e+02 7.669e+02, threshold=6.008e+02, percent-clipped=3.0
+2023-03-09 22:59:43,362 INFO [train.py:898] (1/4) Epoch 28, batch 3200, loss[loss=0.1672, simple_loss=0.2544, pruned_loss=0.03996, over 16172.00 frames. ], tot_loss[loss=0.1563, simple_loss=0.2469, pruned_loss=0.03284, over 3560120.59 frames. ], batch size: 94, lr: 3.98e-03, grad_scale: 16.0
+2023-03-09 23:00:18,398 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8780, 4.9552, 2.5644, 4.7453, 4.7220, 4.9493, 4.7529, 2.6612],
+ device='cuda:1'), covar=tensor([0.0223, 0.0066, 0.0823, 0.0099, 0.0069, 0.0069, 0.0094, 0.1013],
+ device='cuda:1'), in_proj_covar=tensor([0.0094, 0.0084, 0.0098, 0.0099, 0.0091, 0.0080, 0.0087, 0.0099],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005],
+ device='cuda:1')
+2023-03-09 23:00:41,853 INFO [train.py:898] (1/4) Epoch 28, batch 3250, loss[loss=0.1518, simple_loss=0.2479, pruned_loss=0.02787, over 18515.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2464, pruned_loss=0.03241, over 3560113.49 frames. ], batch size: 53, lr: 3.98e-03, grad_scale: 16.0
+2023-03-09 23:00:56,903 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4354, 5.3741, 5.7653, 5.7114, 5.3670, 6.2648, 5.9358, 5.4718],
+ device='cuda:1'), covar=tensor([0.1093, 0.0609, 0.0728, 0.0726, 0.1339, 0.0631, 0.0607, 0.1522],
+ device='cuda:1'), in_proj_covar=tensor([0.0381, 0.0312, 0.0339, 0.0339, 0.0347, 0.0455, 0.0306, 0.0448],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004],
+ device='cuda:1')
+2023-03-09 23:01:02,768 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=101386.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 23:01:10,057 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4152, 5.3322, 5.6543, 5.6444, 5.3369, 6.1864, 5.8562, 5.4358],
+ device='cuda:1'), covar=tensor([0.1104, 0.0572, 0.0801, 0.0805, 0.1301, 0.0669, 0.0591, 0.1586],
+ device='cuda:1'), in_proj_covar=tensor([0.0380, 0.0311, 0.0339, 0.0339, 0.0347, 0.0454, 0.0305, 0.0447],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004],
+ device='cuda:1')
+2023-03-09 23:01:29,437 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.724e+02 2.697e+02 3.326e+02 3.955e+02 8.507e+02, threshold=6.653e+02, percent-clipped=3.0
+2023-03-09 23:01:40,848 INFO [train.py:898] (1/4) Epoch 28, batch 3300, loss[loss=0.1552, simple_loss=0.2509, pruned_loss=0.02973, over 18563.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2462, pruned_loss=0.03229, over 3567135.57 frames. ], batch size: 54, lr: 3.98e-03, grad_scale: 32.0
+2023-03-09 23:01:55,570 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1802, 5.7709, 5.4090, 5.4778, 5.3687, 5.1401, 5.8240, 5.7670],
+ device='cuda:1'), covar=tensor([0.1183, 0.0800, 0.0572, 0.0782, 0.1415, 0.0751, 0.0567, 0.0739],
+ device='cuda:1'), in_proj_covar=tensor([0.0643, 0.0569, 0.0409, 0.0592, 0.0787, 0.0588, 0.0801, 0.0617],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004],
+ device='cuda:1')
+2023-03-09 23:02:00,150 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=101434.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 23:02:33,304 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101463.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:02:39,848 INFO [train.py:898] (1/4) Epoch 28, batch 3350, loss[loss=0.1951, simple_loss=0.2873, pruned_loss=0.05148, over 18331.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2463, pruned_loss=0.03233, over 3573417.25 frames. ], batch size: 56, lr: 3.98e-03, grad_scale: 16.0
+2023-03-09 23:02:45,006 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. limit=2.0
+2023-03-09 23:03:28,358 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.802e+02 2.584e+02 3.127e+02 3.784e+02 7.730e+02, threshold=6.254e+02, percent-clipped=1.0
+2023-03-09 23:03:38,132 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7402, 3.6497, 5.0724, 2.9358, 4.3556, 2.5435, 2.9122, 1.7495],
+ device='cuda:1'), covar=tensor([0.1380, 0.0999, 0.0143, 0.1074, 0.0539, 0.2852, 0.3074, 0.2457],
+ device='cuda:1'), in_proj_covar=tensor([0.0232, 0.0254, 0.0230, 0.0210, 0.0266, 0.0282, 0.0339, 0.0248],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-09 23:03:38,771 INFO [train.py:898] (1/4) Epoch 28, batch 3400, loss[loss=0.1686, simple_loss=0.2601, pruned_loss=0.03851, over 18259.00 frames. ], tot_loss[loss=0.1557, simple_loss=0.2468, pruned_loss=0.03231, over 3580415.80 frames. ], batch size: 57, lr: 3.98e-03, grad_scale: 16.0
+2023-03-09 23:03:45,552 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.99 vs. limit=5.0
+2023-03-09 23:04:38,398 INFO [train.py:898] (1/4) Epoch 28, batch 3450, loss[loss=0.1377, simple_loss=0.2178, pruned_loss=0.02879, over 18427.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2466, pruned_loss=0.03225, over 3576926.90 frames. ], batch size: 48, lr: 3.98e-03, grad_scale: 16.0
+2023-03-09 23:04:55,961 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0
+2023-03-09 23:05:13,011 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6505, 3.6726, 3.4963, 3.1769, 3.4214, 2.8751, 2.6589, 3.6181],
+ device='cuda:1'), covar=tensor([0.0076, 0.0091, 0.0088, 0.0139, 0.0103, 0.0190, 0.0244, 0.0084],
+ device='cuda:1'), in_proj_covar=tensor([0.0158, 0.0178, 0.0148, 0.0197, 0.0157, 0.0189, 0.0192, 0.0136],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-09 23:05:27,574 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0
+2023-03-09 23:05:28,085 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.546e+02 2.589e+02 3.041e+02 3.646e+02 6.515e+02, threshold=6.081e+02, percent-clipped=1.0
+2023-03-09 23:05:37,349 INFO [train.py:898] (1/4) Epoch 28, batch 3500, loss[loss=0.1534, simple_loss=0.235, pruned_loss=0.03593, over 18580.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2463, pruned_loss=0.03215, over 3576164.32 frames. ], batch size: 45, lr: 3.97e-03, grad_scale: 8.0
+2023-03-09 23:06:04,797 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8533, 4.9209, 4.9860, 4.7028, 4.7523, 4.7441, 5.0529, 5.0939],
+ device='cuda:1'), covar=tensor([0.0080, 0.0059, 0.0058, 0.0118, 0.0057, 0.0150, 0.0070, 0.0079],
+ device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0075, 0.0080, 0.0100, 0.0079, 0.0109, 0.0092, 0.0092],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:1')
+2023-03-09 23:06:34,323 INFO [train.py:898] (1/4) Epoch 28, batch 3550, loss[loss=0.149, simple_loss=0.2386, pruned_loss=0.02969, over 18497.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.2461, pruned_loss=0.03199, over 3580707.74 frames. ], batch size: 51, lr: 3.97e-03, grad_scale: 8.0
+2023-03-09 23:06:50,610 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101684.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:07:07,914 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0520, 2.5007, 3.2501, 3.0725, 2.3959, 3.4153, 3.2374, 2.4325],
+ device='cuda:1'), covar=tensor([0.0559, 0.1400, 0.0542, 0.0444, 0.1449, 0.0345, 0.0650, 0.0973],
+ device='cuda:1'), in_proj_covar=tensor([0.0223, 0.0246, 0.0234, 0.0174, 0.0229, 0.0220, 0.0258, 0.0199],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-09 23:07:16,663 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9566, 5.0013, 5.1127, 4.7489, 4.8548, 4.7874, 5.0937, 5.1748],
+ device='cuda:1'), covar=tensor([0.0075, 0.0075, 0.0053, 0.0129, 0.0067, 0.0159, 0.0088, 0.0120],
+ device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0074, 0.0080, 0.0099, 0.0079, 0.0108, 0.0091, 0.0091],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:1')
+2023-03-09 23:07:19,608 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.673e+02 2.328e+02 2.616e+02 3.053e+02 4.840e+02, threshold=5.233e+02, percent-clipped=0.0
+2023-03-09 23:07:28,385 INFO [train.py:898] (1/4) Epoch 28, batch 3600, loss[loss=0.1382, simple_loss=0.2208, pruned_loss=0.02778, over 18407.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.246, pruned_loss=0.03194, over 3580209.23 frames. ], batch size: 43, lr: 3.97e-03, grad_scale: 8.0
+2023-03-09 23:07:57,157 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101745.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:08:29,826 INFO [train.py:898] (1/4) Epoch 29, batch 0, loss[loss=0.1581, simple_loss=0.247, pruned_loss=0.03465, over 18359.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.247, pruned_loss=0.03465, over 18359.00 frames. ], batch size: 46, lr: 3.90e-03, grad_scale: 8.0
+2023-03-09 23:08:29,826 INFO [train.py:923] (1/4) Computing validation loss
+2023-03-09 23:08:41,778 INFO [train.py:932] (1/4) Epoch 29, validation: loss=0.1494, simple_loss=0.2476, pruned_loss=0.02556, over 944034.00 frames.
+2023-03-09 23:08:41,779 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB
+2023-03-09 23:08:51,758 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.65 vs. limit=5.0
+2023-03-09 23:08:54,595 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=101763.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:09:40,294 INFO [train.py:898] (1/4) Epoch 29, batch 50, loss[loss=0.164, simple_loss=0.262, pruned_loss=0.03304, over 17131.00 frames. ], tot_loss[loss=0.155, simple_loss=0.2464, pruned_loss=0.03184, over 809703.56 frames. ], batch size: 78, lr: 3.90e-03, grad_scale: 4.0
+2023-03-09 23:09:50,574 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=101811.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:09:51,449 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.892e+02 2.454e+02 2.836e+02 3.650e+02 1.455e+03, threshold=5.673e+02, percent-clipped=9.0
+2023-03-09 23:10:10,896 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5275, 2.8891, 2.3821, 2.8249, 3.6195, 3.5223, 3.0865, 2.7625],
+ device='cuda:1'), covar=tensor([0.0176, 0.0259, 0.0627, 0.0422, 0.0186, 0.0175, 0.0420, 0.0452],
+ device='cuda:1'), in_proj_covar=tensor([0.0148, 0.0146, 0.0168, 0.0167, 0.0143, 0.0131, 0.0162, 0.0164],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 23:10:38,659 INFO [train.py:898] (1/4) Epoch 29, batch 100, loss[loss=0.1735, simple_loss=0.2695, pruned_loss=0.03881, over 18243.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.2463, pruned_loss=0.03198, over 1430578.07 frames. ], batch size: 60, lr: 3.90e-03, grad_scale: 4.0
+2023-03-09 23:11:08,516 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8401, 4.4518, 4.4734, 3.4205, 3.7289, 3.4941, 2.6845, 2.5483],
+ device='cuda:1'), covar=tensor([0.0212, 0.0161, 0.0083, 0.0318, 0.0344, 0.0229, 0.0717, 0.0831],
+ device='cuda:1'), in_proj_covar=tensor([0.0076, 0.0064, 0.0069, 0.0072, 0.0092, 0.0071, 0.0078, 0.0087],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006],
+ device='cuda:1')
+2023-03-09 23:11:38,476 INFO [train.py:898] (1/4) Epoch 29, batch 150, loss[loss=0.1277, simple_loss=0.2137, pruned_loss=0.02087, over 18238.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.2465, pruned_loss=0.03189, over 1912780.46 frames. ], batch size: 45, lr: 3.90e-03, grad_scale: 4.0
+2023-03-09 23:11:49,335 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.704e+02 2.518e+02 2.968e+02 3.653e+02 8.556e+02, threshold=5.936e+02, percent-clipped=5.0
+2023-03-09 23:12:00,402 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101920.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 23:12:38,425 INFO [train.py:898] (1/4) Epoch 29, batch 200, loss[loss=0.161, simple_loss=0.2564, pruned_loss=0.03277, over 18502.00 frames. ], tot_loss[loss=0.156, simple_loss=0.2474, pruned_loss=0.03225, over 2271111.81 frames. ], batch size: 53, lr: 3.90e-03, grad_scale: 4.0
+2023-03-09 23:13:01,885 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0
+2023-03-09 23:13:12,264 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101981.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 23:13:18,840 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101987.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:13:42,372 INFO [train.py:898] (1/4) Epoch 29, batch 250, loss[loss=0.145, simple_loss=0.2382, pruned_loss=0.02592, over 18503.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2461, pruned_loss=0.03187, over 2573210.09 frames. ], batch size: 47, lr: 3.90e-03, grad_scale: 4.0
+2023-03-09 23:13:52,738 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.574e+02 2.573e+02 2.918e+02 3.581e+02 7.098e+02, threshold=5.836e+02, percent-clipped=3.0
+2023-03-09 23:14:26,061 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102040.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:14:35,286 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102048.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:14:41,079 INFO [train.py:898] (1/4) Epoch 29, batch 300, loss[loss=0.1783, simple_loss=0.2736, pruned_loss=0.04145, over 18304.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2469, pruned_loss=0.03201, over 2793859.97 frames. ], batch size: 54, lr: 3.90e-03, grad_scale: 4.0
+2023-03-09 23:14:44,881 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9811, 5.4810, 5.4293, 5.4603, 4.9489, 5.3819, 4.8467, 5.3647],
+ device='cuda:1'), covar=tensor([0.0242, 0.0227, 0.0169, 0.0383, 0.0378, 0.0197, 0.0956, 0.0268],
+ device='cuda:1'), in_proj_covar=tensor([0.0234, 0.0277, 0.0277, 0.0354, 0.0287, 0.0286, 0.0316, 0.0278],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:1')
+2023-03-09 23:14:47,226 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7815, 3.3169, 4.4620, 3.8359, 3.0498, 4.7644, 4.0609, 3.0514],
+ device='cuda:1'), covar=tensor([0.0527, 0.1345, 0.0326, 0.0507, 0.1462, 0.0192, 0.0585, 0.1010],
+ device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0248, 0.0236, 0.0175, 0.0230, 0.0222, 0.0260, 0.0200],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-09 23:14:54,255 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6176, 2.9155, 2.5636, 2.9294, 3.6555, 3.5832, 3.1298, 2.8688],
+ device='cuda:1'), covar=tensor([0.0251, 0.0319, 0.0582, 0.0396, 0.0229, 0.0190, 0.0442, 0.0446],
+ device='cuda:1'), in_proj_covar=tensor([0.0149, 0.0148, 0.0168, 0.0167, 0.0144, 0.0132, 0.0163, 0.0167],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 23:15:02,788 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9203, 5.0321, 5.1071, 4.7652, 4.8216, 4.7685, 5.0803, 5.1372],
+ device='cuda:1'), covar=tensor([0.0088, 0.0070, 0.0061, 0.0134, 0.0058, 0.0185, 0.0089, 0.0094],
+ device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0075, 0.0081, 0.0101, 0.0079, 0.0109, 0.0092, 0.0092],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:1')
+2023-03-09 23:15:40,907 INFO [train.py:898] (1/4) Epoch 29, batch 350, loss[loss=0.1712, simple_loss=0.2708, pruned_loss=0.03582, over 18375.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.248, pruned_loss=0.03218, over 2972245.32 frames. ], batch size: 55, lr: 3.89e-03, grad_scale: 4.0
+2023-03-09 23:15:51,452 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.890e+02 2.462e+02 2.956e+02 3.341e+02 6.335e+02, threshold=5.913e+02, percent-clipped=2.0
+2023-03-09 23:16:13,377 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9023, 5.3820, 5.3846, 5.5074, 4.7736, 5.3089, 4.2751, 5.2963],
+ device='cuda:1'), covar=tensor([0.0279, 0.0361, 0.0241, 0.0409, 0.0419, 0.0268, 0.1619, 0.0337],
+ device='cuda:1'), in_proj_covar=tensor([0.0235, 0.0279, 0.0279, 0.0357, 0.0289, 0.0288, 0.0319, 0.0279],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:1')
+2023-03-09 23:16:40,559 INFO [train.py:898] (1/4) Epoch 29, batch 400, loss[loss=0.1424, simple_loss=0.2272, pruned_loss=0.02878, over 18355.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2474, pruned_loss=0.03216, over 3119328.55 frames. ], batch size: 46, lr: 3.89e-03, grad_scale: 8.0
+2023-03-09 23:17:40,599 INFO [train.py:898] (1/4) Epoch 29, batch 450, loss[loss=0.1855, simple_loss=0.2688, pruned_loss=0.05113, over 11948.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2475, pruned_loss=0.03234, over 3218370.64 frames. ], batch size: 129, lr: 3.89e-03, grad_scale: 8.0
+2023-03-09 23:17:50,992 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.718e+02 2.619e+02 3.042e+02 3.635e+02 5.576e+02, threshold=6.084e+02, percent-clipped=0.0
+2023-03-09 23:18:13,708 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6143, 3.6195, 3.4403, 3.1000, 3.3361, 2.8262, 2.6938, 3.6091],
+ device='cuda:1'), covar=tensor([0.0070, 0.0087, 0.0088, 0.0141, 0.0107, 0.0188, 0.0244, 0.0074],
+ device='cuda:1'), in_proj_covar=tensor([0.0159, 0.0179, 0.0149, 0.0197, 0.0158, 0.0190, 0.0194, 0.0137],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-09 23:18:40,652 INFO [train.py:898] (1/4) Epoch 29, batch 500, loss[loss=0.1411, simple_loss=0.2361, pruned_loss=0.02302, over 18544.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2474, pruned_loss=0.03194, over 3310966.49 frames. ], batch size: 49, lr: 3.89e-03, grad_scale: 8.0
+2023-03-09 23:19:07,682 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102276.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 23:19:40,228 INFO [train.py:898] (1/4) Epoch 29, batch 550, loss[loss=0.1644, simple_loss=0.2529, pruned_loss=0.03794, over 11806.00 frames. ], tot_loss[loss=0.155, simple_loss=0.2467, pruned_loss=0.03166, over 3364377.95 frames. ], batch size: 130, lr: 3.89e-03, grad_scale: 8.0
+2023-03-09 23:19:50,535 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.948e+02 2.553e+02 3.006e+02 3.452e+02 5.617e+02, threshold=6.011e+02, percent-clipped=0.0
+2023-03-09 23:20:07,480 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7181, 3.1725, 4.6381, 4.0412, 3.2129, 4.8460, 4.2126, 3.1771],
+ device='cuda:1'), covar=tensor([0.0593, 0.1416, 0.0260, 0.0416, 0.1264, 0.0213, 0.0516, 0.0890],
+ device='cuda:1'), in_proj_covar=tensor([0.0226, 0.0249, 0.0237, 0.0175, 0.0231, 0.0222, 0.0261, 0.0201],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-09 23:20:24,215 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102340.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:20:28,154 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102343.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:20:37,307 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0
+2023-03-09 23:20:39,915 INFO [train.py:898] (1/4) Epoch 29, batch 600, loss[loss=0.1627, simple_loss=0.2512, pruned_loss=0.03715, over 17913.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.2453, pruned_loss=0.03171, over 3397833.94 frames. ], batch size: 65, lr: 3.89e-03, grad_scale: 8.0
+2023-03-09 23:21:17,855 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7816, 2.4687, 2.7113, 2.8559, 3.2039, 4.9046, 4.9331, 3.2998],
+ device='cuda:1'), covar=tensor([0.2071, 0.2619, 0.3092, 0.1891, 0.2521, 0.0304, 0.0332, 0.1134],
+ device='cuda:1'), in_proj_covar=tensor([0.0332, 0.0365, 0.0416, 0.0292, 0.0398, 0.0270, 0.0303, 0.0274],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001],
+ device='cuda:1')
+2023-03-09 23:21:20,768 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=102388.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:21:39,509 INFO [train.py:898] (1/4) Epoch 29, batch 650, loss[loss=0.1701, simple_loss=0.2652, pruned_loss=0.03751, over 18318.00 frames. ], tot_loss[loss=0.1548, simple_loss=0.2454, pruned_loss=0.03206, over 3443243.50 frames. ], batch size: 54, lr: 3.89e-03, grad_scale: 8.0
+2023-03-09 23:21:49,330 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.777e+02 2.518e+02 2.994e+02 3.652e+02 5.297e+02, threshold=5.988e+02, percent-clipped=0.0
+2023-03-09 23:22:06,450 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2798, 5.2935, 5.6111, 5.6780, 5.2395, 6.1408, 5.8377, 5.3005],
+ device='cuda:1'), covar=tensor([0.1126, 0.0678, 0.0747, 0.0739, 0.1342, 0.0745, 0.0654, 0.1935],
+ device='cuda:1'), in_proj_covar=tensor([0.0381, 0.0311, 0.0338, 0.0339, 0.0347, 0.0452, 0.0306, 0.0448],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004],
+ device='cuda:1')
+2023-03-09 23:22:38,225 INFO [train.py:898] (1/4) Epoch 29, batch 700, loss[loss=0.1323, simple_loss=0.2236, pruned_loss=0.02051, over 18182.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2464, pruned_loss=0.03235, over 3474074.21 frames. ], batch size: 44, lr: 3.89e-03, grad_scale: 8.0
+2023-03-09 23:23:37,716 INFO [train.py:898] (1/4) Epoch 29, batch 750, loss[loss=0.1494, simple_loss=0.2451, pruned_loss=0.02682, over 18530.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2462, pruned_loss=0.03232, over 3493330.19 frames. ], batch size: 49, lr: 3.89e-03, grad_scale: 8.0
+2023-03-09 23:23:48,499 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.674e+02 2.550e+02 3.053e+02 3.427e+02 7.248e+02, threshold=6.105e+02, percent-clipped=3.0
+2023-03-09 23:23:59,000 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0632, 5.1436, 5.1338, 4.7939, 4.9040, 4.8740, 5.2091, 5.2287],
+ device='cuda:1'), covar=tensor([0.0069, 0.0062, 0.0059, 0.0124, 0.0057, 0.0157, 0.0069, 0.0082],
+ device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0074, 0.0080, 0.0100, 0.0079, 0.0108, 0.0091, 0.0091],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:1')
+2023-03-09 23:24:18,328 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7283, 5.2594, 5.2035, 5.2441, 4.7104, 5.1613, 4.6359, 5.1104],
+ device='cuda:1'), covar=tensor([0.0261, 0.0268, 0.0196, 0.0462, 0.0420, 0.0215, 0.1067, 0.0308],
+ device='cuda:1'), in_proj_covar=tensor([0.0237, 0.0281, 0.0280, 0.0360, 0.0290, 0.0289, 0.0321, 0.0281],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:1')
+2023-03-09 23:24:35,934 INFO [train.py:898] (1/4) Epoch 29, batch 800, loss[loss=0.1329, simple_loss=0.2133, pruned_loss=0.02628, over 18375.00 frames. ], tot_loss[loss=0.1548, simple_loss=0.2453, pruned_loss=0.03213, over 3529428.71 frames. ], batch size: 42, lr: 3.89e-03, grad_scale: 8.0
+2023-03-09 23:24:57,320 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0
+2023-03-09 23:25:03,654 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102576.0, num_to_drop=1, layers_to_drop={2}
+2023-03-09 23:25:35,085 INFO [train.py:898] (1/4) Epoch 29, batch 850, loss[loss=0.1664, simple_loss=0.2567, pruned_loss=0.0381, over 17724.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2461, pruned_loss=0.0324, over 3543848.50 frames. ], batch size: 70, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:25:44,466 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.73 vs. limit=5.0
+2023-03-09 23:25:45,049 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6474, 3.8831, 2.3223, 3.8484, 5.0608, 2.5664, 3.7186, 3.8505],
+ device='cuda:1'), covar=tensor([0.0228, 0.1246, 0.1757, 0.0647, 0.0098, 0.1227, 0.0715, 0.0762],
+ device='cuda:1'), in_proj_covar=tensor([0.0189, 0.0288, 0.0213, 0.0204, 0.0147, 0.0188, 0.0226, 0.0236],
+ device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 23:25:45,741 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.923e+02 2.608e+02 3.070e+02 3.838e+02 1.078e+03, threshold=6.141e+02, percent-clipped=3.0
+2023-03-09 23:25:47,218 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3855, 2.7743, 4.1143, 3.5841, 2.6023, 4.2899, 3.7567, 2.7959],
+ device='cuda:1'), covar=tensor([0.0594, 0.1587, 0.0343, 0.0448, 0.1637, 0.0272, 0.0657, 0.1003],
+ device='cuda:1'), in_proj_covar=tensor([0.0227, 0.0249, 0.0237, 0.0175, 0.0231, 0.0222, 0.0262, 0.0202],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-09 23:26:00,157 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=102624.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 23:26:10,181 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0074, 5.5262, 5.4615, 5.4622, 4.9734, 5.4184, 4.8577, 5.3840],
+ device='cuda:1'), covar=tensor([0.0243, 0.0229, 0.0187, 0.0383, 0.0393, 0.0201, 0.1067, 0.0291],
+ device='cuda:1'), in_proj_covar=tensor([0.0237, 0.0282, 0.0282, 0.0362, 0.0291, 0.0291, 0.0322, 0.0282],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:1')
+2023-03-09 23:26:22,203 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102643.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:26:34,017 INFO [train.py:898] (1/4) Epoch 29, batch 900, loss[loss=0.1251, simple_loss=0.2131, pruned_loss=0.01855, over 18426.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2454, pruned_loss=0.03203, over 3563491.94 frames. ], batch size: 42, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:26:39,918 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5139, 6.0270, 5.6293, 5.8222, 5.6482, 5.4436, 6.1051, 6.0391],
+ device='cuda:1'), covar=tensor([0.1125, 0.0834, 0.0457, 0.0747, 0.1355, 0.0736, 0.0539, 0.0747],
+ device='cuda:1'), in_proj_covar=tensor([0.0650, 0.0572, 0.0409, 0.0597, 0.0793, 0.0594, 0.0814, 0.0624],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004],
+ device='cuda:1')
+2023-03-09 23:27:18,874 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=102691.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:27:21,312 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0120, 5.1723, 2.5690, 5.0681, 4.8016, 5.1772, 4.9297, 2.1060],
+ device='cuda:1'), covar=tensor([0.0264, 0.0125, 0.1148, 0.0130, 0.0155, 0.0161, 0.0172, 0.1886],
+ device='cuda:1'), in_proj_covar=tensor([0.0094, 0.0085, 0.0099, 0.0101, 0.0092, 0.0081, 0.0088, 0.0100],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005],
+ device='cuda:1')
+2023-03-09 23:27:32,892 INFO [train.py:898] (1/4) Epoch 29, batch 950, loss[loss=0.1354, simple_loss=0.2224, pruned_loss=0.02419, over 18263.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2452, pruned_loss=0.03184, over 3575867.24 frames. ], batch size: 45, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:27:43,046 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.728e+02 2.495e+02 2.903e+02 3.483e+02 7.298e+02, threshold=5.805e+02, percent-clipped=1.0
+2023-03-09 23:28:24,440 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.95 vs. limit=5.0
+2023-03-09 23:28:31,530 INFO [train.py:898] (1/4) Epoch 29, batch 1000, loss[loss=0.1593, simple_loss=0.2516, pruned_loss=0.03352, over 18394.00 frames. ], tot_loss[loss=0.1539, simple_loss=0.2448, pruned_loss=0.03146, over 3588498.79 frames. ], batch size: 52, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:28:48,411 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0535, 5.5838, 5.5352, 5.5249, 4.9776, 5.4607, 4.9300, 5.4172],
+ device='cuda:1'), covar=tensor([0.0248, 0.0213, 0.0169, 0.0357, 0.0394, 0.0204, 0.0994, 0.0339],
+ device='cuda:1'), in_proj_covar=tensor([0.0239, 0.0283, 0.0282, 0.0363, 0.0292, 0.0292, 0.0322, 0.0284],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:1')
+2023-03-09 23:28:56,628 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0
+2023-03-09 23:29:00,176 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5691, 2.3733, 2.5084, 2.6271, 3.0654, 4.7071, 4.6929, 3.4369],
+ device='cuda:1'), covar=tensor([0.2288, 0.2786, 0.3401, 0.2148, 0.2729, 0.0328, 0.0382, 0.0993],
+ device='cuda:1'), in_proj_covar=tensor([0.0333, 0.0366, 0.0417, 0.0294, 0.0398, 0.0271, 0.0305, 0.0275],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002],
+ device='cuda:1')
+2023-03-09 23:29:04,589 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9597, 4.3349, 2.5797, 4.0025, 5.2914, 2.6645, 3.8202, 4.0389],
+ device='cuda:1'), covar=tensor([0.0207, 0.1016, 0.1605, 0.0680, 0.0095, 0.1277, 0.0756, 0.0745],
+ device='cuda:1'), in_proj_covar=tensor([0.0188, 0.0285, 0.0211, 0.0202, 0.0146, 0.0187, 0.0225, 0.0233],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 23:29:11,613 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0
+2023-03-09 23:29:31,336 INFO [train.py:898] (1/4) Epoch 29, batch 1050, loss[loss=0.1621, simple_loss=0.2566, pruned_loss=0.03379, over 18335.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2446, pruned_loss=0.03149, over 3587684.19 frames. ], batch size: 56, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:29:34,115 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9457, 3.8499, 5.2644, 3.1839, 4.6242, 2.7431, 3.1331, 1.8833],
+ device='cuda:1'), covar=tensor([0.1233, 0.0920, 0.0153, 0.0908, 0.0457, 0.2633, 0.2901, 0.2345],
+ device='cuda:1'), in_proj_covar=tensor([0.0235, 0.0256, 0.0233, 0.0211, 0.0267, 0.0283, 0.0341, 0.0250],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:1')
+2023-03-09 23:29:42,514 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.563e+02 2.351e+02 2.635e+02 3.103e+02 8.908e+02, threshold=5.271e+02, percent-clipped=2.0
+2023-03-09 23:30:27,424 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=102851.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:30:29,890 INFO [train.py:898] (1/4) Epoch 29, batch 1100, loss[loss=0.1652, simple_loss=0.2566, pruned_loss=0.03687, over 18279.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.2454, pruned_loss=0.0318, over 3587392.59 frames. ], batch size: 57, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:31:10,250 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=102887.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:31:28,099 INFO [train.py:898] (1/4) Epoch 29, batch 1150, loss[loss=0.1507, simple_loss=0.24, pruned_loss=0.03069, over 18302.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.2453, pruned_loss=0.03186, over 3589825.80 frames. ], batch size: 49, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:31:32,443 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9503, 4.9629, 5.0062, 4.7525, 4.7788, 4.8575, 5.0965, 5.1016],
+ device='cuda:1'), covar=tensor([0.0076, 0.0071, 0.0068, 0.0127, 0.0066, 0.0164, 0.0085, 0.0077],
+ device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0074, 0.0080, 0.0099, 0.0079, 0.0108, 0.0091, 0.0091],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:1')
+2023-03-09 23:31:38,791 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.637e+02 2.444e+02 3.032e+02 3.732e+02 6.439e+02, threshold=6.064e+02, percent-clipped=4.0
+2023-03-09 23:31:39,251 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102912.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:32:22,232 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102948.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:32:27,593 INFO [train.py:898] (1/4) Epoch 29, batch 1200, loss[loss=0.1656, simple_loss=0.2606, pruned_loss=0.03524, over 18324.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2451, pruned_loss=0.03166, over 3591905.26 frames. ], batch size: 56, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:32:57,434 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9159, 3.7767, 5.1603, 4.6362, 3.5587, 3.1474, 4.7260, 5.4078],
+ device='cuda:1'), covar=tensor([0.0753, 0.1520, 0.0198, 0.0334, 0.0877, 0.1214, 0.0318, 0.0207],
+ device='cuda:1'), in_proj_covar=tensor([0.0153, 0.0282, 0.0175, 0.0186, 0.0197, 0.0196, 0.0201, 0.0214],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:1')
+2023-03-09 23:33:17,632 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0997, 5.2193, 5.2000, 4.9763, 4.9646, 4.9686, 5.3298, 5.3166],
+ device='cuda:1'), covar=tensor([0.0069, 0.0052, 0.0052, 0.0106, 0.0054, 0.0149, 0.0069, 0.0075],
+ device='cuda:1'), in_proj_covar=tensor([0.0100, 0.0074, 0.0080, 0.0100, 0.0079, 0.0108, 0.0091, 0.0091],
+ device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:1')
+2023-03-09 23:33:26,584 INFO [train.py:898] (1/4) Epoch 29, batch 1250, loss[loss=0.1631, simple_loss=0.257, pruned_loss=0.03463, over 17730.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.2462, pruned_loss=0.03195, over 3603368.72 frames. ], batch size: 70, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:33:36,999 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.897e+02 2.667e+02 2.982e+02 3.378e+02 7.201e+02, threshold=5.965e+02, percent-clipped=1.0
+2023-03-09 23:33:43,285 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8911, 5.3566, 5.3320, 5.3296, 4.8113, 5.2503, 4.7096, 5.2618],
+ device='cuda:1'), covar=tensor([0.0244, 0.0267, 0.0182, 0.0532, 0.0408, 0.0252, 0.1034, 0.0313],
+ device='cuda:1'), in_proj_covar=tensor([0.0236, 0.0281, 0.0280, 0.0360, 0.0290, 0.0291, 0.0319, 0.0281],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:1')
+2023-03-09 23:34:25,434 INFO [train.py:898] (1/4) Epoch 29, batch 1300, loss[loss=0.1456, simple_loss=0.2335, pruned_loss=0.02888, over 18329.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2459, pruned_loss=0.03193, over 3606255.80 frames. ], batch size: 46, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:34:30,491 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0316, 4.7572, 4.8718, 3.6275, 3.9998, 3.6807, 3.0102, 2.8792],
+ device='cuda:1'), covar=tensor([0.0204, 0.0130, 0.0060, 0.0277, 0.0311, 0.0210, 0.0605, 0.0711],
+ device='cuda:1'), in_proj_covar=tensor([0.0076, 0.0064, 0.0069, 0.0072, 0.0093, 0.0071, 0.0079, 0.0087],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006],
+ device='cuda:1')
+2023-03-09 23:35:19,293 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103098.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:35:24,705 INFO [train.py:898] (1/4) Epoch 29, batch 1350, loss[loss=0.1868, simple_loss=0.2703, pruned_loss=0.05166, over 12530.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2462, pruned_loss=0.03214, over 3599417.59 frames. ], batch size: 129, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:35:30,975 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8831, 5.4252, 2.8617, 5.2685, 5.1902, 5.4715, 5.2955, 2.7102],
+ device='cuda:1'), covar=tensor([0.0239, 0.0079, 0.0776, 0.0077, 0.0070, 0.0058, 0.0079, 0.1001],
+ device='cuda:1'), in_proj_covar=tensor([0.0095, 0.0085, 0.0099, 0.0101, 0.0091, 0.0081, 0.0088, 0.0099],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005],
+ device='cuda:1')
+2023-03-09 23:35:35,179 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.567e+02 2.524e+02 2.935e+02 3.557e+02 7.291e+02, threshold=5.869e+02, percent-clipped=2.0
+2023-03-09 23:35:41,637 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8064, 5.2696, 5.2130, 5.2550, 4.7347, 5.1457, 4.6585, 5.1476],
+ device='cuda:1'), covar=tensor([0.0230, 0.0248, 0.0184, 0.0400, 0.0371, 0.0204, 0.0927, 0.0284],
+ device='cuda:1'), in_proj_covar=tensor([0.0235, 0.0278, 0.0278, 0.0357, 0.0288, 0.0288, 0.0317, 0.0279],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:1')
+2023-03-09 23:35:59,055 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103132.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:36:23,951 INFO [train.py:898] (1/4) Epoch 29, batch 1400, loss[loss=0.1475, simple_loss=0.2278, pruned_loss=0.03363, over 18241.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2464, pruned_loss=0.0323, over 3589466.13 frames. ], batch size: 45, lr: 3.87e-03, grad_scale: 8.0
+2023-03-09 23:36:31,060 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103159.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:36:46,315 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1366, 5.2225, 4.4549, 5.0718, 5.1689, 4.6048, 4.9895, 4.6648],
+ device='cuda:1'), covar=tensor([0.0903, 0.0735, 0.2575, 0.1268, 0.0755, 0.0624, 0.0823, 0.1517],
+ device='cuda:1'), in_proj_covar=tensor([0.0528, 0.0597, 0.0740, 0.0461, 0.0485, 0.0543, 0.0575, 0.0717],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0005, 0.0005, 0.0006],
+ device='cuda:1')
+2023-03-09 23:37:00,390 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8510, 5.3394, 5.3204, 5.3653, 4.8553, 5.2828, 4.6237, 5.2533],
+ device='cuda:1'), covar=tensor([0.0244, 0.0313, 0.0206, 0.0448, 0.0367, 0.0238, 0.1137, 0.0315],
+ device='cuda:1'), in_proj_covar=tensor([0.0237, 0.0281, 0.0280, 0.0360, 0.0290, 0.0290, 0.0320, 0.0281],
+ device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:1')
+2023-03-09 23:37:03,786 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5140, 2.9096, 4.4664, 3.6485, 2.6667, 4.6606, 3.9862, 2.7895],
+ device='cuda:1'), covar=tensor([0.0607, 0.1478, 0.0287, 0.0487, 0.1623, 0.0214, 0.0525, 0.1019],
+ device='cuda:1'), in_proj_covar=tensor([0.0222, 0.0243, 0.0234, 0.0173, 0.0226, 0.0217, 0.0256, 0.0197],
+ device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:1')
+2023-03-09 23:37:11,203 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103193.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:37:22,803 INFO [train.py:898] (1/4) Epoch 29, batch 1450, loss[loss=0.1679, simple_loss=0.2702, pruned_loss=0.03279, over 18553.00
frames. ], tot_loss[loss=0.155, simple_loss=0.2458, pruned_loss=0.03215, over 3584138.48 frames. ], batch size: 54, lr: 3.87e-03, grad_scale: 8.0 +2023-03-09 23:37:27,605 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103207.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:37:30,370 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-09 23:37:33,042 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.757e+02 2.357e+02 2.757e+02 3.131e+02 6.312e+02, threshold=5.514e+02, percent-clipped=1.0 +2023-03-09 23:37:58,434 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8240, 2.9813, 2.7181, 2.9659, 3.7928, 3.8092, 3.3245, 3.0228], + device='cuda:1'), covar=tensor([0.0167, 0.0320, 0.0537, 0.0403, 0.0168, 0.0129, 0.0325, 0.0391], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0150, 0.0169, 0.0168, 0.0145, 0.0133, 0.0164, 0.0167], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 23:38:09,450 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103243.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:38:21,767 INFO [train.py:898] (1/4) Epoch 29, batch 1500, loss[loss=0.1536, simple_loss=0.2503, pruned_loss=0.0284, over 18364.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2459, pruned_loss=0.03177, over 3599009.25 frames. ], batch size: 55, lr: 3.87e-03, grad_scale: 8.0 +2023-03-09 23:38:33,709 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.16 vs. limit=5.0 +2023-03-09 23:39:20,021 INFO [train.py:898] (1/4) Epoch 29, batch 1550, loss[loss=0.1382, simple_loss=0.2295, pruned_loss=0.02345, over 18501.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.245, pruned_loss=0.03166, over 3596378.70 frames. ], batch size: 47, lr: 3.87e-03, grad_scale: 8.0 +2023-03-09 23:39:29,919 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.405e+02 2.624e+02 2.952e+02 3.606e+02 8.116e+02, threshold=5.905e+02, percent-clipped=6.0 +2023-03-09 23:40:18,837 INFO [train.py:898] (1/4) Epoch 29, batch 1600, loss[loss=0.1352, simple_loss=0.2172, pruned_loss=0.02665, over 18368.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2452, pruned_loss=0.03198, over 3587541.15 frames. ], batch size: 42, lr: 3.87e-03, grad_scale: 8.0 +2023-03-09 23:40:34,712 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103366.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 23:40:59,978 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6903, 2.8864, 2.7018, 2.9798, 3.6704, 3.6590, 3.2444, 2.9439], + device='cuda:1'), covar=tensor([0.0183, 0.0322, 0.0524, 0.0357, 0.0193, 0.0169, 0.0363, 0.0339], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0150, 0.0170, 0.0169, 0.0146, 0.0133, 0.0165, 0.0168], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 23:41:18,030 INFO [train.py:898] (1/4) Epoch 29, batch 1650, loss[loss=0.1481, simple_loss=0.2328, pruned_loss=0.03166, over 18464.00 frames. ], tot_loss[loss=0.1539, simple_loss=0.2446, pruned_loss=0.03154, over 3595761.28 frames. 
], batch size: 43, lr: 3.87e-03, grad_scale: 8.0 +2023-03-09 23:41:29,738 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.750e+02 2.322e+02 2.729e+02 3.382e+02 5.618e+02, threshold=5.459e+02, percent-clipped=0.0 +2023-03-09 23:41:47,073 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103427.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 23:42:17,237 INFO [train.py:898] (1/4) Epoch 29, batch 1700, loss[loss=0.1428, simple_loss=0.2316, pruned_loss=0.02704, over 18415.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2455, pruned_loss=0.03192, over 3587050.07 frames. ], batch size: 48, lr: 3.87e-03, grad_scale: 8.0 +2023-03-09 23:42:18,657 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103454.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:42:41,818 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7730, 3.0123, 2.8150, 3.0693, 3.7993, 3.7874, 3.3651, 3.0717], + device='cuda:1'), covar=tensor([0.0187, 0.0259, 0.0526, 0.0389, 0.0179, 0.0158, 0.0356, 0.0363], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0150, 0.0170, 0.0168, 0.0146, 0.0133, 0.0165, 0.0167], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 23:42:58,883 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103488.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:43:01,946 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7663, 3.7691, 3.5927, 3.2070, 3.5050, 2.8000, 2.8392, 3.7400], + device='cuda:1'), covar=tensor([0.0064, 0.0105, 0.0082, 0.0139, 0.0109, 0.0206, 0.0210, 0.0077], + device='cuda:1'), in_proj_covar=tensor([0.0158, 0.0178, 0.0148, 0.0197, 0.0157, 0.0189, 0.0193, 0.0138], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 23:43:16,072 INFO [train.py:898] (1/4) Epoch 29, batch 1750, loss[loss=0.1762, simple_loss=0.2654, pruned_loss=0.04345, over 18161.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.2455, pruned_loss=0.03173, over 3593304.30 frames. ], batch size: 62, lr: 3.87e-03, grad_scale: 8.0 +2023-03-09 23:43:20,998 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103507.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:43:27,002 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.672e+02 2.484e+02 2.930e+02 3.685e+02 5.975e+02, threshold=5.860e+02, percent-clipped=1.0 +2023-03-09 23:44:04,207 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103543.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:44:16,098 INFO [train.py:898] (1/4) Epoch 29, batch 1800, loss[loss=0.1609, simple_loss=0.2487, pruned_loss=0.0366, over 18488.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.246, pruned_loss=0.03189, over 3598873.31 frames. ], batch size: 53, lr: 3.87e-03, grad_scale: 8.0 +2023-03-09 23:44:18,471 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103555.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:44:19,141 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-09 23:44:46,529 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.53 vs. 
limit=5.0 +2023-03-09 23:45:00,577 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103591.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:45:15,441 INFO [train.py:898] (1/4) Epoch 29, batch 1850, loss[loss=0.1403, simple_loss=0.2237, pruned_loss=0.02841, over 18423.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.2463, pruned_loss=0.03199, over 3596099.53 frames. ], batch size: 43, lr: 3.87e-03, grad_scale: 8.0 +2023-03-09 23:45:25,538 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.856e+02 2.569e+02 2.972e+02 3.705e+02 6.711e+02, threshold=5.945e+02, percent-clipped=3.0 +2023-03-09 23:46:14,389 INFO [train.py:898] (1/4) Epoch 29, batch 1900, loss[loss=0.171, simple_loss=0.2615, pruned_loss=0.04027, over 18225.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2465, pruned_loss=0.03206, over 3589761.00 frames. ], batch size: 60, lr: 3.87e-03, grad_scale: 8.0 +2023-03-09 23:47:14,030 INFO [train.py:898] (1/4) Epoch 29, batch 1950, loss[loss=0.1357, simple_loss=0.2195, pruned_loss=0.026, over 18495.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2459, pruned_loss=0.03193, over 3590872.32 frames. ], batch size: 44, lr: 3.86e-03, grad_scale: 8.0 +2023-03-09 23:47:24,464 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.577e+02 2.406e+02 2.960e+02 3.499e+02 1.191e+03, threshold=5.920e+02, percent-clipped=3.0 +2023-03-09 23:47:26,110 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9248, 3.6762, 5.4630, 3.2994, 4.8014, 2.7806, 3.2320, 1.9065], + device='cuda:1'), covar=tensor([0.1182, 0.0990, 0.0104, 0.0765, 0.0368, 0.2443, 0.2522, 0.2256], + device='cuda:1'), in_proj_covar=tensor([0.0234, 0.0256, 0.0234, 0.0210, 0.0268, 0.0283, 0.0340, 0.0249], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-09 23:47:36,596 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103722.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 23:47:57,807 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103740.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:48:13,370 INFO [train.py:898] (1/4) Epoch 29, batch 2000, loss[loss=0.1352, simple_loss=0.2189, pruned_loss=0.02573, over 16774.00 frames. ], tot_loss[loss=0.1552, simple_loss=0.2461, pruned_loss=0.03212, over 3580215.91 frames. 
], batch size: 37, lr: 3.86e-03, grad_scale: 8.0 +2023-03-09 23:48:14,745 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103754.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:48:27,281 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103765.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:48:27,292 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3544, 2.7203, 2.4859, 2.7620, 3.4904, 3.4359, 3.0419, 2.7465], + device='cuda:1'), covar=tensor([0.0224, 0.0315, 0.0596, 0.0434, 0.0204, 0.0164, 0.0401, 0.0436], + device='cuda:1'), in_proj_covar=tensor([0.0151, 0.0150, 0.0169, 0.0168, 0.0145, 0.0132, 0.0164, 0.0167], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 23:48:54,539 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0510, 5.5049, 5.4847, 5.4810, 4.9379, 5.4018, 4.8677, 5.3895], + device='cuda:1'), covar=tensor([0.0217, 0.0240, 0.0167, 0.0426, 0.0382, 0.0219, 0.0963, 0.0276], + device='cuda:1'), in_proj_covar=tensor([0.0235, 0.0281, 0.0280, 0.0359, 0.0288, 0.0289, 0.0318, 0.0280], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-09 23:48:54,564 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103788.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:49:08,609 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6176, 2.4193, 2.5041, 2.6330, 2.9797, 4.1429, 4.1046, 3.0160], + device='cuda:1'), covar=tensor([0.2113, 0.2510, 0.3087, 0.2070, 0.2522, 0.0429, 0.0474, 0.1194], + device='cuda:1'), in_proj_covar=tensor([0.0335, 0.0368, 0.0421, 0.0294, 0.0399, 0.0272, 0.0306, 0.0276], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-03-09 23:49:09,560 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103801.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:49:11,718 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103802.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:49:12,643 INFO [train.py:898] (1/4) Epoch 29, batch 2050, loss[loss=0.1529, simple_loss=0.2463, pruned_loss=0.02972, over 18584.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2457, pruned_loss=0.03183, over 3580254.31 frames. 
], batch size: 54, lr: 3.86e-03, grad_scale: 4.0 +2023-03-09 23:49:23,958 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.828e+02 2.583e+02 3.001e+02 3.513e+02 6.272e+02, threshold=6.003e+02, percent-clipped=1.0 +2023-03-09 23:49:34,875 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6713, 3.2216, 4.5786, 3.6869, 2.8461, 4.7908, 3.9749, 2.9657], + device='cuda:1'), covar=tensor([0.0591, 0.1355, 0.0269, 0.0573, 0.1640, 0.0212, 0.0670, 0.1032], + device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0247, 0.0238, 0.0175, 0.0230, 0.0221, 0.0261, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 23:49:39,215 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103826.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:49:50,968 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103836.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:50:10,330 INFO [train.py:898] (1/4) Epoch 29, batch 2100, loss[loss=0.1417, simple_loss=0.2263, pruned_loss=0.02853, over 18500.00 frames. ], tot_loss[loss=0.155, simple_loss=0.2458, pruned_loss=0.03209, over 3575982.97 frames. ], batch size: 47, lr: 3.86e-03, grad_scale: 4.0 +2023-03-09 23:50:38,445 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103876.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:51:09,169 INFO [train.py:898] (1/4) Epoch 29, batch 2150, loss[loss=0.1292, simple_loss=0.2164, pruned_loss=0.02104, over 18502.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2451, pruned_loss=0.03185, over 3583551.03 frames. ], batch size: 47, lr: 3.86e-03, grad_scale: 4.0 +2023-03-09 23:51:21,586 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.841e+02 2.540e+02 2.937e+02 3.473e+02 7.203e+02, threshold=5.874e+02, percent-clipped=1.0 +2023-03-09 23:51:30,906 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.6391, 6.1491, 5.6528, 5.9571, 5.8098, 5.5568, 6.2175, 6.1689], + device='cuda:1'), covar=tensor([0.0986, 0.0695, 0.0393, 0.0676, 0.1102, 0.0646, 0.0529, 0.0676], + device='cuda:1'), in_proj_covar=tensor([0.0645, 0.0566, 0.0406, 0.0592, 0.0787, 0.0586, 0.0808, 0.0618], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:1') +2023-03-09 23:51:35,618 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6842, 2.4428, 2.5717, 2.6021, 3.2299, 4.6914, 4.6151, 3.3747], + device='cuda:1'), covar=tensor([0.2132, 0.2619, 0.3248, 0.2135, 0.2429, 0.0325, 0.0396, 0.1094], + device='cuda:1'), in_proj_covar=tensor([0.0335, 0.0367, 0.0420, 0.0293, 0.0399, 0.0272, 0.0305, 0.0275], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-03-09 23:51:43,741 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0242, 4.9198, 5.0640, 4.7331, 4.6714, 4.9524, 5.2117, 5.0972], + device='cuda:1'), covar=tensor([0.0106, 0.0101, 0.0113, 0.0163, 0.0106, 0.0201, 0.0105, 0.0142], + device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0075, 0.0081, 0.0101, 0.0080, 0.0109, 0.0092, 0.0092], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-09 23:51:50,010 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103937.0, num_to_drop=1, layers_to_drop={3} 
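The recurring [optim.py:369] entries above summarise adaptive gradient clipping: the five numbers after "grad-norm quartiles" appear to be the min/25%/50%/75%/max of recently observed gradient norms, and the reported threshold is consistently the median scaled by Clipping_scale=2.0 (e.g. 2.0 * 3.001e+02 = 6.003e+02 in the first entry above, and 2.0 * 3.021e+02 = 6.041e+02 in the last), with percent-clipped giving the share of recent batches whose norm exceeded that threshold. The sketch below shows one way such a scheme can work; the window length, the helper name, and the exact in-place rescaling are illustrative assumptions, not the icefall implementation.

    import torch

    def clip_by_running_median(params, recent_norms, clipping_scale=2.0,
                               window=200):
        # Sketch: clip the global grad norm to clipping_scale times the
        # median of recently observed norms (window length is assumed).
        grads = [p.grad for p in params if p.grad is not None]
        norm = torch.norm(torch.stack([g.norm() for g in grads]))
        recent_norms.append(float(norm))
        if len(recent_norms) > window:
            recent_norms.pop(0)
        median = sorted(recent_norms)[len(recent_norms) // 2]
        threshold = clipping_scale * median
        if float(norm) > threshold:
            for g in grads:
                g.mul_(threshold / norm)  # rescale gradients in place
        return float(norm), threshold

The nearby [zipformer.py:625] entries track a related warmup mechanism: each encoder stack may stochastically bypass layers early in training, and by batch_count around 104k, far past warmup_end=4000.0, the draw almost always yields num_to_drop=0, with only an occasional single layer dropped, such as layers_to_drop={3} in the entry just above.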
+2023-03-09 23:52:07,790 INFO [train.py:898] (1/4) Epoch 29, batch 2200, loss[loss=0.1412, simple_loss=0.2307, pruned_loss=0.02581, over 18351.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.245, pruned_loss=0.03167, over 3585814.62 frames. ], batch size: 46, lr: 3.86e-03, grad_scale: 4.0 +2023-03-09 23:52:40,881 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5573, 3.4632, 2.3113, 4.3843, 2.9875, 4.0892, 2.6564, 3.7816], + device='cuda:1'), covar=tensor([0.0656, 0.0835, 0.1491, 0.0538, 0.0914, 0.0358, 0.1101, 0.0487], + device='cuda:1'), in_proj_covar=tensor([0.0224, 0.0234, 0.0198, 0.0299, 0.0200, 0.0273, 0.0208, 0.0210], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 23:52:52,583 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103990.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:53:00,488 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103997.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:53:11,843 INFO [train.py:898] (1/4) Epoch 29, batch 2250, loss[loss=0.1469, simple_loss=0.2396, pruned_loss=0.0271, over 18366.00 frames. ], tot_loss[loss=0.1548, simple_loss=0.2457, pruned_loss=0.03193, over 3586723.55 frames. ], batch size: 55, lr: 3.86e-03, grad_scale: 4.0 +2023-03-09 23:53:23,833 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.696e+02 2.605e+02 3.021e+02 3.898e+02 6.777e+02, threshold=6.041e+02, percent-clipped=7.0 +2023-03-09 23:53:34,979 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104022.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 23:54:06,854 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104049.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:54:09,208 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104051.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:54:11,012 INFO [train.py:898] (1/4) Epoch 29, batch 2300, loss[loss=0.1827, simple_loss=0.2695, pruned_loss=0.04794, over 12452.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2453, pruned_loss=0.03199, over 3584871.88 frames. ], batch size: 130, lr: 3.86e-03, grad_scale: 4.0 +2023-03-09 23:54:17,202 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104058.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:54:31,637 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104070.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 23:55:02,121 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104096.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:55:09,792 INFO [train.py:898] (1/4) Epoch 29, batch 2350, loss[loss=0.1698, simple_loss=0.2737, pruned_loss=0.03293, over 18402.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2466, pruned_loss=0.03232, over 3587197.12 frames. 
], batch size: 52, lr: 3.86e-03, grad_scale: 4.0 +2023-03-09 23:55:17,813 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104110.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:55:20,807 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.751e+02 2.516e+02 3.034e+02 3.575e+02 8.102e+02, threshold=6.068e+02, percent-clipped=2.0 +2023-03-09 23:55:30,841 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104121.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:55:55,623 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2212, 5.2330, 5.5325, 5.5622, 5.1939, 6.0868, 5.7652, 5.4111], + device='cuda:1'), covar=tensor([0.1193, 0.0630, 0.0925, 0.0805, 0.1378, 0.0631, 0.0682, 0.1566], + device='cuda:1'), in_proj_covar=tensor([0.0383, 0.0312, 0.0342, 0.0344, 0.0351, 0.0456, 0.0310, 0.0451], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-09 23:56:08,415 INFO [train.py:898] (1/4) Epoch 29, batch 2400, loss[loss=0.1558, simple_loss=0.2453, pruned_loss=0.03311, over 18379.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2464, pruned_loss=0.03222, over 3574454.23 frames. ], batch size: 50, lr: 3.86e-03, grad_scale: 8.0 +2023-03-09 23:57:01,618 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.75 vs. limit=2.0 +2023-03-09 23:57:07,522 INFO [train.py:898] (1/4) Epoch 29, batch 2450, loss[loss=0.1758, simple_loss=0.266, pruned_loss=0.04278, over 18481.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2464, pruned_loss=0.0321, over 3574497.95 frames. ], batch size: 51, lr: 3.86e-03, grad_scale: 8.0 +2023-03-09 23:57:18,499 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.747e+02 2.425e+02 2.919e+02 3.520e+02 8.607e+02, threshold=5.838e+02, percent-clipped=3.0 +2023-03-09 23:57:22,797 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-09 23:57:41,206 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104232.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 23:57:48,470 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6709, 2.4976, 2.6265, 2.6057, 3.1597, 4.6918, 4.6544, 2.9100], + device='cuda:1'), covar=tensor([0.2176, 0.2456, 0.3112, 0.2117, 0.2514, 0.0301, 0.0367, 0.1336], + device='cuda:1'), in_proj_covar=tensor([0.0337, 0.0369, 0.0423, 0.0295, 0.0402, 0.0273, 0.0307, 0.0277], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:1') +2023-03-09 23:58:05,927 INFO [train.py:898] (1/4) Epoch 29, batch 2500, loss[loss=0.1313, simple_loss=0.2182, pruned_loss=0.0222, over 18157.00 frames. ], tot_loss[loss=0.1552, simple_loss=0.2463, pruned_loss=0.0321, over 3586623.18 frames. ], batch size: 44, lr: 3.85e-03, grad_scale: 8.0 +2023-03-09 23:58:37,501 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. 
limit=2.0 +2023-03-09 23:58:57,868 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4814, 3.2348, 2.1869, 4.2441, 2.9675, 3.9591, 2.4350, 3.7493], + device='cuda:1'), covar=tensor([0.0712, 0.0988, 0.1556, 0.0497, 0.0893, 0.0319, 0.1265, 0.0439], + device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0235, 0.0197, 0.0301, 0.0200, 0.0274, 0.0209, 0.0210], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-09 23:59:04,430 INFO [train.py:898] (1/4) Epoch 29, batch 2550, loss[loss=0.1616, simple_loss=0.2582, pruned_loss=0.03244, over 18492.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2468, pruned_loss=0.03217, over 3588705.16 frames. ], batch size: 53, lr: 3.85e-03, grad_scale: 8.0 +2023-03-09 23:59:16,565 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.724e+02 2.479e+02 2.941e+02 3.574e+02 6.228e+02, threshold=5.882e+02, percent-clipped=1.0 +2023-03-09 23:59:29,756 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-09 23:59:55,464 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104346.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:00:03,135 INFO [train.py:898] (1/4) Epoch 29, batch 2600, loss[loss=0.2001, simple_loss=0.2796, pruned_loss=0.06026, over 12672.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2469, pruned_loss=0.0323, over 3580049.22 frames. ], batch size: 129, lr: 3.85e-03, grad_scale: 8.0 +2023-03-10 00:00:03,378 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104353.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:00:49,831 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8170, 5.3223, 5.3087, 5.3367, 4.7580, 5.2402, 4.6885, 5.1849], + device='cuda:1'), covar=tensor([0.0246, 0.0284, 0.0190, 0.0397, 0.0379, 0.0218, 0.1065, 0.0314], + device='cuda:1'), in_proj_covar=tensor([0.0236, 0.0280, 0.0281, 0.0360, 0.0288, 0.0290, 0.0320, 0.0280], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-10 00:00:54,660 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104396.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:01:03,257 INFO [train.py:898] (1/4) Epoch 29, batch 2650, loss[loss=0.1269, simple_loss=0.2067, pruned_loss=0.02353, over 17730.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2468, pruned_loss=0.03207, over 3591037.49 frames. 
], batch size: 39, lr: 3.85e-03, grad_scale: 8.0 +2023-03-10 00:01:05,759 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104405.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:01:15,456 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 2.433e+02 2.852e+02 3.546e+02 8.268e+02, threshold=5.705e+02, percent-clipped=2.0 +2023-03-10 00:01:24,957 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104421.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:01:28,750 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8241, 4.0293, 2.4819, 4.0199, 5.1290, 2.5736, 3.7861, 3.9762], + device='cuda:1'), covar=tensor([0.0235, 0.1333, 0.1649, 0.0662, 0.0119, 0.1273, 0.0723, 0.0743], + device='cuda:1'), in_proj_covar=tensor([0.0187, 0.0285, 0.0212, 0.0203, 0.0147, 0.0188, 0.0224, 0.0234], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 00:01:52,710 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104444.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:02:03,367 INFO [train.py:898] (1/4) Epoch 29, batch 2700, loss[loss=0.1647, simple_loss=0.2456, pruned_loss=0.04194, over 12699.00 frames. ], tot_loss[loss=0.1563, simple_loss=0.2476, pruned_loss=0.03244, over 3572262.80 frames. ], batch size: 130, lr: 3.85e-03, grad_scale: 8.0 +2023-03-10 00:02:22,287 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104469.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:03:01,345 INFO [train.py:898] (1/4) Epoch 29, batch 2750, loss[loss=0.1703, simple_loss=0.2672, pruned_loss=0.03666, over 18566.00 frames. ], tot_loss[loss=0.156, simple_loss=0.2473, pruned_loss=0.03236, over 3577003.78 frames. ], batch size: 54, lr: 3.85e-03, grad_scale: 8.0 +2023-03-10 00:03:12,786 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.672e+02 2.565e+02 3.025e+02 3.690e+02 7.436e+02, threshold=6.050e+02, percent-clipped=2.0 +2023-03-10 00:03:35,756 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104532.0, num_to_drop=1, layers_to_drop={0} +2023-03-10 00:03:59,996 INFO [train.py:898] (1/4) Epoch 29, batch 2800, loss[loss=0.1348, simple_loss=0.2209, pruned_loss=0.0243, over 18407.00 frames. ], tot_loss[loss=0.156, simple_loss=0.2472, pruned_loss=0.0324, over 3580582.35 frames. ], batch size: 42, lr: 3.85e-03, grad_scale: 8.0 +2023-03-10 00:04:28,331 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.3106, 5.0959, 4.9441, 4.9520, 4.5806, 4.8383, 5.2512, 5.1031], + device='cuda:1'), covar=tensor([0.2629, 0.1245, 0.1219, 0.1273, 0.2469, 0.1279, 0.1082, 0.1196], + device='cuda:1'), in_proj_covar=tensor([0.0652, 0.0574, 0.0411, 0.0598, 0.0794, 0.0594, 0.0815, 0.0627], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:1') +2023-03-10 00:04:31,629 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104580.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:04:45,757 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-10 00:04:58,830 INFO [train.py:898] (1/4) Epoch 29, batch 2850, loss[loss=0.1619, simple_loss=0.256, pruned_loss=0.03386, over 18367.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2468, pruned_loss=0.03221, over 3583684.45 frames. 
], batch size: 56, lr: 3.85e-03, grad_scale: 8.0 +2023-03-10 00:05:10,927 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.658e+02 2.500e+02 2.877e+02 3.433e+02 5.313e+02, threshold=5.755e+02, percent-clipped=0.0 +2023-03-10 00:05:21,174 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7441, 3.7323, 2.5532, 4.6077, 3.3106, 4.4380, 2.8624, 4.2701], + device='cuda:1'), covar=tensor([0.0678, 0.0775, 0.1248, 0.0467, 0.0749, 0.0333, 0.1038, 0.0353], + device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0235, 0.0197, 0.0301, 0.0199, 0.0273, 0.0209, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 00:05:50,625 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104646.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:05:58,418 INFO [train.py:898] (1/4) Epoch 29, batch 2900, loss[loss=0.1456, simple_loss=0.2293, pruned_loss=0.03095, over 18261.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.2463, pruned_loss=0.03193, over 3585593.37 frames. ], batch size: 45, lr: 3.85e-03, grad_scale: 8.0 +2023-03-10 00:05:58,765 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104653.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:06:47,692 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104694.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:06:56,052 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104701.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:06:58,033 INFO [train.py:898] (1/4) Epoch 29, batch 2950, loss[loss=0.1204, simple_loss=0.2036, pruned_loss=0.01858, over 18436.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2458, pruned_loss=0.03174, over 3586734.75 frames. ], batch size: 43, lr: 3.85e-03, grad_scale: 8.0 +2023-03-10 00:07:00,627 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104705.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:07:09,435 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.771e+02 2.535e+02 2.972e+02 3.582e+02 1.059e+03, threshold=5.945e+02, percent-clipped=4.0 +2023-03-10 00:07:28,569 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5710, 4.1688, 2.6700, 3.9613, 4.0293, 4.1567, 4.0344, 2.6840], + device='cuda:1'), covar=tensor([0.0284, 0.0091, 0.0771, 0.0238, 0.0096, 0.0085, 0.0115, 0.0956], + device='cuda:1'), in_proj_covar=tensor([0.0094, 0.0084, 0.0098, 0.0100, 0.0091, 0.0080, 0.0087, 0.0098], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-10 00:07:57,844 INFO [train.py:898] (1/4) Epoch 29, batch 3000, loss[loss=0.1534, simple_loss=0.2459, pruned_loss=0.03044, over 18492.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2457, pruned_loss=0.03187, over 3582054.66 frames. ], batch size: 51, lr: 3.85e-03, grad_scale: 8.0 +2023-03-10 00:07:57,845 INFO [train.py:923] (1/4) Computing validation loss +2023-03-10 00:08:10,052 INFO [train.py:932] (1/4) Epoch 29, validation: loss=0.1493, simple_loss=0.2471, pruned_loss=0.02574, over 944034.00 frames. 
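Throughout these entries the [train.py:898] lines report two tuples: loss[...] for the current batch, and tot_loss[...] for a running average in which each batch is weighted by its frame count (the "over N frames" figure), which is why tot_loss moves slowly while the per-batch loss jumps around. The validation entry just above ([train.py:932]) applies the same averaging over the whole dev set, hence the constant 944034.00 frames, and the "Maximum memory allocated" entry that follows presumably comes from torch.cuda.max_memory_allocated(). Below is a minimal sketch of such a frame-weighted tracker; the class name and field layout are assumptions for illustration, not the tracker icefall itself uses.

    class RunningLoss:
        """Frame-weighted running average of the logged loss components."""

        def __init__(self):
            self.frames = 0.0
            self.sums = {"loss": 0.0, "simple_loss": 0.0, "pruned_loss": 0.0}

        def update(self, batch_frames, **losses):
            # Weight each batch's losses by its frame count.
            self.frames += batch_frames
            for name, value in losses.items():
                self.sums[name] += value * batch_frames

        def averages(self):
            return {n: s / max(self.frames, 1.0) for n, s in self.sums.items()}

Note that the tot_loss frame count hovers around 3.6e+06 throughout the epoch rather than growing without bound, which suggests the accumulator is reset periodically during training rather than spanning the whole epoch.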
+2023-03-10 00:08:10,053 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-10 00:08:10,333 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104753.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:08:11,912 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.35 vs. limit=5.0 +2023-03-10 00:09:09,592 INFO [train.py:898] (1/4) Epoch 29, batch 3050, loss[loss=0.1405, simple_loss=0.2337, pruned_loss=0.02366, over 18496.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.2455, pruned_loss=0.03173, over 3581709.82 frames. ], batch size: 47, lr: 3.84e-03, grad_scale: 8.0 +2023-03-10 00:09:21,236 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.749e+02 2.540e+02 3.003e+02 3.449e+02 8.629e+02, threshold=6.007e+02, percent-clipped=3.0 +2023-03-10 00:09:38,348 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-03-10 00:09:38,970 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6349, 3.4982, 2.3075, 4.4292, 3.0348, 4.0952, 2.5393, 3.8768], + device='cuda:1'), covar=tensor([0.0717, 0.0890, 0.1549, 0.0480, 0.0971, 0.0359, 0.1291, 0.0456], + device='cuda:1'), in_proj_covar=tensor([0.0228, 0.0238, 0.0200, 0.0304, 0.0202, 0.0278, 0.0212, 0.0213], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 00:10:08,511 INFO [train.py:898] (1/4) Epoch 29, batch 3100, loss[loss=0.1296, simple_loss=0.2169, pruned_loss=0.02111, over 17685.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2454, pruned_loss=0.03197, over 3585423.61 frames. ], batch size: 39, lr: 3.84e-03, grad_scale: 8.0 +2023-03-10 00:10:17,451 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5109, 2.7408, 4.1499, 3.6834, 2.6453, 4.3421, 3.7025, 2.8372], + device='cuda:1'), covar=tensor([0.0551, 0.1538, 0.0308, 0.0443, 0.1545, 0.0234, 0.0676, 0.0962], + device='cuda:1'), in_proj_covar=tensor([0.0226, 0.0250, 0.0239, 0.0176, 0.0231, 0.0223, 0.0264, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-10 00:11:07,592 INFO [train.py:898] (1/4) Epoch 29, batch 3150, loss[loss=0.1444, simple_loss=0.2327, pruned_loss=0.02808, over 18297.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2456, pruned_loss=0.03177, over 3589686.74 frames. ], batch size: 49, lr: 3.84e-03, grad_scale: 8.0 +2023-03-10 00:11:19,626 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.665e+02 2.384e+02 2.834e+02 3.322e+02 6.284e+02, threshold=5.669e+02, percent-clipped=1.0 +2023-03-10 00:12:06,360 INFO [train.py:898] (1/4) Epoch 29, batch 3200, loss[loss=0.1392, simple_loss=0.2268, pruned_loss=0.02574, over 18420.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2458, pruned_loss=0.032, over 3594874.69 frames. ], batch size: 48, lr: 3.84e-03, grad_scale: 8.0 +2023-03-10 00:12:07,151 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.26 vs. limit=5.0 +2023-03-10 00:13:02,332 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-10 00:13:06,492 INFO [train.py:898] (1/4) Epoch 29, batch 3250, loss[loss=0.1459, simple_loss=0.2462, pruned_loss=0.02277, over 18366.00 frames. ], tot_loss[loss=0.1548, simple_loss=0.2457, pruned_loss=0.03193, over 3602280.63 frames. 
], batch size: 55, lr: 3.84e-03, grad_scale: 8.0 +2023-03-10 00:13:17,603 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.817e+02 2.551e+02 3.060e+02 3.706e+02 7.132e+02, threshold=6.121e+02, percent-clipped=2.0 +2023-03-10 00:13:32,808 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-10 00:14:05,587 INFO [train.py:898] (1/4) Epoch 29, batch 3300, loss[loss=0.1468, simple_loss=0.2375, pruned_loss=0.02809, over 18293.00 frames. ], tot_loss[loss=0.1552, simple_loss=0.246, pruned_loss=0.03218, over 3594055.33 frames. ], batch size: 49, lr: 3.84e-03, grad_scale: 8.0 +2023-03-10 00:14:12,557 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7422, 3.0683, 2.7725, 3.0493, 3.7763, 3.7158, 3.2925, 3.0563], + device='cuda:1'), covar=tensor([0.0161, 0.0257, 0.0529, 0.0366, 0.0180, 0.0179, 0.0346, 0.0409], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0149, 0.0168, 0.0168, 0.0145, 0.0132, 0.0165, 0.0167], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-10 00:14:42,119 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8696, 3.7299, 5.1418, 4.5188, 3.4744, 3.2501, 4.5345, 5.3698], + device='cuda:1'), covar=tensor([0.0804, 0.1441, 0.0165, 0.0371, 0.0879, 0.1141, 0.0382, 0.0212], + device='cuda:1'), in_proj_covar=tensor([0.0156, 0.0285, 0.0177, 0.0188, 0.0199, 0.0197, 0.0204, 0.0217], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 00:14:54,126 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6578, 2.5983, 2.6401, 2.5103, 2.5731, 2.2730, 2.3898, 2.6649], + device='cuda:1'), covar=tensor([0.0096, 0.0124, 0.0091, 0.0122, 0.0105, 0.0176, 0.0181, 0.0093], + device='cuda:1'), in_proj_covar=tensor([0.0159, 0.0179, 0.0149, 0.0199, 0.0159, 0.0190, 0.0193, 0.0139], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-10 00:15:03,801 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6445, 4.2931, 4.2397, 3.2274, 3.5561, 3.3767, 2.5912, 2.5175], + device='cuda:1'), covar=tensor([0.0246, 0.0140, 0.0091, 0.0327, 0.0366, 0.0244, 0.0703, 0.0799], + device='cuda:1'), in_proj_covar=tensor([0.0076, 0.0064, 0.0070, 0.0073, 0.0093, 0.0071, 0.0080, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006], + device='cuda:1') +2023-03-10 00:15:04,537 INFO [train.py:898] (1/4) Epoch 29, batch 3350, loss[loss=0.1489, simple_loss=0.2424, pruned_loss=0.02769, over 17888.00 frames. ], tot_loss[loss=0.1557, simple_loss=0.2465, pruned_loss=0.0325, over 3573254.59 frames. ], batch size: 70, lr: 3.84e-03, grad_scale: 8.0 +2023-03-10 00:15:08,408 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.16 vs. limit=5.0 +2023-03-10 00:15:15,596 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.933e+02 2.604e+02 2.932e+02 3.657e+02 1.346e+03, threshold=5.864e+02, percent-clipped=3.0 +2023-03-10 00:15:32,789 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.84 vs. limit=5.0 +2023-03-10 00:16:03,631 INFO [train.py:898] (1/4) Epoch 29, batch 3400, loss[loss=0.1632, simple_loss=0.2552, pruned_loss=0.03561, over 18296.00 frames. ], tot_loss[loss=0.1557, simple_loss=0.2467, pruned_loss=0.03239, over 3578860.47 frames. 
], batch size: 49, lr: 3.84e-03, grad_scale: 8.0 +2023-03-10 00:16:28,115 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-03-10 00:17:01,947 INFO [train.py:898] (1/4) Epoch 29, batch 3450, loss[loss=0.1583, simple_loss=0.2533, pruned_loss=0.03162, over 18612.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2457, pruned_loss=0.03245, over 3577227.74 frames. ], batch size: 52, lr: 3.84e-03, grad_scale: 8.0 +2023-03-10 00:17:13,784 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.580e+02 2.383e+02 2.808e+02 3.318e+02 5.892e+02, threshold=5.615e+02, percent-clipped=1.0 +2023-03-10 00:17:38,616 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9697, 5.5780, 3.0081, 5.3916, 5.3294, 5.5634, 5.4349, 2.8113], + device='cuda:1'), covar=tensor([0.0223, 0.0050, 0.0675, 0.0057, 0.0059, 0.0057, 0.0061, 0.0895], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0084, 0.0098, 0.0100, 0.0091, 0.0080, 0.0087, 0.0098], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-10 00:18:00,553 INFO [train.py:898] (1/4) Epoch 29, batch 3500, loss[loss=0.1607, simple_loss=0.2552, pruned_loss=0.03311, over 18304.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2464, pruned_loss=0.03241, over 3576573.05 frames. ], batch size: 57, lr: 3.84e-03, grad_scale: 8.0 +2023-03-10 00:18:07,094 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4258, 5.3948, 5.0409, 5.3298, 5.2975, 4.7309, 5.2157, 4.9642], + device='cuda:1'), covar=tensor([0.0405, 0.0459, 0.1248, 0.0777, 0.0618, 0.0419, 0.0439, 0.1126], + device='cuda:1'), in_proj_covar=tensor([0.0520, 0.0589, 0.0727, 0.0458, 0.0489, 0.0539, 0.0567, 0.0708], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-10 00:18:16,600 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-03-10 00:18:27,537 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7548, 4.0678, 2.5169, 3.9648, 5.1656, 2.4282, 3.8522, 3.9367], + device='cuda:1'), covar=tensor([0.0300, 0.1226, 0.1734, 0.0771, 0.0127, 0.1401, 0.0707, 0.0771], + device='cuda:1'), in_proj_covar=tensor([0.0189, 0.0287, 0.0213, 0.0204, 0.0148, 0.0188, 0.0225, 0.0234], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 00:18:55,796 INFO [train.py:898] (1/4) Epoch 29, batch 3550, loss[loss=0.1465, simple_loss=0.2348, pruned_loss=0.02907, over 18260.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2462, pruned_loss=0.03221, over 3591696.65 frames. 
], batch size: 45, lr: 3.83e-03, grad_scale: 8.0 +2023-03-10 00:18:59,265 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8083, 5.3096, 5.2897, 5.2825, 4.7434, 5.1896, 4.6737, 5.1918], + device='cuda:1'), covar=tensor([0.0269, 0.0287, 0.0210, 0.0453, 0.0423, 0.0247, 0.1068, 0.0321], + device='cuda:1'), in_proj_covar=tensor([0.0236, 0.0280, 0.0282, 0.0363, 0.0290, 0.0291, 0.0322, 0.0283], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-10 00:19:06,453 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.798e+02 2.434e+02 2.865e+02 3.612e+02 9.086e+02, threshold=5.730e+02, percent-clipped=2.0 +2023-03-10 00:19:15,167 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7459, 4.2025, 2.2767, 3.9864, 5.2428, 2.7549, 3.6129, 3.8391], + device='cuda:1'), covar=tensor([0.0250, 0.1237, 0.1892, 0.0769, 0.0100, 0.1200, 0.0854, 0.0999], + device='cuda:1'), in_proj_covar=tensor([0.0189, 0.0287, 0.0212, 0.0204, 0.0148, 0.0188, 0.0225, 0.0234], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 00:19:50,743 INFO [train.py:898] (1/4) Epoch 29, batch 3600, loss[loss=0.1278, simple_loss=0.2109, pruned_loss=0.02234, over 18450.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2458, pruned_loss=0.03181, over 3605130.26 frames. ], batch size: 43, lr: 3.83e-03, grad_scale: 8.0 +2023-03-10 00:20:53,444 INFO [train.py:898] (1/4) Epoch 30, batch 0, loss[loss=0.1661, simple_loss=0.258, pruned_loss=0.03706, over 16921.00 frames. ], tot_loss[loss=0.1661, simple_loss=0.258, pruned_loss=0.03706, over 16921.00 frames. ], batch size: 78, lr: 3.77e-03, grad_scale: 8.0 +2023-03-10 00:20:53,444 INFO [train.py:923] (1/4) Computing validation loss +2023-03-10 00:21:02,355 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.7677, 4.6733, 4.7487, 4.4384, 4.4421, 4.5039, 4.8025, 4.8100], + device='cuda:1'), covar=tensor([0.0082, 0.0071, 0.0070, 0.0137, 0.0086, 0.0158, 0.0092, 0.0115], + device='cuda:1'), in_proj_covar=tensor([0.0101, 0.0075, 0.0080, 0.0101, 0.0080, 0.0108, 0.0092, 0.0092], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-10 00:21:05,357 INFO [train.py:932] (1/4) Epoch 30, validation: loss=0.1503, simple_loss=0.2477, pruned_loss=0.02643, over 944034.00 frames. 
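The transition just above, from Epoch 29, batch 3600 (lr: 3.83e-03) to Epoch 30, batch 0 (lr: 3.77e-03), shows the learning-rate schedule stepping down at the epoch boundary as well as drifting down with the batch index (it was 3.88e-03 early in Epoch 29). A sketch in the style of icefall's Eden scheduler is below; the constants and exponents are assumptions, chosen because they reproduce the logged values to three significant figures.

    def eden_lr(base_lr, batch, epoch, lr_batches=5000.0, lr_epochs=3.5):
        # Inverse-power decay in both the batch index and the zero-based
        # epoch index (all constants here are illustrative assumptions).
        batch_f = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
        epoch_f = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
        return base_lr * batch_f * epoch_f

    # eden_lr(0.05, 102851, 28) ~= 3.88e-03  (Epoch 29, early batches)
    # eden_lr(0.05, 105388, 29) ~= 3.77e-03  (Epoch 30, batch 0)

Under these assumed constants the smooth batch term explains the slow within-epoch drift (3.88e-03 down to 3.83e-03 across Epoch 29), while the epoch term produces the discrete drop seen at each epoch boundary.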
+2023-03-10 00:21:05,358 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-10 00:21:06,906 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=105388.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:21:11,634 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1344, 3.9075, 5.3382, 3.2959, 4.7581, 2.7843, 3.3024, 1.9734], + device='cuda:1'), covar=tensor([0.1146, 0.0983, 0.0167, 0.0934, 0.0468, 0.2790, 0.2744, 0.2344], + device='cuda:1'), in_proj_covar=tensor([0.0234, 0.0256, 0.0235, 0.0209, 0.0268, 0.0283, 0.0340, 0.0250], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-10 00:21:36,266 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.589e+02 2.529e+02 2.877e+02 3.490e+02 8.878e+02, threshold=5.754e+02, percent-clipped=2.0 +2023-03-10 00:22:04,708 INFO [train.py:898] (1/4) Epoch 30, batch 50, loss[loss=0.1702, simple_loss=0.2604, pruned_loss=0.03996, over 15858.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.2447, pruned_loss=0.03113, over 815092.55 frames. ], batch size: 94, lr: 3.77e-03, grad_scale: 4.0 +2023-03-10 00:22:18,504 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=105449.0, num_to_drop=1, layers_to_drop={3} +2023-03-10 00:22:26,470 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.11 vs. limit=5.0 +2023-03-10 00:22:30,155 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.3979, 5.3031, 5.6784, 5.7554, 5.3053, 6.2071, 5.8843, 5.5167], + device='cuda:1'), covar=tensor([0.1070, 0.0638, 0.0729, 0.0692, 0.1378, 0.0646, 0.0726, 0.1717], + device='cuda:1'), in_proj_covar=tensor([0.0377, 0.0309, 0.0337, 0.0340, 0.0343, 0.0451, 0.0305, 0.0443], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-10 00:23:03,189 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=105486.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:23:03,939 INFO [train.py:898] (1/4) Epoch 30, batch 100, loss[loss=0.1735, simple_loss=0.273, pruned_loss=0.03698, over 16101.00 frames. ], tot_loss[loss=0.1531, simple_loss=0.2448, pruned_loss=0.03066, over 1438387.11 frames. ], batch size: 94, lr: 3.77e-03, grad_scale: 2.0 +2023-03-10 00:23:21,382 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=105502.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:23:36,854 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.707e+02 2.490e+02 2.952e+02 3.451e+02 6.193e+02, threshold=5.904e+02, percent-clipped=1.0 +2023-03-10 00:23:50,602 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7236, 2.4165, 2.6943, 2.8051, 3.2900, 4.7972, 4.8557, 3.1648], + device='cuda:1'), covar=tensor([0.2169, 0.2575, 0.3205, 0.1967, 0.2530, 0.0300, 0.0326, 0.1209], + device='cuda:1'), in_proj_covar=tensor([0.0339, 0.0370, 0.0423, 0.0297, 0.0404, 0.0273, 0.0306, 0.0278], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-03-10 00:24:02,926 INFO [train.py:898] (1/4) Epoch 30, batch 150, loss[loss=0.1616, simple_loss=0.2525, pruned_loss=0.03531, over 18619.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2455, pruned_loss=0.0314, over 1918369.88 frames. 
], batch size: 52, lr: 3.77e-03, grad_scale: 2.0 +2023-03-10 00:24:14,932 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=105547.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:24:23,057 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.00 vs. limit=5.0 +2023-03-10 00:24:33,635 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=105563.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:25:02,151 INFO [train.py:898] (1/4) Epoch 30, batch 200, loss[loss=0.1461, simple_loss=0.237, pruned_loss=0.02758, over 18402.00 frames. ], tot_loss[loss=0.1539, simple_loss=0.2451, pruned_loss=0.03135, over 2291293.54 frames. ], batch size: 50, lr: 3.76e-03, grad_scale: 2.0 +2023-03-10 00:25:06,919 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8969, 4.1753, 2.5680, 4.0330, 5.2424, 2.8403, 3.9485, 3.9960], + device='cuda:1'), covar=tensor([0.0235, 0.1198, 0.1639, 0.0699, 0.0106, 0.1156, 0.0665, 0.0770], + device='cuda:1'), in_proj_covar=tensor([0.0188, 0.0285, 0.0212, 0.0203, 0.0147, 0.0187, 0.0224, 0.0233], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 00:25:34,558 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.818e+02 2.661e+02 3.049e+02 3.613e+02 6.015e+02, threshold=6.098e+02, percent-clipped=1.0 +2023-03-10 00:25:34,984 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5519, 3.3329, 2.1724, 4.3093, 3.0665, 4.0521, 2.3440, 3.7545], + device='cuda:1'), covar=tensor([0.0653, 0.1016, 0.1570, 0.0502, 0.0914, 0.0339, 0.1384, 0.0463], + device='cuda:1'), in_proj_covar=tensor([0.0226, 0.0237, 0.0200, 0.0305, 0.0201, 0.0276, 0.0212, 0.0213], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 00:26:01,239 INFO [train.py:898] (1/4) Epoch 30, batch 250, loss[loss=0.165, simple_loss=0.2651, pruned_loss=0.03247, over 18083.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2463, pruned_loss=0.03152, over 2577414.06 frames. ], batch size: 62, lr: 3.76e-03, grad_scale: 2.0 +2023-03-10 00:26:16,612 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-10 00:26:29,608 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8830, 5.4263, 2.5692, 5.1980, 5.1006, 5.3957, 5.2520, 2.6611], + device='cuda:1'), covar=tensor([0.0262, 0.0059, 0.0901, 0.0084, 0.0091, 0.0072, 0.0080, 0.1034], + device='cuda:1'), in_proj_covar=tensor([0.0093, 0.0083, 0.0097, 0.0099, 0.0090, 0.0080, 0.0087, 0.0098], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:1') +2023-03-10 00:26:56,613 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.3165, 2.4326, 3.9760, 3.5517, 2.2257, 4.1315, 3.5765, 2.5757], + device='cuda:1'), covar=tensor([0.0588, 0.1878, 0.0339, 0.0431, 0.2045, 0.0278, 0.0646, 0.1350], + device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0247, 0.0239, 0.0176, 0.0227, 0.0222, 0.0261, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-10 00:26:59,685 INFO [train.py:898] (1/4) Epoch 30, batch 300, loss[loss=0.1698, simple_loss=0.2658, pruned_loss=0.03695, over 17947.00 frames. ], tot_loss[loss=0.155, simple_loss=0.2464, pruned_loss=0.03184, over 2786587.67 frames. 
], batch size: 65, lr: 3.76e-03, grad_scale: 2.0 +2023-03-10 00:27:07,407 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4700, 2.9465, 4.3786, 3.4870, 2.6539, 4.5593, 3.8534, 2.9418], + device='cuda:1'), covar=tensor([0.0659, 0.1441, 0.0273, 0.0573, 0.1577, 0.0244, 0.0540, 0.0975], + device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0247, 0.0239, 0.0176, 0.0228, 0.0223, 0.0261, 0.0201], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-10 00:27:32,634 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.857e+02 2.459e+02 2.793e+02 3.329e+02 5.201e+02, threshold=5.586e+02, percent-clipped=0.0 +2023-03-10 00:27:58,846 INFO [train.py:898] (1/4) Epoch 30, batch 350, loss[loss=0.156, simple_loss=0.2506, pruned_loss=0.03072, over 16939.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2449, pruned_loss=0.0313, over 2969864.44 frames. ], batch size: 78, lr: 3.76e-03, grad_scale: 2.0 +2023-03-10 00:28:08,226 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105744.0, num_to_drop=1, layers_to_drop={0} +2023-03-10 00:28:57,668 INFO [train.py:898] (1/4) Epoch 30, batch 400, loss[loss=0.1385, simple_loss=0.2223, pruned_loss=0.02738, over 18499.00 frames. ], tot_loss[loss=0.1539, simple_loss=0.2451, pruned_loss=0.03137, over 3117577.35 frames. ], batch size: 47, lr: 3.76e-03, grad_scale: 4.0 +2023-03-10 00:29:03,566 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=105792.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:29:12,623 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=105799.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:29:30,416 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.921e+02 2.502e+02 2.978e+02 3.750e+02 7.883e+02, threshold=5.955e+02, percent-clipped=2.0 +2023-03-10 00:29:35,869 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. limit=5.0 +2023-03-10 00:29:47,179 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5872, 4.2360, 4.1759, 3.2413, 3.5178, 3.2306, 2.4374, 2.4099], + device='cuda:1'), covar=tensor([0.0245, 0.0150, 0.0107, 0.0362, 0.0343, 0.0267, 0.0832, 0.0904], + device='cuda:1'), in_proj_covar=tensor([0.0077, 0.0064, 0.0071, 0.0073, 0.0093, 0.0072, 0.0080, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0006, 0.0005, 0.0005, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006], + device='cuda:1') +2023-03-10 00:29:55,602 INFO [train.py:898] (1/4) Epoch 30, batch 450, loss[loss=0.1265, simple_loss=0.2037, pruned_loss=0.02468, over 18374.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2448, pruned_loss=0.03142, over 3212580.22 frames. 
], batch size: 42, lr: 3.76e-03, grad_scale: 4.0 +2023-03-10 00:30:01,522 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105842.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:30:15,350 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=105853.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:30:20,890 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105858.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:30:23,534 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=105860.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:30:54,159 INFO [train.py:898] (1/4) Epoch 30, batch 500, loss[loss=0.1428, simple_loss=0.2358, pruned_loss=0.02496, over 18384.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2453, pruned_loss=0.03143, over 3300348.96 frames. ], batch size: 50, lr: 3.76e-03, grad_scale: 4.0 +2023-03-10 00:31:21,509 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.6360, 5.5887, 5.2607, 5.5511, 5.5321, 5.0043, 5.4659, 5.1906], + device='cuda:1'), covar=tensor([0.0421, 0.0425, 0.1294, 0.0775, 0.0595, 0.0403, 0.0421, 0.1069], + device='cuda:1'), in_proj_covar=tensor([0.0519, 0.0588, 0.0725, 0.0456, 0.0491, 0.0535, 0.0566, 0.0707], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-10 00:31:25,880 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.730e+02 2.387e+02 2.835e+02 3.555e+02 6.672e+02, threshold=5.669e+02, percent-clipped=2.0 +2023-03-10 00:31:51,016 INFO [train.py:898] (1/4) Epoch 30, batch 550, loss[loss=0.1624, simple_loss=0.2545, pruned_loss=0.03515, over 18494.00 frames. ], tot_loss[loss=0.154, simple_loss=0.245, pruned_loss=0.03147, over 3367101.93 frames. ], batch size: 51, lr: 3.76e-03, grad_scale: 4.0 +2023-03-10 00:32:20,090 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6850, 4.7530, 4.8353, 4.5048, 4.5279, 4.5264, 4.8561, 4.9014], + device='cuda:1'), covar=tensor([0.0080, 0.0076, 0.0076, 0.0135, 0.0070, 0.0189, 0.0080, 0.0094], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0077, 0.0082, 0.0103, 0.0082, 0.0112, 0.0094, 0.0094], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-10 00:32:47,909 INFO [train.py:898] (1/4) Epoch 30, batch 600, loss[loss=0.1582, simple_loss=0.2517, pruned_loss=0.03239, over 18394.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2456, pruned_loss=0.03142, over 3424167.53 frames. ], batch size: 52, lr: 3.76e-03, grad_scale: 4.0 +2023-03-10 00:33:26,793 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.547e+02 2.468e+02 2.895e+02 3.494e+02 7.378e+02, threshold=5.790e+02, percent-clipped=2.0 +2023-03-10 00:33:51,606 INFO [train.py:898] (1/4) Epoch 30, batch 650, loss[loss=0.1545, simple_loss=0.2456, pruned_loss=0.03174, over 18227.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2451, pruned_loss=0.0312, over 3472483.69 frames. ], batch size: 60, lr: 3.76e-03, grad_scale: 4.0 +2023-03-10 00:34:00,493 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106044.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:34:29,798 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. 
limit=2.0 +2023-03-10 00:34:38,622 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106077.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:34:49,502 INFO [train.py:898] (1/4) Epoch 30, batch 700, loss[loss=0.1381, simple_loss=0.2259, pruned_loss=0.02513, over 18500.00 frames. ], tot_loss[loss=0.153, simple_loss=0.2443, pruned_loss=0.03087, over 3513220.90 frames. ], batch size: 47, lr: 3.76e-03, grad_scale: 4.0 +2023-03-10 00:34:55,336 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106092.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:35:14,027 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-03-10 00:35:22,424 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.614e+02 2.517e+02 2.950e+02 3.495e+02 6.202e+02, threshold=5.900e+02, percent-clipped=1.0 +2023-03-10 00:35:48,487 INFO [train.py:898] (1/4) Epoch 30, batch 750, loss[loss=0.1274, simple_loss=0.2188, pruned_loss=0.018, over 18245.00 frames. ], tot_loss[loss=0.1531, simple_loss=0.2445, pruned_loss=0.03086, over 3529498.10 frames. ], batch size: 45, lr: 3.75e-03, grad_scale: 4.0 +2023-03-10 00:35:49,956 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106138.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:35:54,356 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106142.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:36:01,524 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106148.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:36:09,951 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106155.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:36:13,383 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106158.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:36:46,888 INFO [train.py:898] (1/4) Epoch 30, batch 800, loss[loss=0.1645, simple_loss=0.2552, pruned_loss=0.03693, over 18283.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.2447, pruned_loss=0.03119, over 3543896.79 frames. ], batch size: 57, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:36:50,256 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106190.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:37:09,721 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106206.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:37:19,670 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.545e+02 2.490e+02 2.877e+02 3.308e+02 6.408e+02, threshold=5.753e+02, percent-clipped=1.0 +2023-03-10 00:37:45,816 INFO [train.py:898] (1/4) Epoch 30, batch 850, loss[loss=0.1563, simple_loss=0.2496, pruned_loss=0.03154, over 17287.00 frames. ], tot_loss[loss=0.1536, simple_loss=0.2448, pruned_loss=0.03121, over 3547092.30 frames. ], batch size: 78, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:38:11,947 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.49 vs. 
limit=2.0 +2023-03-10 00:38:19,527 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9325, 3.2447, 4.5829, 4.0460, 3.1238, 4.8486, 4.1440, 3.2775], + device='cuda:1'), covar=tensor([0.0450, 0.1291, 0.0281, 0.0413, 0.1310, 0.0221, 0.0529, 0.0859], + device='cuda:1'), in_proj_covar=tensor([0.0223, 0.0245, 0.0238, 0.0173, 0.0225, 0.0220, 0.0259, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-10 00:38:21,722 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106268.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:38:42,938 INFO [train.py:898] (1/4) Epoch 30, batch 900, loss[loss=0.1438, simple_loss=0.2386, pruned_loss=0.02451, over 18497.00 frames. ], tot_loss[loss=0.1536, simple_loss=0.2446, pruned_loss=0.03126, over 3565311.85 frames. ], batch size: 51, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:39:06,052 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9277, 5.4189, 2.8386, 5.2421, 5.1448, 5.3911, 5.2027, 2.6786], + device='cuda:1'), covar=tensor([0.0245, 0.0053, 0.0766, 0.0071, 0.0070, 0.0071, 0.0089, 0.0993], + device='cuda:1'), in_proj_covar=tensor([0.0094, 0.0085, 0.0099, 0.0101, 0.0092, 0.0081, 0.0088, 0.0099], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-10 00:39:15,256 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.738e+02 2.494e+02 3.069e+02 3.732e+02 1.058e+03, threshold=6.138e+02, percent-clipped=4.0 +2023-03-10 00:39:27,411 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-03-10 00:39:31,401 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106329.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:39:39,789 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106336.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:39:40,560 INFO [train.py:898] (1/4) Epoch 30, batch 950, loss[loss=0.1939, simple_loss=0.2709, pruned_loss=0.05845, over 12411.00 frames. ], tot_loss[loss=0.1531, simple_loss=0.244, pruned_loss=0.0311, over 3570032.39 frames. ], batch size: 130, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:39:40,943 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5638, 3.0239, 4.4005, 3.6156, 2.6102, 4.5477, 4.0086, 2.7831], + device='cuda:1'), covar=tensor([0.0588, 0.1328, 0.0290, 0.0523, 0.1627, 0.0252, 0.0541, 0.1018], + device='cuda:1'), in_proj_covar=tensor([0.0223, 0.0245, 0.0238, 0.0173, 0.0225, 0.0220, 0.0259, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-10 00:40:39,192 INFO [train.py:898] (1/4) Epoch 30, batch 1000, loss[loss=0.1753, simple_loss=0.267, pruned_loss=0.04185, over 18130.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2446, pruned_loss=0.03095, over 3582529.76 frames. 
], batch size: 62, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:40:50,684 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106397.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:41:00,936 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2068, 5.1899, 5.4798, 5.5155, 5.1223, 5.9971, 5.5967, 5.1229], + device='cuda:1'), covar=tensor([0.1116, 0.0664, 0.0742, 0.0704, 0.1434, 0.0706, 0.0732, 0.1814], + device='cuda:1'), in_proj_covar=tensor([0.0378, 0.0308, 0.0337, 0.0339, 0.0345, 0.0448, 0.0305, 0.0446], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-10 00:41:06,562 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. limit=2.0 +2023-03-10 00:41:11,526 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.702e+02 2.421e+02 2.824e+02 3.495e+02 5.852e+02, threshold=5.647e+02, percent-clipped=0.0 +2023-03-10 00:41:11,782 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0261, 4.5177, 4.2626, 4.3699, 4.0882, 4.7724, 4.4504, 4.1476], + device='cuda:1'), covar=tensor([0.1521, 0.1186, 0.1048, 0.0855, 0.1653, 0.1205, 0.0834, 0.1757], + device='cuda:1'), in_proj_covar=tensor([0.0378, 0.0308, 0.0337, 0.0339, 0.0345, 0.0449, 0.0306, 0.0446], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-10 00:41:20,484 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.9779, 5.0032, 5.0588, 4.7935, 4.8180, 4.8879, 5.1733, 5.1731], + device='cuda:1'), covar=tensor([0.0070, 0.0065, 0.0055, 0.0116, 0.0064, 0.0158, 0.0067, 0.0088], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0077, 0.0083, 0.0104, 0.0083, 0.0113, 0.0095, 0.0095], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-10 00:41:32,642 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106433.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:41:37,594 INFO [train.py:898] (1/4) Epoch 30, batch 1050, loss[loss=0.1566, simple_loss=0.2507, pruned_loss=0.0312, over 18007.00 frames. ], tot_loss[loss=0.1539, simple_loss=0.2454, pruned_loss=0.03114, over 3592960.43 frames. 
], batch size: 65, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:41:51,035 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106448.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:41:57,866 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106454.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:41:58,933 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106455.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:42:23,846 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8086, 3.2788, 4.5732, 3.8103, 2.7160, 4.8205, 4.1719, 3.2326], + device='cuda:1'), covar=tensor([0.0511, 0.1249, 0.0275, 0.0495, 0.1507, 0.0211, 0.0487, 0.0879], + device='cuda:1'), in_proj_covar=tensor([0.0223, 0.0245, 0.0237, 0.0173, 0.0225, 0.0220, 0.0258, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-10 00:42:33,723 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8398, 3.8000, 5.1101, 4.4606, 3.3839, 3.1123, 4.5053, 5.3481], + device='cuda:1'), covar=tensor([0.0868, 0.1432, 0.0195, 0.0384, 0.0953, 0.1164, 0.0387, 0.0201], + device='cuda:1'), in_proj_covar=tensor([0.0156, 0.0287, 0.0179, 0.0189, 0.0201, 0.0197, 0.0204, 0.0218], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 00:42:35,946 INFO [train.py:898] (1/4) Epoch 30, batch 1100, loss[loss=0.1728, simple_loss=0.2686, pruned_loss=0.03853, over 16270.00 frames. ], tot_loss[loss=0.1534, simple_loss=0.2447, pruned_loss=0.03105, over 3598560.03 frames. ], batch size: 94, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:42:36,440 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9450, 3.7661, 4.9102, 4.1960, 3.3635, 2.8929, 4.2710, 5.1347], + device='cuda:1'), covar=tensor([0.0814, 0.1311, 0.0290, 0.0504, 0.1005, 0.1340, 0.0528, 0.0552], + device='cuda:1'), in_proj_covar=tensor([0.0156, 0.0287, 0.0179, 0.0189, 0.0201, 0.0197, 0.0204, 0.0218], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 00:42:46,761 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106496.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:42:54,830 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106503.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:43:08,575 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.832e+02 2.448e+02 2.852e+02 3.322e+02 5.869e+02, threshold=5.705e+02, percent-clipped=1.0 +2023-03-10 00:43:09,021 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106515.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:43:33,593 INFO [train.py:898] (1/4) Epoch 30, batch 1150, loss[loss=0.1543, simple_loss=0.2452, pruned_loss=0.03167, over 18504.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2449, pruned_loss=0.03128, over 3599566.22 frames. 
], batch size: 51, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:43:51,950 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8930, 3.6253, 4.9210, 4.2951, 3.2868, 2.9575, 4.3474, 5.1586], + device='cuda:1'), covar=tensor([0.0754, 0.1378, 0.0234, 0.0406, 0.0990, 0.1183, 0.0407, 0.0205], + device='cuda:1'), in_proj_covar=tensor([0.0157, 0.0288, 0.0180, 0.0190, 0.0202, 0.0198, 0.0205, 0.0219], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 00:44:32,010 INFO [train.py:898] (1/4) Epoch 30, batch 1200, loss[loss=0.1529, simple_loss=0.2417, pruned_loss=0.0321, over 18405.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2447, pruned_loss=0.03143, over 3586505.03 frames. ], batch size: 50, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:45:04,656 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.878e+02 2.700e+02 3.058e+02 3.812e+02 6.890e+02, threshold=6.116e+02, percent-clipped=3.0 +2023-03-10 00:45:15,360 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106624.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:45:30,329 INFO [train.py:898] (1/4) Epoch 30, batch 1250, loss[loss=0.1623, simple_loss=0.2557, pruned_loss=0.03449, over 18286.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2448, pruned_loss=0.03138, over 3587117.62 frames. ], batch size: 57, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:45:55,850 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8433, 4.5932, 4.6303, 3.6186, 3.8091, 3.5188, 2.8370, 2.6692], + device='cuda:1'), covar=tensor([0.0258, 0.0156, 0.0081, 0.0287, 0.0343, 0.0253, 0.0652, 0.0793], + device='cuda:1'), in_proj_covar=tensor([0.0077, 0.0064, 0.0071, 0.0073, 0.0092, 0.0071, 0.0079, 0.0087], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006], + device='cuda:1') +2023-03-10 00:46:00,791 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-10 00:46:28,923 INFO [train.py:898] (1/4) Epoch 30, batch 1300, loss[loss=0.1523, simple_loss=0.2476, pruned_loss=0.02847, over 18508.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2451, pruned_loss=0.03149, over 3586612.17 frames. ], batch size: 53, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:46:34,898 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106692.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:46:44,272 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.1855, 2.6013, 2.3419, 2.5842, 3.2375, 3.1279, 2.8561, 2.6385], + device='cuda:1'), covar=tensor([0.0197, 0.0313, 0.0595, 0.0435, 0.0232, 0.0207, 0.0452, 0.0412], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0152, 0.0170, 0.0170, 0.0146, 0.0133, 0.0165, 0.0168], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-10 00:47:01,417 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.810e+02 2.359e+02 2.806e+02 3.557e+02 5.057e+02, threshold=5.611e+02, percent-clipped=0.0 +2023-03-10 00:47:22,082 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106733.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:47:26,896 INFO [train.py:898] (1/4) Epoch 30, batch 1350, loss[loss=0.1704, simple_loss=0.2651, pruned_loss=0.03783, over 18471.00 frames. 
], tot_loss[loss=0.1545, simple_loss=0.2458, pruned_loss=0.03162, over 3589665.21 frames. ], batch size: 59, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:47:27,312 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.5235, 2.2899, 2.4622, 2.5058, 2.9565, 4.4738, 4.5302, 3.1616], + device='cuda:1'), covar=tensor([0.2272, 0.2790, 0.3395, 0.2197, 0.2931, 0.0362, 0.0380, 0.1094], + device='cuda:1'), in_proj_covar=tensor([0.0339, 0.0367, 0.0422, 0.0294, 0.0402, 0.0272, 0.0305, 0.0278], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:1') +2023-03-10 00:48:18,857 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106781.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:48:25,443 INFO [train.py:898] (1/4) Epoch 30, batch 1400, loss[loss=0.1763, simple_loss=0.2657, pruned_loss=0.04349, over 18225.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.2446, pruned_loss=0.03124, over 3593191.89 frames. ], batch size: 60, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:48:45,021 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-10 00:48:52,859 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106810.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:48:58,341 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.725e+02 2.645e+02 3.103e+02 3.852e+02 9.390e+02, threshold=6.206e+02, percent-clipped=1.0 +2023-03-10 00:49:00,902 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4811, 5.4418, 5.1018, 5.3616, 5.3811, 4.8255, 5.3073, 5.0552], + device='cuda:1'), covar=tensor([0.0403, 0.0427, 0.1273, 0.0743, 0.0617, 0.0409, 0.0414, 0.1052], + device='cuda:1'), in_proj_covar=tensor([0.0529, 0.0601, 0.0742, 0.0465, 0.0500, 0.0546, 0.0580, 0.0724], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0005, 0.0005, 0.0006], + device='cuda:1') +2023-03-10 00:49:23,850 INFO [train.py:898] (1/4) Epoch 30, batch 1450, loss[loss=0.1357, simple_loss=0.2239, pruned_loss=0.02377, over 18417.00 frames. ], tot_loss[loss=0.1539, simple_loss=0.245, pruned_loss=0.03143, over 3572831.99 frames. ], batch size: 43, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:49:54,960 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8388, 3.4287, 2.6222, 3.2983, 4.0172, 2.5472, 3.3426, 3.4421], + device='cuda:1'), covar=tensor([0.0300, 0.1151, 0.1371, 0.0699, 0.0215, 0.1162, 0.0695, 0.0712], + device='cuda:1'), in_proj_covar=tensor([0.0188, 0.0286, 0.0212, 0.0204, 0.0148, 0.0187, 0.0225, 0.0233], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 00:50:22,116 INFO [train.py:898] (1/4) Epoch 30, batch 1500, loss[loss=0.1656, simple_loss=0.2602, pruned_loss=0.03544, over 18305.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2456, pruned_loss=0.03175, over 3573349.90 frames. 
], batch size: 54, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:50:55,405 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.852e+02 2.528e+02 2.938e+02 3.558e+02 8.053e+02, threshold=5.876e+02, percent-clipped=1.0 +2023-03-10 00:51:05,828 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106924.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:51:20,623 INFO [train.py:898] (1/4) Epoch 30, batch 1550, loss[loss=0.1705, simple_loss=0.2638, pruned_loss=0.03862, over 18482.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2461, pruned_loss=0.03186, over 3566368.46 frames. ], batch size: 59, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:51:29,932 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6515, 3.1080, 3.8211, 3.5820, 2.9317, 2.8681, 3.5318, 3.9679], + device='cuda:1'), covar=tensor([0.0695, 0.1218, 0.0339, 0.0502, 0.0921, 0.1075, 0.0503, 0.0385], + device='cuda:1'), in_proj_covar=tensor([0.0158, 0.0289, 0.0181, 0.0191, 0.0202, 0.0199, 0.0207, 0.0221], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 00:52:01,403 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106972.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:52:18,619 INFO [train.py:898] (1/4) Epoch 30, batch 1600, loss[loss=0.1423, simple_loss=0.2324, pruned_loss=0.02616, over 18366.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2451, pruned_loss=0.03169, over 3576806.45 frames. ], batch size: 46, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:52:20,116 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7077, 3.4931, 2.1992, 4.3819, 3.1175, 3.6906, 2.3369, 3.8166], + device='cuda:1'), covar=tensor([0.0518, 0.0793, 0.1480, 0.0512, 0.0776, 0.0423, 0.1390, 0.0488], + device='cuda:1'), in_proj_covar=tensor([0.0224, 0.0233, 0.0197, 0.0300, 0.0199, 0.0272, 0.0208, 0.0210], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 00:52:24,700 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106992.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:52:50,787 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.531e+02 2.519e+02 3.033e+02 3.655e+02 1.046e+03, threshold=6.066e+02, percent-clipped=4.0 +2023-03-10 00:53:16,396 INFO [train.py:898] (1/4) Epoch 30, batch 1650, loss[loss=0.1576, simple_loss=0.252, pruned_loss=0.03157, over 16065.00 frames. ], tot_loss[loss=0.1548, simple_loss=0.246, pruned_loss=0.03185, over 3580675.53 frames. ], batch size: 95, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:53:20,438 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107040.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:53:53,691 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.51 vs. 
limit=5.0 +2023-03-10 00:54:02,489 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8406, 4.5254, 4.5664, 3.5618, 3.7202, 3.5345, 2.6178, 2.5611], + device='cuda:1'), covar=tensor([0.0248, 0.0175, 0.0093, 0.0337, 0.0375, 0.0247, 0.0788, 0.0901], + device='cuda:1'), in_proj_covar=tensor([0.0077, 0.0064, 0.0071, 0.0073, 0.0093, 0.0071, 0.0080, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006], + device='cuda:1') +2023-03-10 00:54:14,366 INFO [train.py:898] (1/4) Epoch 30, batch 1700, loss[loss=0.1333, simple_loss=0.2168, pruned_loss=0.02488, over 18380.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2452, pruned_loss=0.0318, over 3586384.82 frames. ], batch size: 42, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:54:19,908 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8349, 3.5704, 4.8239, 2.8636, 4.2174, 2.5384, 2.9491, 1.8297], + device='cuda:1'), covar=tensor([0.1297, 0.1028, 0.0197, 0.1101, 0.0537, 0.2798, 0.2791, 0.2427], + device='cuda:1'), in_proj_covar=tensor([0.0236, 0.0259, 0.0238, 0.0212, 0.0271, 0.0286, 0.0344, 0.0253], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-10 00:54:41,457 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107110.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:54:46,858 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.859e+02 2.555e+02 2.944e+02 3.831e+02 7.585e+02, threshold=5.887e+02, percent-clipped=1.0 +2023-03-10 00:55:12,542 INFO [train.py:898] (1/4) Epoch 30, batch 1750, loss[loss=0.1739, simple_loss=0.2635, pruned_loss=0.04218, over 18367.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.2452, pruned_loss=0.03167, over 3583453.97 frames. ], batch size: 56, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:55:16,967 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107140.0, num_to_drop=1, layers_to_drop={1} +2023-03-10 00:55:30,539 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107152.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:55:37,675 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107158.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:56:12,004 INFO [train.py:898] (1/4) Epoch 30, batch 1800, loss[loss=0.1754, simple_loss=0.2639, pruned_loss=0.04346, over 18453.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2446, pruned_loss=0.0314, over 3590394.68 frames. ], batch size: 59, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:56:28,966 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107201.0, num_to_drop=1, layers_to_drop={2} +2023-03-10 00:56:43,010 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107213.0, num_to_drop=1, layers_to_drop={1} +2023-03-10 00:56:44,846 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.514e+02 2.402e+02 2.925e+02 3.436e+02 5.429e+02, threshold=5.850e+02, percent-clipped=0.0 +2023-03-10 00:57:09,816 INFO [train.py:898] (1/4) Epoch 30, batch 1850, loss[loss=0.1362, simple_loss=0.2202, pruned_loss=0.02606, over 17192.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2456, pruned_loss=0.03179, over 3570874.01 frames. 
], batch size: 38, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:57:21,854 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.5747, 5.5372, 5.1638, 5.4816, 5.4621, 4.8902, 5.3687, 5.1331], + device='cuda:1'), covar=tensor([0.0421, 0.0421, 0.1274, 0.0776, 0.0560, 0.0400, 0.0448, 0.1115], + device='cuda:1'), in_proj_covar=tensor([0.0525, 0.0595, 0.0738, 0.0464, 0.0498, 0.0541, 0.0573, 0.0718], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:1') +2023-03-10 00:58:08,615 INFO [train.py:898] (1/4) Epoch 30, batch 1900, loss[loss=0.1711, simple_loss=0.2709, pruned_loss=0.03565, over 17934.00 frames. ], tot_loss[loss=0.1552, simple_loss=0.2463, pruned_loss=0.03208, over 3567067.06 frames. ], batch size: 65, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 00:58:36,660 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-10 00:58:41,095 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.738e+02 2.430e+02 2.951e+02 3.799e+02 8.497e+02, threshold=5.903e+02, percent-clipped=5.0 +2023-03-10 00:58:53,443 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9340, 3.8041, 5.1874, 2.9439, 4.5574, 2.5496, 3.2584, 1.7786], + device='cuda:1'), covar=tensor([0.1301, 0.0949, 0.0201, 0.1074, 0.0511, 0.2869, 0.2695, 0.2448], + device='cuda:1'), in_proj_covar=tensor([0.0234, 0.0256, 0.0236, 0.0211, 0.0268, 0.0284, 0.0341, 0.0250], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-10 00:59:07,029 INFO [train.py:898] (1/4) Epoch 30, batch 1950, loss[loss=0.1406, simple_loss=0.2304, pruned_loss=0.02534, over 18288.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2464, pruned_loss=0.03215, over 3572924.65 frames. ], batch size: 49, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 00:59:09,612 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.5152, 2.8817, 4.2683, 3.6257, 2.7508, 4.5782, 3.8971, 3.0318], + device='cuda:1'), covar=tensor([0.0589, 0.1499, 0.0343, 0.0543, 0.1476, 0.0197, 0.0571, 0.0876], + device='cuda:1'), in_proj_covar=tensor([0.0225, 0.0245, 0.0239, 0.0175, 0.0226, 0.0220, 0.0259, 0.0199], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-10 01:00:04,529 INFO [train.py:898] (1/4) Epoch 30, batch 2000, loss[loss=0.1512, simple_loss=0.2497, pruned_loss=0.02637, over 18480.00 frames. ], tot_loss[loss=0.1548, simple_loss=0.2459, pruned_loss=0.03185, over 3580562.62 frames. ], batch size: 51, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 01:00:38,307 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.734e+02 2.377e+02 2.711e+02 3.457e+02 7.129e+02, threshold=5.422e+02, percent-clipped=2.0 +2023-03-10 01:01:03,314 INFO [train.py:898] (1/4) Epoch 30, batch 2050, loss[loss=0.1664, simple_loss=0.2593, pruned_loss=0.03674, over 18388.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2458, pruned_loss=0.03202, over 3568097.48 frames. 
], batch size: 52, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 01:01:53,063 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.0017, 3.7295, 5.1674, 2.9373, 4.4651, 2.6398, 3.1652, 1.8133], + device='cuda:1'), covar=tensor([0.1233, 0.1018, 0.0181, 0.1068, 0.0542, 0.2804, 0.2788, 0.2473], + device='cuda:1'), in_proj_covar=tensor([0.0235, 0.0257, 0.0236, 0.0211, 0.0270, 0.0286, 0.0342, 0.0251], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-10 01:02:02,303 INFO [train.py:898] (1/4) Epoch 30, batch 2100, loss[loss=0.1845, simple_loss=0.2735, pruned_loss=0.04771, over 18275.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.2462, pruned_loss=0.03202, over 3565850.61 frames. ], batch size: 57, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 01:02:13,400 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107496.0, num_to_drop=1, layers_to_drop={2} +2023-03-10 01:02:27,083 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107508.0, num_to_drop=1, layers_to_drop={2} +2023-03-10 01:02:29,432 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107510.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:02:36,351 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.605e+02 2.608e+02 2.930e+02 3.296e+02 8.592e+02, threshold=5.861e+02, percent-clipped=2.0 +2023-03-10 01:03:00,488 INFO [train.py:898] (1/4) Epoch 30, batch 2150, loss[loss=0.1347, simple_loss=0.2239, pruned_loss=0.0228, over 18247.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2456, pruned_loss=0.03175, over 3573980.45 frames. ], batch size: 45, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 01:03:23,321 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6694, 3.3743, 4.7974, 3.0273, 4.1973, 2.5112, 2.9307, 1.8162], + device='cuda:1'), covar=tensor([0.1420, 0.1124, 0.0190, 0.0929, 0.0546, 0.2749, 0.2647, 0.2375], + device='cuda:1'), in_proj_covar=tensor([0.0235, 0.0257, 0.0237, 0.0211, 0.0269, 0.0286, 0.0342, 0.0252], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-10 01:03:40,473 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107571.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:03:58,556 INFO [train.py:898] (1/4) Epoch 30, batch 2200, loss[loss=0.1446, simple_loss=0.2285, pruned_loss=0.03037, over 18467.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2455, pruned_loss=0.03148, over 3564745.60 frames. ], batch size: 44, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 01:04:08,629 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.0301, 5.5276, 3.3202, 5.3947, 5.2995, 5.5603, 5.4011, 3.0867], + device='cuda:1'), covar=tensor([0.0225, 0.0063, 0.0597, 0.0064, 0.0063, 0.0066, 0.0076, 0.0824], + device='cuda:1'), in_proj_covar=tensor([0.0095, 0.0085, 0.0099, 0.0101, 0.0091, 0.0081, 0.0088, 0.0100], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-10 01:04:32,916 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.698e+02 2.424e+02 2.775e+02 3.608e+02 1.106e+03, threshold=5.550e+02, percent-clipped=4.0 +2023-03-10 01:04:56,686 INFO [train.py:898] (1/4) Epoch 30, batch 2250, loss[loss=0.1423, simple_loss=0.234, pruned_loss=0.02528, over 18555.00 frames. 
], tot_loss[loss=0.1546, simple_loss=0.2459, pruned_loss=0.03163, over 3557225.84 frames. ], batch size: 49, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 01:05:22,642 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107659.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:05:54,193 INFO [train.py:898] (1/4) Epoch 30, batch 2300, loss[loss=0.1694, simple_loss=0.2681, pruned_loss=0.03536, over 18623.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2471, pruned_loss=0.03183, over 3568389.38 frames. ], batch size: 52, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 01:06:05,911 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.1886, 4.6074, 4.3749, 4.3367, 4.2017, 4.8539, 4.5254, 4.2513], + device='cuda:1'), covar=tensor([0.1314, 0.1135, 0.1097, 0.1014, 0.1414, 0.1053, 0.0795, 0.1777], + device='cuda:1'), in_proj_covar=tensor([0.0380, 0.0309, 0.0338, 0.0343, 0.0343, 0.0451, 0.0304, 0.0447], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-10 01:06:27,333 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.765e+02 2.455e+02 2.885e+02 3.451e+02 1.072e+03, threshold=5.770e+02, percent-clipped=4.0 +2023-03-10 01:06:32,227 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107720.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:06:52,530 INFO [train.py:898] (1/4) Epoch 30, batch 2350, loss[loss=0.1533, simple_loss=0.2502, pruned_loss=0.02824, over 17692.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2464, pruned_loss=0.0315, over 3572000.54 frames. ], batch size: 70, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 01:06:52,808 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107737.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:07:51,011 INFO [train.py:898] (1/4) Epoch 30, batch 2400, loss[loss=0.1542, simple_loss=0.2436, pruned_loss=0.03239, over 18379.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2464, pruned_loss=0.03153, over 3581028.68 frames. ], batch size: 50, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 01:08:01,210 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107796.0, num_to_drop=1, layers_to_drop={2} +2023-03-10 01:08:03,502 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107798.0, num_to_drop=1, layers_to_drop={1} +2023-03-10 01:08:14,956 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107808.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:08:24,298 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.608e+02 2.477e+02 2.910e+02 3.480e+02 7.076e+02, threshold=5.821e+02, percent-clipped=2.0 +2023-03-10 01:08:28,917 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.5479, 5.0711, 5.0367, 5.0459, 4.5970, 4.9555, 4.4665, 4.9463], + device='cuda:1'), covar=tensor([0.0309, 0.0298, 0.0230, 0.0456, 0.0367, 0.0249, 0.1090, 0.0363], + device='cuda:1'), in_proj_covar=tensor([0.0237, 0.0285, 0.0286, 0.0367, 0.0292, 0.0293, 0.0325, 0.0285], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:1') +2023-03-10 01:08:48,117 INFO [train.py:898] (1/4) Epoch 30, batch 2450, loss[loss=0.1307, simple_loss=0.2158, pruned_loss=0.02283, over 18498.00 frames. ], tot_loss[loss=0.1539, simple_loss=0.2452, pruned_loss=0.03125, over 3584468.66 frames. 
], batch size: 47, lr: 3.72e-03, grad_scale: 8.0 +2023-03-10 01:08:51,492 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.8539, 3.7362, 5.0068, 4.3999, 3.3755, 3.1484, 4.5537, 5.2874], + device='cuda:1'), covar=tensor([0.0846, 0.1538, 0.0239, 0.0420, 0.0973, 0.1175, 0.0398, 0.0360], + device='cuda:1'), in_proj_covar=tensor([0.0158, 0.0290, 0.0182, 0.0191, 0.0202, 0.0200, 0.0207, 0.0220], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 01:08:56,619 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107844.0, num_to_drop=1, layers_to_drop={0} +2023-03-10 01:09:01,141 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.2596, 5.8009, 5.4509, 5.5796, 5.4425, 5.1701, 5.8651, 5.8087], + device='cuda:1'), covar=tensor([0.1163, 0.0828, 0.0567, 0.0727, 0.1376, 0.0715, 0.0639, 0.0701], + device='cuda:1'), in_proj_covar=tensor([0.0649, 0.0576, 0.0411, 0.0597, 0.0796, 0.0593, 0.0811, 0.0629], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:1') +2023-03-10 01:09:04,776 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8291, 4.9201, 4.9161, 4.6254, 4.6471, 4.6521, 4.9905, 4.9883], + device='cuda:1'), covar=tensor([0.0073, 0.0063, 0.0057, 0.0132, 0.0077, 0.0144, 0.0076, 0.0085], + device='cuda:1'), in_proj_covar=tensor([0.0104, 0.0078, 0.0083, 0.0104, 0.0083, 0.0113, 0.0096, 0.0095], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-10 01:09:06,265 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=4.38 vs. limit=5.0 +2023-03-10 01:09:10,504 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107856.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:09:18,690 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6708, 3.6247, 4.8942, 4.1686, 3.2946, 2.9463, 4.3306, 5.1147], + device='cuda:1'), covar=tensor([0.0883, 0.1327, 0.0251, 0.0469, 0.0978, 0.1307, 0.0449, 0.0236], + device='cuda:1'), in_proj_covar=tensor([0.0157, 0.0288, 0.0181, 0.0190, 0.0201, 0.0200, 0.0206, 0.0219], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 01:09:21,767 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107866.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:09:46,130 INFO [train.py:898] (1/4) Epoch 30, batch 2500, loss[loss=0.1431, simple_loss=0.2325, pruned_loss=0.0269, over 18341.00 frames. ], tot_loss[loss=0.154, simple_loss=0.2455, pruned_loss=0.03125, over 3593386.28 frames. ], batch size: 46, lr: 3.72e-03, grad_scale: 8.0 +2023-03-10 01:10:19,342 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.763e+02 2.392e+02 2.985e+02 3.620e+02 9.433e+02, threshold=5.970e+02, percent-clipped=3.0 +2023-03-10 01:10:44,306 INFO [train.py:898] (1/4) Epoch 30, batch 2550, loss[loss=0.1362, simple_loss=0.2195, pruned_loss=0.02641, over 18417.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2444, pruned_loss=0.03109, over 3598184.06 frames. 
], batch size: 42, lr: 3.72e-03, grad_scale: 8.0 +2023-03-10 01:11:15,940 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.9817, 3.7080, 5.2326, 2.9592, 4.6082, 2.6526, 3.2312, 1.8312], + device='cuda:1'), covar=tensor([0.1213, 0.1035, 0.0206, 0.1116, 0.0472, 0.2771, 0.2579, 0.2433], + device='cuda:1'), in_proj_covar=tensor([0.0236, 0.0258, 0.0237, 0.0212, 0.0270, 0.0286, 0.0343, 0.0252], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:1') +2023-03-10 01:11:42,236 INFO [train.py:898] (1/4) Epoch 30, batch 2600, loss[loss=0.1588, simple_loss=0.2516, pruned_loss=0.03303, over 18313.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2445, pruned_loss=0.03108, over 3598226.56 frames. ], batch size: 54, lr: 3.72e-03, grad_scale: 8.0 +2023-03-10 01:11:52,574 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8410, 4.4714, 4.4579, 3.4126, 3.7515, 3.4715, 2.6653, 2.7853], + device='cuda:1'), covar=tensor([0.0232, 0.0158, 0.0095, 0.0334, 0.0343, 0.0235, 0.0764, 0.0774], + device='cuda:1'), in_proj_covar=tensor([0.0078, 0.0065, 0.0073, 0.0074, 0.0094, 0.0072, 0.0080, 0.0089], + device='cuda:1'), out_proj_covar=tensor([0.0006, 0.0005, 0.0005, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006], + device='cuda:1') +2023-03-10 01:12:19,586 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108015.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:12:20,530 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.532e+02 2.560e+02 3.005e+02 3.499e+02 6.456e+02, threshold=6.010e+02, percent-clipped=1.0 +2023-03-10 01:12:44,878 INFO [train.py:898] (1/4) Epoch 30, batch 2650, loss[loss=0.1585, simple_loss=0.2483, pruned_loss=0.03438, over 18295.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.2447, pruned_loss=0.03118, over 3599788.58 frames. ], batch size: 49, lr: 3.72e-03, grad_scale: 8.0 +2023-03-10 01:13:41,208 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.8579, 3.3798, 4.5920, 3.7167, 3.0752, 4.8192, 3.9791, 3.3879], + device='cuda:1'), covar=tensor([0.0546, 0.1190, 0.0268, 0.0555, 0.1392, 0.0202, 0.0545, 0.0745], + device='cuda:1'), in_proj_covar=tensor([0.0222, 0.0243, 0.0238, 0.0174, 0.0225, 0.0219, 0.0258, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-10 01:13:42,679 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-10 01:13:44,283 INFO [train.py:898] (1/4) Epoch 30, batch 2700, loss[loss=0.1425, simple_loss=0.2257, pruned_loss=0.02963, over 18264.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2445, pruned_loss=0.03106, over 3593153.55 frames. ], batch size: 45, lr: 3.72e-03, grad_scale: 8.0 +2023-03-10 01:13:49,951 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=2.90 vs. 
limit=5.0 +2023-03-10 01:13:51,682 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108093.0, num_to_drop=1, layers_to_drop={3} +2023-03-10 01:14:17,645 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.687e+02 2.532e+02 2.946e+02 3.512e+02 1.150e+03, threshold=5.891e+02, percent-clipped=1.0 +2023-03-10 01:14:38,652 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.8158, 4.0225, 2.3793, 4.0736, 5.0865, 2.8093, 3.6065, 3.6576], + device='cuda:1'), covar=tensor([0.0243, 0.1481, 0.1682, 0.0639, 0.0136, 0.1113, 0.0798, 0.1025], + device='cuda:1'), in_proj_covar=tensor([0.0190, 0.0289, 0.0215, 0.0205, 0.0150, 0.0188, 0.0225, 0.0235], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 01:14:42,594 INFO [train.py:898] (1/4) Epoch 30, batch 2750, loss[loss=0.1677, simple_loss=0.2553, pruned_loss=0.04007, over 18410.00 frames. ], tot_loss[loss=0.1536, simple_loss=0.2451, pruned_loss=0.03102, over 3597204.05 frames. ], batch size: 48, lr: 3.72e-03, grad_scale: 8.0 +2023-03-10 01:14:50,204 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9039, 4.4962, 4.4775, 3.4549, 3.7081, 3.4395, 2.5179, 2.7530], + device='cuda:1'), covar=tensor([0.0213, 0.0136, 0.0089, 0.0304, 0.0331, 0.0237, 0.0767, 0.0726], + device='cuda:1'), in_proj_covar=tensor([0.0077, 0.0065, 0.0072, 0.0073, 0.0094, 0.0072, 0.0080, 0.0088], + device='cuda:1'), out_proj_covar=tensor([0.0006, 0.0005, 0.0005, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006], + device='cuda:1') +2023-03-10 01:15:16,472 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108166.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:15:41,148 INFO [train.py:898] (1/4) Epoch 30, batch 2800, loss[loss=0.1503, simple_loss=0.2414, pruned_loss=0.0296, over 18384.00 frames. ], tot_loss[loss=0.1527, simple_loss=0.2441, pruned_loss=0.03066, over 3604388.11 frames. ], batch size: 50, lr: 3.72e-03, grad_scale: 8.0 +2023-03-10 01:16:12,747 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108214.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:16:14,906 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.768e+02 2.463e+02 2.976e+02 3.777e+02 6.000e+02, threshold=5.952e+02, percent-clipped=1.0 +2023-03-10 01:16:40,172 INFO [train.py:898] (1/4) Epoch 30, batch 2850, loss[loss=0.1488, simple_loss=0.251, pruned_loss=0.02332, over 16030.00 frames. ], tot_loss[loss=0.1536, simple_loss=0.2449, pruned_loss=0.03114, over 3597720.28 frames. 
], batch size: 94, lr: 3.72e-03, grad_scale: 8.0 +2023-03-10 01:16:47,268 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9984, 3.5925, 4.7302, 4.0365, 3.1706, 4.9499, 4.1755, 3.5008], + device='cuda:1'), covar=tensor([0.0438, 0.1083, 0.0233, 0.0414, 0.1250, 0.0152, 0.0484, 0.0722], + device='cuda:1'), in_proj_covar=tensor([0.0222, 0.0243, 0.0238, 0.0174, 0.0226, 0.0218, 0.0258, 0.0197], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-10 01:17:07,812 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7500, 5.1375, 2.6745, 4.9753, 4.8885, 5.1478, 4.9462, 2.7437], + device='cuda:1'), covar=tensor([0.0264, 0.0074, 0.0835, 0.0089, 0.0081, 0.0073, 0.0096, 0.1018], + device='cuda:1'), in_proj_covar=tensor([0.0095, 0.0085, 0.0099, 0.0101, 0.0091, 0.0081, 0.0087, 0.0099], + device='cuda:1'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005], + device='cuda:1') +2023-03-10 01:17:09,639 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.6010, 3.3107, 2.3130, 4.3703, 3.0594, 4.0138, 2.6738, 3.8143], + device='cuda:1'), covar=tensor([0.0676, 0.0950, 0.1457, 0.0480, 0.0813, 0.0361, 0.1129, 0.0472], + device='cuda:1'), in_proj_covar=tensor([0.0227, 0.0237, 0.0199, 0.0302, 0.0201, 0.0272, 0.0211, 0.0211], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 01:17:38,309 INFO [train.py:898] (1/4) Epoch 30, batch 2900, loss[loss=0.1476, simple_loss=0.2496, pruned_loss=0.02283, over 18517.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.2458, pruned_loss=0.03146, over 3575557.47 frames. ], batch size: 53, lr: 3.72e-03, grad_scale: 8.0 +2023-03-10 01:18:11,356 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108315.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:18:12,786 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.844e+02 2.541e+02 3.023e+02 3.918e+02 9.927e+02, threshold=6.045e+02, percent-clipped=2.0 +2023-03-10 01:18:23,008 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108325.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:18:25,824 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0809, 5.8031, 5.4397, 5.7528, 5.2761, 5.6084, 6.0577, 5.8512], + device='cuda:1'), covar=tensor([0.2729, 0.1362, 0.0959, 0.1214, 0.2685, 0.1198, 0.0950, 0.1214], + device='cuda:1'), in_proj_covar=tensor([0.0657, 0.0582, 0.0414, 0.0601, 0.0802, 0.0598, 0.0818, 0.0634], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:1') +2023-03-10 01:18:36,947 INFO [train.py:898] (1/4) Epoch 30, batch 2950, loss[loss=0.1357, simple_loss=0.2204, pruned_loss=0.02548, over 18104.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.245, pruned_loss=0.03126, over 3584290.20 frames. ], batch size: 40, lr: 3.72e-03, grad_scale: 8.0 +2023-03-10 01:19:03,187 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. 
limit=2.0 +2023-03-10 01:19:03,902 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.4045, 5.2906, 5.6845, 5.7002, 5.3346, 6.1838, 5.8864, 5.5236], + device='cuda:1'), covar=tensor([0.1164, 0.0591, 0.0883, 0.0732, 0.1512, 0.0663, 0.0626, 0.1649], + device='cuda:1'), in_proj_covar=tensor([0.0379, 0.0308, 0.0336, 0.0339, 0.0342, 0.0449, 0.0303, 0.0444], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-10 01:19:07,154 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108363.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:19:22,226 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.4172, 3.3577, 3.2830, 2.7809, 3.1739, 2.5352, 2.6150, 3.3253], + device='cuda:1'), covar=tensor([0.0105, 0.0149, 0.0121, 0.0227, 0.0152, 0.0304, 0.0317, 0.0108], + device='cuda:1'), in_proj_covar=tensor([0.0164, 0.0183, 0.0153, 0.0204, 0.0163, 0.0194, 0.0199, 0.0142], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-10 01:19:29,461 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6469, 2.5827, 2.6337, 2.5063, 2.5614, 2.2822, 2.3659, 2.6844], + device='cuda:1'), covar=tensor([0.0122, 0.0139, 0.0099, 0.0135, 0.0141, 0.0193, 0.0213, 0.0102], + device='cuda:1'), in_proj_covar=tensor([0.0164, 0.0183, 0.0152, 0.0204, 0.0163, 0.0193, 0.0199, 0.0141], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-10 01:19:34,088 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108386.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:19:34,878 INFO [train.py:898] (1/4) Epoch 30, batch 3000, loss[loss=0.1631, simple_loss=0.261, pruned_loss=0.03259, over 17999.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.2444, pruned_loss=0.03124, over 3576102.10 frames. ], batch size: 65, lr: 3.72e-03, grad_scale: 8.0 +2023-03-10 01:19:34,878 INFO [train.py:923] (1/4) Computing validation loss +2023-03-10 01:19:45,334 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.7235, 2.3437, 2.1916, 2.3144, 2.8725, 2.8615, 2.7041, 2.4237], + device='cuda:1'), covar=tensor([0.0225, 0.0303, 0.0613, 0.0500, 0.0229, 0.0223, 0.0440, 0.0446], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0152, 0.0168, 0.0169, 0.0147, 0.0133, 0.0163, 0.0168], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-10 01:19:47,080 INFO [train.py:932] (1/4) Epoch 30, validation: loss=0.1491, simple_loss=0.2469, pruned_loss=0.02567, over 944034.00 frames. 
+2023-03-10 01:19:47,080 INFO [train.py:933] (1/4) Maximum memory allocated so far is 19889MB +2023-03-10 01:19:54,080 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108393.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:20:01,024 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.0496, 5.1459, 5.1724, 4.9387, 4.9105, 4.9397, 5.2647, 5.2386], + device='cuda:1'), covar=tensor([0.0067, 0.0063, 0.0048, 0.0102, 0.0052, 0.0151, 0.0064, 0.0071], + device='cuda:1'), in_proj_covar=tensor([0.0103, 0.0078, 0.0083, 0.0104, 0.0083, 0.0112, 0.0095, 0.0095], + device='cuda:1'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:1') +2023-03-10 01:20:20,293 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.803e+02 2.459e+02 2.931e+02 3.720e+02 6.119e+02, threshold=5.863e+02, percent-clipped=1.0 +2023-03-10 01:20:39,318 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108432.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:20:45,382 INFO [train.py:898] (1/4) Epoch 30, batch 3050, loss[loss=0.1546, simple_loss=0.2495, pruned_loss=0.02984, over 18397.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2456, pruned_loss=0.03178, over 3574460.74 frames. ], batch size: 50, lr: 3.71e-03, grad_scale: 8.0 +2023-03-10 01:20:50,583 INFO [zipformer.py:625] (1/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108441.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:21:43,454 INFO [train.py:898] (1/4) Epoch 30, batch 3100, loss[loss=0.1471, simple_loss=0.2315, pruned_loss=0.03136, over 18404.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.2453, pruned_loss=0.03181, over 3578930.81 frames. ], batch size: 42, lr: 3.71e-03, grad_scale: 8.0 +2023-03-10 01:21:51,112 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108493.0, num_to_drop=1, layers_to_drop={2} +2023-03-10 01:22:17,722 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.743e+02 2.471e+02 2.835e+02 3.390e+02 1.271e+03, threshold=5.670e+02, percent-clipped=3.0 +2023-03-10 01:22:41,853 INFO [train.py:898] (1/4) Epoch 30, batch 3150, loss[loss=0.181, simple_loss=0.2647, pruned_loss=0.04866, over 18352.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2453, pruned_loss=0.0317, over 3579984.71 frames. 
], batch size: 56, lr: 3.71e-03, grad_scale: 8.0 +2023-03-10 01:22:46,626 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([5.1328, 5.1763, 5.3440, 5.4061, 5.0670, 5.9154, 5.5543, 5.1745], + device='cuda:1'), covar=tensor([0.1227, 0.0687, 0.0855, 0.0808, 0.1451, 0.0669, 0.0716, 0.1795], + device='cuda:1'), in_proj_covar=tensor([0.0377, 0.0306, 0.0336, 0.0338, 0.0340, 0.0446, 0.0302, 0.0443], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:1') +2023-03-10 01:23:17,748 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([4.6517, 3.9684, 2.2609, 3.7594, 4.9549, 2.4671, 3.7154, 3.7535], + device='cuda:1'), covar=tensor([0.0235, 0.1219, 0.1823, 0.0748, 0.0143, 0.1287, 0.0728, 0.0825], + device='cuda:1'), in_proj_covar=tensor([0.0191, 0.0291, 0.0215, 0.0206, 0.0151, 0.0188, 0.0226, 0.0236], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 01:23:29,858 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108577.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:23:40,619 INFO [train.py:898] (1/4) Epoch 30, batch 3200, loss[loss=0.1452, simple_loss=0.2381, pruned_loss=0.02615, over 18631.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.245, pruned_loss=0.03155, over 3591074.51 frames. ], batch size: 52, lr: 3.71e-03, grad_scale: 8.0 +2023-03-10 01:24:16,502 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.665e+02 2.358e+02 2.876e+02 3.361e+02 7.357e+02, threshold=5.753e+02, percent-clipped=3.0 +2023-03-10 01:24:39,492 INFO [train.py:898] (1/4) Epoch 30, batch 3250, loss[loss=0.1354, simple_loss=0.2215, pruned_loss=0.02461, over 18165.00 frames. ], tot_loss[loss=0.154, simple_loss=0.2448, pruned_loss=0.03158, over 3569310.77 frames. ], batch size: 44, lr: 3.71e-03, grad_scale: 8.0 +2023-03-10 01:24:40,962 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108638.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:24:50,100 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.64 vs. limit=2.0 +2023-03-10 01:25:02,656 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0 +2023-03-10 01:25:11,636 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.00 vs. limit=5.0 +2023-03-10 01:25:14,832 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-03-10 01:25:31,362 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108681.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:25:37,771 INFO [train.py:898] (1/4) Epoch 30, batch 3300, loss[loss=0.1666, simple_loss=0.2573, pruned_loss=0.03793, over 18323.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2447, pruned_loss=0.03149, over 3572716.00 frames. ], batch size: 57, lr: 3.71e-03, grad_scale: 8.0 +2023-03-10 01:26:12,808 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.499e+02 2.588e+02 2.944e+02 3.535e+02 1.316e+03, threshold=5.888e+02, percent-clipped=3.0 +2023-03-10 01:26:36,460 INFO [train.py:898] (1/4) Epoch 30, batch 3350, loss[loss=0.1658, simple_loss=0.2564, pruned_loss=0.03757, over 12722.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.245, pruned_loss=0.03176, over 3574921.67 frames. 
], batch size: 131, lr: 3.71e-03, grad_scale: 8.0 +2023-03-10 01:27:34,251 INFO [train.py:898] (1/4) Epoch 30, batch 3400, loss[loss=0.1313, simple_loss=0.2126, pruned_loss=0.02497, over 18184.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.2451, pruned_loss=0.0318, over 3575076.77 frames. ], batch size: 44, lr: 3.71e-03, grad_scale: 8.0 +2023-03-10 01:27:35,601 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108788.0, num_to_drop=1, layers_to_drop={2} +2023-03-10 01:27:39,077 INFO [zipformer.py:625] (1/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108791.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:28:09,507 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.896e+02 2.642e+02 3.152e+02 3.667e+02 6.078e+02, threshold=6.305e+02, percent-clipped=1.0 +2023-03-10 01:28:33,118 INFO [train.py:898] (1/4) Epoch 30, batch 3450, loss[loss=0.1669, simple_loss=0.2613, pruned_loss=0.03628, over 18296.00 frames. ], tot_loss[loss=0.1539, simple_loss=0.2447, pruned_loss=0.03158, over 3575782.90 frames. ], batch size: 57, lr: 3.71e-03, grad_scale: 8.0 +2023-03-10 01:28:42,701 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-10 01:28:50,251 INFO [zipformer.py:625] (1/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108852.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:29:31,187 INFO [train.py:898] (1/4) Epoch 30, batch 3500, loss[loss=0.1646, simple_loss=0.262, pruned_loss=0.0336, over 17042.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2442, pruned_loss=0.03119, over 3564235.51 frames. ], batch size: 78, lr: 3.71e-03, grad_scale: 8.0 +2023-03-10 01:29:43,751 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.9558, 3.9537, 3.8159, 3.4458, 3.7165, 3.0962, 3.1175, 3.9723], + device='cuda:1'), covar=tensor([0.0079, 0.0083, 0.0079, 0.0135, 0.0089, 0.0184, 0.0200, 0.0056], + device='cuda:1'), in_proj_covar=tensor([0.0163, 0.0182, 0.0151, 0.0202, 0.0162, 0.0192, 0.0197, 0.0141], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:1') +2023-03-10 01:30:05,389 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.699e+02 2.567e+02 2.966e+02 3.432e+02 4.808e+02, threshold=5.933e+02, percent-clipped=0.0 +2023-03-10 01:30:05,844 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.6042, 3.1162, 3.8717, 3.5626, 3.0265, 2.9453, 3.5959, 4.0133], + device='cuda:1'), covar=tensor([0.0777, 0.1047, 0.0294, 0.0457, 0.0919, 0.1076, 0.0451, 0.0329], + device='cuda:1'), in_proj_covar=tensor([0.0158, 0.0289, 0.0182, 0.0191, 0.0201, 0.0201, 0.0206, 0.0221], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:1') +2023-03-10 01:30:23,071 INFO [zipformer.py:625] (1/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108933.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:30:27,371 INFO [train.py:898] (1/4) Epoch 30, batch 3550, loss[loss=0.1598, simple_loss=0.2521, pruned_loss=0.03368, over 18256.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2448, pruned_loss=0.03132, over 3569346.36 frames. 
], batch size: 60, lr: 3.71e-03, grad_scale: 8.0 +2023-03-10 01:30:32,834 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([3.7636, 3.1520, 4.5403, 3.6796, 2.8531, 4.7589, 3.9636, 3.0945], + device='cuda:1'), covar=tensor([0.0521, 0.1399, 0.0271, 0.0547, 0.1545, 0.0208, 0.0581, 0.0904], + device='cuda:1'), in_proj_covar=tensor([0.0223, 0.0247, 0.0240, 0.0176, 0.0229, 0.0221, 0.0260, 0.0200], + device='cuda:1'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:1') +2023-03-10 01:30:35,269 INFO [scaling.py:679] (1/4) Whitening: num_groups=1, num_channels=384, metric=3.83 vs. limit=5.0 +2023-03-10 01:31:06,482 INFO [scaling.py:679] (1/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-03-10 01:31:14,498 INFO [zipformer.py:625] (1/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108981.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:31:20,777 INFO [train.py:898] (1/4) Epoch 30, batch 3600, loss[loss=0.1724, simple_loss=0.2683, pruned_loss=0.03824, over 18122.00 frames. ], tot_loss[loss=0.1536, simple_loss=0.2445, pruned_loss=0.03139, over 3573549.20 frames. ], batch size: 62, lr: 3.71e-03, grad_scale: 8.0 +2023-03-10 01:31:47,466 INFO [zipformer.py:1455] (1/4) attn_weights_entropy = tensor([2.4882, 2.1286, 2.0956, 2.1824, 2.4305, 2.5018, 2.3770, 2.1594], + device='cuda:1'), covar=tensor([0.0232, 0.0217, 0.0433, 0.0395, 0.0209, 0.0211, 0.0358, 0.0316], + device='cuda:1'), in_proj_covar=tensor([0.0150, 0.0151, 0.0168, 0.0168, 0.0146, 0.0133, 0.0162, 0.0168], + device='cuda:1'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:1') +2023-03-10 01:31:52,858 INFO [optim.py:369] (1/4) Clipping_scale=2.0, grad-norm quartiles 1.673e+02 2.589e+02 3.051e+02 3.699e+02 9.146e+02, threshold=6.102e+02, percent-clipped=6.0 +2023-03-10 01:31:56,885 INFO [train.py:1165] (1/4) Done!
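Editor's note (appended after the end of the captured log; not part of the original file). Two recurring message types above may be worth decoding for readers of this log. The zipformer.py:625 lines report the layer-dropout warmup state: by this point batch_count is around 105k-108k, far past every warmup_end value, so num_to_drop is almost always 0 and only occasionally 1. The optim.py:369 lines report gradient-norm statistics over a window of recent batches, and in every instance above the logged threshold equals Clipping_scale times the logged median (e.g. 2.0 * 2.793e+02 = 5.586e+02, and 2.0 * 2.978e+02 = 5.955e+02), so the clipping threshold is evidently derived as clipping_scale * median of recent gradient norms. The sketch below is a minimal reconstruction of those logged quantities under that assumption; the function and argument names (clipping_stats, grad_norm_window) are illustrative, not icefall's actual API.

    import torch

    def clipping_stats(grad_norm_window: torch.Tensor, clipping_scale: float = 2.0):
        # grad_norm_window: 1-D float tensor of recent per-batch gradient norms.
        # Quartiles in the order the log prints them: min, 25%, median, 75%, max.
        q = torch.quantile(grad_norm_window,
                           torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        # Matches the logged relationship: threshold = Clipping_scale * median.
        threshold = clipping_scale * q[2]
        # Fraction of recent batches whose gradient norm exceeded the threshold,
        # reported in the log as "percent-clipped".
        percent_clipped = 100.0 * (grad_norm_window > threshold).float().mean()
        return q, threshold, percent_clipped

On a synthetic window of norms, the returned threshold reproduces the 2 x median relationship seen in every optim.py:369 line above; percent-clipped cannot be checked from the quartiles alone, since it depends on the full window.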