diff --git "a/exp/log/log-train-2023-03-08-13-46-28-2" "b/exp/log/log-train-2023-03-08-13-46-28-2" new file mode 100644--- /dev/null +++ "b/exp/log/log-train-2023-03-08-13-46-28-2" @@ -0,0 +1,15010 @@ +2023-03-08 13:46:28,357 INFO [train.py:970] (2/4) Training started +2023-03-08 13:46:28,358 INFO [train.py:980] (2/4) Device: cuda:2 +2023-03-08 13:46:28,366 INFO [train.py:989] (2/4) {'frame_shift_ms': 10.0, 'allowed_excess_duration_ratio': 0.1, 'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.22', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': '96c9a2aece2a3a7633da07740e24fa3d96f5498c', 'k2-git-date': 'Thu Nov 10 08:14:02 2022', 'lhotse-version': '1.13.0.dev+git.527d964.clean', 'torch-version': '1.12.1', 'torch-cuda-available': True, 'torch-cuda-version': '11.6', 'python-version': '3.8', 'icefall-git-branch': 'random_padding', 'icefall-git-sha1': '4cf2472-dirty', 'icefall-git-date': 'Wed Mar 1 23:53:23 2023', 'icefall-path': '/ceph-data4/yangxiaoyu/softwares/icefall_development/icefall_random_padding', 'k2-path': '/ceph-data4/yangxiaoyu/softwares/anaconda3/envs/k2_latest/lib/python3.8/site-packages/k2/__init__.py', 'lhotse-path': '/ceph-data4/yangxiaoyu/softwares/lhotse_development/lhotse_random_padding_left/lhotse/__init__.py', 'hostname': 'de-74279-k2-train-9-0208143539-7dcb6bfd79-b6fdq', 'IP address': '10.177.13.150'}, 'world_size': 4, 'master_port': 18180, 'tensorboard': True, 'num_epochs': 30, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('pruned_transducer_stateless7/exp_960h_no_paddingidx_ngpu4'), 'bpe_model': 'data/lang_bpe_500/bpe.model', 'base_lr': 0.05, 'lr_batches': 5000, 'lr_epochs': 3.5, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 2000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,4,3,2,4', 'feedforward_dims': '1024,1024,2048,2048,1024', 'nhead': '8,8,8,8,8', 'encoder_dims': '384,384,384,384,384', 'attention_dims': '192,192,192,192,192', 'encoder_unmasked_dims': '256,256,256,256,256', 'zipformer_downsampling_factors': '1,2,4,8,2', 'cnn_module_kernels': '31,31,31,31,31', 'decoder_dim': 512, 'joiner_dim': 512, 'full_libri': True, 'manifest_dir': PosixPath('data/fbank'), 'max_duration': 750, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'random_left_padding': False, 'num_left_padding': 8, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': True, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2023-03-08 13:46:28,367 INFO [train.py:991] (2/4) About to create model +2023-03-08 13:46:29,218 INFO [zipformer.py:178] (2/4) At encoder stack 4, which has downsampling_factor=2, we will combine the outputs of layers 1 and 3, with downsampling_factors=2 and 8. 
+2023-03-08 13:46:29,242 INFO [train.py:995] (2/4) Number of model parameters: 70369391 +2023-03-08 13:46:32,460 INFO [train.py:1010] (2/4) Using DDP +2023-03-08 13:46:32,864 INFO [asr_datamodule.py:439] (2/4) About to get the shuffled train-clean-100, train-clean-360 and train-other-500 cuts +2023-03-08 13:46:32,867 INFO [asr_datamodule.py:244] (2/4) Enable MUSAN +2023-03-08 13:46:32,867 INFO [asr_datamodule.py:245] (2/4) About to get Musan cuts +2023-03-08 13:46:35,143 INFO [asr_datamodule.py:269] (2/4) Enable SpecAugment +2023-03-08 13:46:35,143 INFO [asr_datamodule.py:270] (2/4) Time warp factor: 80 +2023-03-08 13:46:35,143 INFO [asr_datamodule.py:280] (2/4) Num frame mask: 10 +2023-03-08 13:46:35,143 INFO [asr_datamodule.py:293] (2/4) About to create train dataset +2023-03-08 13:46:35,144 INFO [asr_datamodule.py:320] (2/4) Using DynamicBucketingSampler. +2023-03-08 13:46:41,966 INFO [asr_datamodule.py:335] (2/4) About to create train dataloader +2023-03-08 13:46:41,967 INFO [asr_datamodule.py:449] (2/4) About to get dev-clean cuts +2023-03-08 13:46:41,973 INFO [asr_datamodule.py:456] (2/4) About to get dev-other cuts +2023-03-08 13:46:41,977 INFO [asr_datamodule.py:366] (2/4) About to create dev dataset +2023-03-08 13:46:42,318 INFO [asr_datamodule.py:383] (2/4) About to create dev dataloader +2023-03-08 13:47:06,859 INFO [train.py:898] (2/4) Epoch 1, batch 0, loss[loss=7.385, simple_loss=6.685, pruned_loss=6.986, over 18254.00 frames. ], tot_loss[loss=7.385, simple_loss=6.685, pruned_loss=6.986, over 18254.00 frames. ], batch size: 45, lr: 2.50e-02, grad_scale: 2.0 +2023-03-08 13:47:06,859 INFO [train.py:923] (2/4) Computing validation loss +2023-03-08 13:47:18,760 INFO [train.py:932] (2/4) Epoch 1, validation: loss=6.911, simple_loss=6.237, pruned_loss=6.721, over 944034.00 frames. +2023-03-08 13:47:18,761 INFO [train.py:933] (2/4) Maximum memory allocated so far is 15035MB +2023-03-08 13:47:22,969 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5.0, num_to_drop=2, layers_to_drop={2, 3} +2023-03-08 13:47:42,153 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 13:48:03,185 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.7972, 5.7950, 5.8079, 5.8081, 5.7970, 5.8194, 5.4320, 5.8201], + device='cuda:2'), covar=tensor([0.0007, 0.0005, 0.0011, 0.0010, 0.0010, 0.0011, 0.0011, 0.0009], + device='cuda:2'), in_proj_covar=tensor([0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009], + device='cuda:2'), out_proj_covar=tensor([8.9147e-06, 9.0228e-06, 8.8504e-06, 9.0644e-06, 8.8768e-06, 8.9869e-06, + 8.9764e-06, 9.0340e-06], device='cuda:2') +2023-03-08 13:48:05,156 INFO [train.py:898] (2/4) Epoch 1, batch 50, loss[loss=1.152, simple_loss=1.017, pruned_loss=1.202, over 18583.00 frames. ], tot_loss[loss=2.141, simple_loss=1.936, pruned_loss=1.961, over 819351.50 frames. ], batch size: 45, lr: 2.75e-02, grad_scale: 1.0 +2023-03-08 13:48:10,284 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=12.72 vs. limit=2.0 +2023-03-08 13:48:11,897 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=169.27 vs. 
limit=5.0 +2023-03-08 13:48:33,965 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 13:48:51,690 WARNING [train.py:888] (2/4) Grad scale is small: 0.0009765625 +2023-03-08 13:48:51,690 INFO [train.py:898] (2/4) Epoch 1, batch 100, loss[loss=1.144, simple_loss=0.9811, pruned_loss=1.287, over 17916.00 frames. ], tot_loss[loss=1.603, simple_loss=1.427, pruned_loss=1.588, over 1436285.27 frames. ], batch size: 65, lr: 3.00e-02, grad_scale: 0.001953125 +2023-03-08 13:49:00,733 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 6.526e+01 1.420e+02 2.842e+02 1.227e+03 3.323e+06, threshold=5.685e+02, percent-clipped=0.0 +2023-03-08 13:49:18,544 WARNING [optim.py:389] (2/4) Scaling gradients by 0.03670352324843407, model_norm_threshold=568.4981689453125 +2023-03-08 13:49:18,707 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.51, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.227e+08, grad_sumsq = 3.226e+09, orig_rms_sq=3.802e-02 +2023-03-08 13:49:21,752 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=3.47 vs. limit=2.0 +2023-03-08 13:49:26,891 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=57.15 vs. limit=5.0 +2023-03-08 13:49:28,992 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=144.0, num_to_drop=2, layers_to_drop={1, 3} +2023-03-08 13:49:34,259 INFO [train.py:898] (2/4) Epoch 1, batch 150, loss[loss=1.104, simple_loss=0.9286, pruned_loss=1.253, over 18356.00 frames. ], tot_loss[loss=1.389, simple_loss=1.216, pruned_loss=1.452, over 1912776.19 frames. ], batch size: 56, lr: 3.25e-02, grad_scale: 0.001953125 +2023-03-08 13:50:11,515 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=2.62 vs. limit=2.0 +2023-03-08 13:50:13,800 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.6119, 5.6085, 5.6177, 5.6192, 5.6151, 5.5959, 5.6182, 5.6140], + device='cuda:2'), covar=tensor([0.0056, 0.0025, 0.0070, 0.0037, 0.0088, 0.0062, 0.0037, 0.0032], + device='cuda:2'), in_proj_covar=tensor([0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009], + device='cuda:2'), out_proj_covar=tensor([8.8993e-06, 8.7822e-06, 8.7384e-06, 8.6446e-06, 8.7651e-06, 8.8340e-06, + 8.6383e-06, 8.7907e-06], device='cuda:2') +2023-03-08 13:50:16,349 WARNING [train.py:888] (2/4) Grad scale is small: 0.001953125 +2023-03-08 13:50:16,349 INFO [train.py:898] (2/4) Epoch 1, batch 200, loss[loss=1.011, simple_loss=0.8446, pruned_loss=1.095, over 18385.00 frames. ], tot_loss[loss=1.253, simple_loss=1.083, pruned_loss=1.331, over 2294058.38 frames. ], batch size: 52, lr: 3.50e-02, grad_scale: 0.00390625 +2023-03-08 13:50:32,161 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 7.044e+01 1.307e+02 2.204e+02 4.914e+02 1.549e+04, threshold=4.408e+02, percent-clipped=23.0 +2023-03-08 13:50:46,027 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=15.79 vs. limit=2.0 +2023-03-08 13:50:48,290 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=30.90 vs. limit=5.0 +2023-03-08 13:51:05,108 INFO [train.py:898] (2/4) Epoch 1, batch 250, loss[loss=0.9734, simple_loss=0.8161, pruned_loss=0.9794, over 18299.00 frames. ], tot_loss[loss=1.17, simple_loss=1.003, pruned_loss=1.236, over 2561971.90 frames. 
], batch size: 54, lr: 3.75e-02, grad_scale: 0.00390625 +2023-03-08 13:51:14,325 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=7.48 vs. limit=2.0 +2023-03-08 13:51:37,156 WARNING [optim.py:389] (2/4) Scaling gradients by 0.0006386953755281866, model_norm_threshold=440.7669677734375 +2023-03-08 13:51:37,323 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.skip_modules.4.weight1 with proportion 0.43, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=2.042e+11, grad_sumsq = 2.042e+11, orig_rms_sq=1.000e+00 +2023-03-08 13:51:43,333 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=296.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 13:51:46,143 WARNING [optim.py:389] (2/4) Scaling gradients by 0.04052559658885002, model_norm_threshold=440.7669677734375 +2023-03-08 13:51:46,299 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.77, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=9.126e+07, grad_sumsq = 1.809e+09, orig_rms_sq=5.045e-02 +2023-03-08 13:51:46,652 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=300.0, num_to_drop=2, layers_to_drop={0, 3} +2023-03-08 13:51:47,140 WARNING [train.py:888] (2/4) Grad scale is small: 0.00390625 +2023-03-08 13:51:47,141 INFO [train.py:898] (2/4) Epoch 1, batch 300, loss[loss=1.006, simple_loss=0.8328, pruned_loss=1.003, over 18462.00 frames. ], tot_loss[loss=1.11, simple_loss=0.9449, pruned_loss=1.159, over 2799443.71 frames. ], batch size: 59, lr: 4.00e-02, grad_scale: 0.0078125 +2023-03-08 13:51:55,815 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 8.726e+01 1.552e+02 2.190e+02 3.865e+02 6.901e+05, threshold=4.380e+02, percent-clipped=20.0 +2023-03-08 13:51:56,935 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([6.3097, 6.3101, 6.3109, 6.3120, 6.3101, 6.3096, 6.3109, 6.3070], + device='cuda:2'), covar=tensor([0.0097, 0.0039, 0.0060, 0.0036, 0.0017, 0.0017, 0.0035, 0.0030], + device='cuda:2'), in_proj_covar=tensor([0.0009, 0.0009, 0.0008, 0.0009, 0.0008, 0.0008, 0.0008, 0.0008], + device='cuda:2'), out_proj_covar=tensor([8.7535e-06, 8.7397e-06, 8.5327e-06, 8.4401e-06, 8.6018e-06, 8.4691e-06, + 8.4232e-06, 8.5039e-06], device='cuda:2') +2023-03-08 13:52:21,626 WARNING [optim.py:389] (2/4) Scaling gradients by 0.00015154901484493166, model_norm_threshold=438.01873779296875 +2023-03-08 13:52:21,793 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.70, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=5.875e+12, grad_sumsq = 1.517e+14, orig_rms_sq=3.874e-02 +2023-03-08 13:52:22,244 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=52.14 vs. 
limit=5.0 +2023-03-08 13:52:23,427 WARNING [optim.py:389] (2/4) Scaling gradients by 0.01597026363015175, model_norm_threshold=438.01873779296875 +2023-03-08 13:52:23,697 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.80, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=6.028e+08, grad_sumsq = 1.556e+10, orig_rms_sq=3.874e-02 +2023-03-08 13:52:24,516 WARNING [optim.py:389] (2/4) Scaling gradients by 0.022203104570508003, model_norm_threshold=438.01873779296875 +2023-03-08 13:52:24,730 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.86, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=3.351e+08, grad_sumsq = 8.650e+09, orig_rms_sq=3.874e-02 +2023-03-08 13:52:26,179 WARNING [optim.py:389] (2/4) Scaling gradients by 0.008352968841791153, model_norm_threshold=438.01873779296875 +2023-03-08 13:52:26,341 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.77, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=2.124e+09, grad_sumsq = 5.347e+10, orig_rms_sq=3.973e-02 +2023-03-08 13:52:30,361 INFO [train.py:898] (2/4) Epoch 1, batch 350, loss[loss=0.9276, simple_loss=0.7572, pruned_loss=0.918, over 18289.00 frames. ], tot_loss[loss=1.069, simple_loss=0.9028, pruned_loss=1.103, over 2977357.27 frames. ], batch size: 49, lr: 4.25e-02, grad_scale: 0.00390625 +2023-03-08 13:52:31,227 WARNING [optim.py:389] (2/4) Scaling gradients by 0.00011210949014639482, model_norm_threshold=438.01873779296875 +2023-03-08 13:52:31,453 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.85, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.296e+13, grad_sumsq = 3.160e+14, orig_rms_sq=4.100e-02 +2023-03-08 13:52:34,663 WARNING [optim.py:389] (2/4) Scaling gradients by 0.06386774033308029, model_norm_threshold=438.01873779296875 +2023-03-08 13:52:34,829 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.58, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=2.733e+07, grad_sumsq = 6.508e+08, orig_rms_sq=4.200e-02 +2023-03-08 13:52:35,610 WARNING [optim.py:389] (2/4) Scaling gradients by 0.0002155240799766034, model_norm_threshold=438.01873779296875 +2023-03-08 13:52:35,777 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.85, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=3.507e+12, grad_sumsq = 8.280e+13, orig_rms_sq=4.236e-02 +2023-03-08 13:52:36,112 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=357.0, num_to_drop=2, layers_to_drop={0, 3} +2023-03-08 13:52:43,645 WARNING [optim.py:389] (2/4) Scaling gradients by 0.08033499121665955, model_norm_threshold=438.01873779296875 +2023-03-08 13:52:43,804 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.60, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.796e+07, grad_sumsq = 4.255e+08, orig_rms_sq=4.221e-02 +2023-03-08 13:52:58,804 WARNING [optim.py:389] (2/4) Scaling gradients by 0.00024505704641342163, model_norm_threshold=438.01873779296875 +2023-03-08 13:52:58,961 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.67, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=2.127e+12, grad_sumsq = 5.153e+13, 
orig_rms_sq=4.128e-02 +2023-03-08 13:53:00,760 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=387.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 13:53:01,253 WARNING [optim.py:389] (2/4) Scaling gradients by 0.0035240945871919394, model_norm_threshold=438.01873779296875 +2023-03-08 13:53:01,417 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.51, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=7.954e+09, grad_sumsq = 1.927e+11, orig_rms_sq=4.128e-02 +2023-03-08 13:53:02,952 WARNING [optim.py:389] (2/4) Scaling gradients by 0.00012842776777688414, model_norm_threshold=438.01873779296875 +2023-03-08 13:53:03,759 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.65, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=7.588e+12, grad_sumsq = 1.863e+14, orig_rms_sq=4.072e-02 +2023-03-08 13:53:10,038 WARNING [optim.py:389] (2/4) Scaling gradients by 0.007196913007646799, model_norm_threshold=438.01873779296875 +2023-03-08 13:53:10,199 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoders.4.encoder.layers.0.norm_final.eps with proportion 0.38, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.420e+09, grad_sumsq = 1.420e+09, orig_rms_sq=1.000e+00 +2023-03-08 13:53:13,988 WARNING [train.py:888] (2/4) Grad scale is small: 0.00390625 +2023-03-08 13:53:13,988 INFO [train.py:898] (2/4) Epoch 1, batch 400, loss[loss=0.9664, simple_loss=0.7856, pruned_loss=0.9254, over 17128.00 frames. ], tot_loss[loss=1.042, simple_loss=0.8723, pruned_loss=1.061, over 3106180.27 frames. ], batch size: 78, lr: 4.50e-02, grad_scale: 0.0078125 +2023-03-08 13:53:23,508 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 2.188e+02 3.037e+02 6.402e+02 3.907e+06, threshold=6.074e+02, percent-clipped=33.0 +2023-03-08 13:53:44,547 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=439.0, num_to_drop=2, layers_to_drop={0, 2} +2023-03-08 13:53:52,769 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=13.05 vs. limit=2.0 +2023-03-08 13:53:53,281 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=448.0, num_to_drop=2, layers_to_drop={0, 3} +2023-03-08 13:53:55,324 INFO [train.py:898] (2/4) Epoch 1, batch 450, loss[loss=1.001, simple_loss=0.8, pruned_loss=0.9609, over 18300.00 frames. ], tot_loss[loss=1.023, simple_loss=0.8483, pruned_loss=1.029, over 3218337.40 frames. 
], batch size: 54, lr: 4.75e-02, grad_scale: 0.0078125 +2023-03-08 13:53:59,208 WARNING [optim.py:389] (2/4) Scaling gradients by 0.001993334386497736, model_norm_threshold=607.3988037109375 +2023-03-08 13:53:59,368 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoders.4.encoder.layers.1.norm_final.eps with proportion 0.46, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=4.249e+10, grad_sumsq = 4.249e+10, orig_rms_sq=1.000e+00 +2023-03-08 13:54:00,199 WARNING [optim.py:389] (2/4) Scaling gradients by 0.009787621907889843, model_norm_threshold=607.3988037109375 +2023-03-08 13:54:00,357 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoders.4.encoder.layers.0.norm_final.eps with proportion 0.37, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.407e+09, grad_sumsq = 1.407e+09, orig_rms_sq=1.000e+00 +2023-03-08 13:54:07,923 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.8858, 5.8826, 5.3073, 5.7563, 5.8709, 5.6123, 5.8856, 5.8950], + device='cuda:2'), covar=tensor([0.0419, 0.0367, 0.0808, 0.0909, 0.0280, 0.0337, 0.0388, 0.0268], + device='cuda:2'), in_proj_covar=tensor([0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0009, 0.0010], + device='cuda:2'), out_proj_covar=tensor([8.9065e-06, 8.8252e-06, 9.0013e-06, 8.7053e-06, 8.8510e-06, 8.8930e-06, + 8.5747e-06, 8.9495e-06], device='cuda:2') +2023-03-08 13:54:09,174 WARNING [optim.py:389] (2/4) Scaling gradients by 0.07029950618743896, model_norm_threshold=607.3988037109375 +2023-03-08 13:54:09,335 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.83, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=6.195e+07, grad_sumsq = 1.377e+09, orig_rms_sq=4.498e-02 +2023-03-08 13:54:20,904 WARNING [optim.py:389] (2/4) Scaling gradients by 0.008813662454485893, model_norm_threshold=607.3988037109375 +2023-03-08 13:54:21,114 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.85, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=4.028e+09, grad_sumsq = 8.824e+10, orig_rms_sq=4.564e-02 +2023-03-08 13:54:23,942 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=59.48 vs. limit=5.0 +2023-03-08 13:54:27,985 WARNING [optim.py:389] (2/4) Scaling gradients by 0.024284733459353447, model_norm_threshold=607.3988037109375 +2023-03-08 13:54:28,150 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.83, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=5.221e+08, grad_sumsq = 1.167e+10, orig_rms_sq=4.473e-02 +2023-03-08 13:54:38,282 WARNING [optim.py:389] (2/4) Scaling gradients by 0.0006707996362820268, model_norm_threshold=607.3988037109375 +2023-03-08 13:54:38,444 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoders.4.encoder.layers.1.norm_final.eps with proportion 0.69, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=5.647e+11, grad_sumsq = 5.647e+11, orig_rms_sq=1.000e+00 +2023-03-08 13:54:38,474 WARNING [train.py:888] (2/4) Grad scale is small: 0.0078125 +2023-03-08 13:54:38,474 INFO [train.py:898] (2/4) Epoch 1, batch 500, loss[loss=1.028, simple_loss=0.8082, pruned_loss=0.9853, over 18273.00 frames. ], tot_loss[loss=1.011, simple_loss=0.8298, pruned_loss=1.007, over 3290816.08 frames. 
], batch size: 60, lr: 4.99e-02, grad_scale: 0.015625 +2023-03-08 13:54:42,457 WARNING [optim.py:389] (2/4) Scaling gradients by 0.006503281649202108, model_norm_threshold=607.3988037109375 +2023-03-08 13:54:42,618 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoders.1.out_combiner.weight1 with proportion 0.48, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=4.173e+09, grad_sumsq = 4.173e+09, orig_rms_sq=1.000e+00 +2023-03-08 13:54:48,221 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.357e+02 2.649e+02 4.541e+02 8.074e+02 9.055e+05, threshold=9.081e+02, percent-clipped=35.0 +2023-03-08 13:54:48,221 WARNING [optim.py:389] (2/4) Scaling gradients by 0.07500762492418289, model_norm_threshold=908.1141357421875 +2023-03-08 13:54:48,404 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoders.1.out_combiner.weight1 with proportion 0.80, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.178e+08, grad_sumsq = 1.178e+08, orig_rms_sq=1.000e+00 +2023-03-08 13:54:53,813 WARNING [optim.py:389] (2/4) Scaling gradients by 0.00848373118788004, model_norm_threshold=908.1141357421875 +2023-03-08 13:54:53,974 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoders.2.out_combiner.weight1 with proportion 0.43, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=4.921e+09, grad_sumsq = 4.921e+09, orig_rms_sq=1.000e+00 +2023-03-08 13:54:54,844 WARNING [optim.py:389] (2/4) Scaling gradients by 0.0037236642092466354, model_norm_threshold=908.1141357421875 +2023-03-08 13:54:55,005 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.skip_modules.4.weight1 with proportion 0.69, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=4.127e+10, grad_sumsq = 4.127e+10, orig_rms_sq=1.000e+00 +2023-03-08 13:54:56,910 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4411, 5.3033, 5.1502, 5.3318, 5.3838, 5.5261, 5.4653, 5.3734], + device='cuda:2'), covar=tensor([0.0636, 0.1036, 0.1543, 0.0822, 0.1113, 0.0514, 0.0817, 0.0176], + device='cuda:2'), in_proj_covar=tensor([0.0015, 0.0015, 0.0016, 0.0016, 0.0014, 0.0015, 0.0014, 0.0016], + device='cuda:2'), out_proj_covar=tensor([1.4228e-05, 1.4794e-05, 1.4599e-05, 1.5465e-05, 1.4079e-05, 1.4170e-05, + 1.4001e-05, 1.5481e-05], device='cuda:2') +2023-03-08 13:55:00,562 WARNING [optim.py:389] (2/4) Scaling gradients by 0.0036443807184696198, model_norm_threshold=908.1141357421875 +2023-03-08 13:55:00,747 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.88, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=5.460e+10, grad_sumsq = 1.061e+12, orig_rms_sq=5.145e-02 +2023-03-08 13:55:01,568 WARNING [optim.py:389] (2/4) Scaling gradients by 0.004900030791759491, model_norm_threshold=908.1141357421875 +2023-03-08 13:55:01,728 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.35, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.186e+10, grad_sumsq = 2.280e+11, orig_rms_sq=5.204e-02 +2023-03-08 13:55:02,552 WARNING [optim.py:389] (2/4) Scaling gradients by 0.07598941773176193, model_norm_threshold=908.1141357421875 +2023-03-08 13:55:02,706 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.93, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.329e+08, grad_sumsq = 2.553e+09, orig_rms_sq=5.204e-02 +2023-03-08 13:55:05,075 WARNING [optim.py:389] (2/4) Scaling gradients by 
0.028503550216555595, model_norm_threshold=908.1141357421875 +2023-03-08 13:55:05,237 INFO [optim.py:451] (2/4) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.78, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=7.871e+08, grad_sumsq = 1.620e+10, orig_rms_sq=4.859e-02 +2023-03-08 13:55:20,760 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=15.41 vs. limit=2.0 +2023-03-08 13:55:21,069 INFO [train.py:898] (2/4) Epoch 1, batch 550, loss[loss=1.015, simple_loss=0.7996, pruned_loss=0.9341, over 16138.00 frames. ], tot_loss[loss=1.005, simple_loss=0.8167, pruned_loss=0.9891, over 3360811.10 frames. ], batch size: 94, lr: 4.98e-02, grad_scale: 0.015625 +2023-03-08 13:55:24,495 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0329, 3.0314, 5.0877, 3.0323, 4.0675, 4.8975, 4.0948, 4.9763], + device='cuda:2'), covar=tensor([0.0221, 0.1483, 0.0322, 0.1298, 0.2432, 0.0342, 0.0591, 0.0331], + device='cuda:2'), in_proj_covar=tensor([0.0011, 0.0010, 0.0010, 0.0010, 0.0011, 0.0010, 0.0010, 0.0011], + device='cuda:2'), out_proj_covar=tensor([1.0326e-05, 1.0415e-05, 1.0388e-05, 1.0656e-05, 1.0215e-05, 1.0072e-05, + 9.7096e-06, 1.0249e-05], device='cuda:2') +2023-03-08 13:55:30,000 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=562.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 13:55:34,766 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=568.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 13:55:50,699 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=9.89 vs. limit=2.0 +2023-03-08 13:55:51,863 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=590.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 13:56:00,917 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=600.0, num_to_drop=2, layers_to_drop={0, 2} +2023-03-08 13:56:01,494 INFO [train.py:898] (2/4) Epoch 1, batch 600, loss[loss=0.9223, simple_loss=0.7084, pruned_loss=0.8649, over 18484.00 frames. ], tot_loss[loss=0.9984, simple_loss=0.8024, pruned_loss=0.9719, over 3406946.60 frames. ], batch size: 47, lr: 4.98e-02, grad_scale: 0.03125 +2023-03-08 13:56:11,698 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.187e+02 4.468e+02 7.848e+02 1.243e+03 2.492e+05, threshold=1.570e+03, percent-clipped=35.0 +2023-03-08 13:56:19,754 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=623.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-08 13:56:24,146 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=629.0, num_to_drop=2, layers_to_drop={2, 3} +2023-03-08 13:56:33,916 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=53.09 vs. limit=5.0 +2023-03-08 13:56:33,961 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=11.12 vs. limit=2.0 +2023-03-08 13:56:34,670 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=4.49 vs. limit=2.0 +2023-03-08 13:56:39,118 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=648.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 13:56:41,333 INFO [train.py:898] (2/4) Epoch 1, batch 650, loss[loss=1.014, simple_loss=0.7701, pruned_loss=0.9391, over 17764.00 frames. ], tot_loss[loss=0.9989, simple_loss=0.7942, pruned_loss=0.9611, over 3451646.35 frames. 
], batch size: 70, lr: 4.98e-02, grad_scale: 0.03125 +2023-03-08 13:56:42,251 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=651.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-08 13:56:42,924 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=652.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-08 13:57:22,348 INFO [train.py:898] (2/4) Epoch 1, batch 700, loss[loss=1.006, simple_loss=0.7636, pruned_loss=0.9062, over 18496.00 frames. ], tot_loss[loss=0.9986, simple_loss=0.7859, pruned_loss=0.9492, over 3485976.28 frames. ], batch size: 51, lr: 4.98e-02, grad_scale: 0.0625 +2023-03-08 13:57:32,513 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=3.88 vs. limit=2.0 +2023-03-08 13:57:33,534 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.059e+02 3.958e+02 5.670e+02 9.058e+02 3.205e+03, threshold=1.134e+03, percent-clipped=9.0 +2023-03-08 13:57:43,033 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=5.51 vs. limit=2.0 +2023-03-08 13:57:54,403 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=739.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-08 13:57:54,660 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=4.48 vs. limit=2.0 +2023-03-08 13:57:57,524 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=743.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-08 13:58:03,606 INFO [train.py:898] (2/4) Epoch 1, batch 750, loss[loss=1.055, simple_loss=0.7947, pruned_loss=0.9358, over 18339.00 frames. ], tot_loss[loss=1.001, simple_loss=0.7802, pruned_loss=0.938, over 3508527.23 frames. ], batch size: 55, lr: 4.97e-02, grad_scale: 0.0625 +2023-03-08 13:58:12,336 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=10.66 vs. limit=2.0 +2023-03-08 13:58:17,535 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=72.04 vs. limit=5.0 +2023-03-08 13:58:34,297 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=787.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 13:58:42,315 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1264, 5.0513, 5.1179, 5.0650, 5.0982, 5.1111, 5.0507, 5.0962], + device='cuda:2'), covar=tensor([0.0173, 0.0215, 0.0126, 0.0149, 0.0185, 0.0179, 0.0249, 0.0281], + device='cuda:2'), in_proj_covar=tensor([0.0011, 0.0011, 0.0011, 0.0011, 0.0011, 0.0011, 0.0011, 0.0011], + device='cuda:2'), out_proj_covar=tensor([1.1064e-05, 1.1297e-05, 1.1152e-05, 1.0664e-05, 1.1602e-05, 1.1025e-05, + 1.0868e-05, 1.1427e-05], device='cuda:2') +2023-03-08 13:58:45,696 INFO [train.py:898] (2/4) Epoch 1, batch 800, loss[loss=1.022, simple_loss=0.7739, pruned_loss=0.8744, over 16166.00 frames. ], tot_loss[loss=1.002, simple_loss=0.7743, pruned_loss=0.927, over 3525741.97 frames. ], batch size: 94, lr: 4.97e-02, grad_scale: 0.125 +2023-03-08 13:58:55,627 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.968e+02 3.286e+02 5.870e+02 9.737e+02 2.138e+03, threshold=1.174e+03, percent-clipped=19.0 +2023-03-08 13:59:06,908 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=6.30 vs. 
limit=2.0 +2023-03-08 13:59:23,838 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=847.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 13:59:26,842 INFO [train.py:898] (2/4) Epoch 1, batch 850, loss[loss=1.112, simple_loss=0.8238, pruned_loss=0.9604, over 18273.00 frames. ], tot_loss[loss=1.003, simple_loss=0.7682, pruned_loss=0.9159, over 3554953.27 frames. ], batch size: 57, lr: 4.96e-02, grad_scale: 0.125 +2023-03-08 13:59:33,572 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=10.44 vs. limit=2.0 +2023-03-08 13:59:46,378 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=28.91 vs. limit=5.0 +2023-03-08 14:00:09,710 INFO [train.py:898] (2/4) Epoch 1, batch 900, loss[loss=1.05, simple_loss=0.7743, pruned_loss=0.8909, over 18450.00 frames. ], tot_loss[loss=1.003, simple_loss=0.7619, pruned_loss=0.9031, over 3575469.64 frames. ], batch size: 59, lr: 4.96e-02, grad_scale: 0.25 +2023-03-08 14:00:14,804 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1020, 5.8554, 5.4357, 5.6977, 5.0417, 5.8809, 5.0842, 5.7021], + device='cuda:2'), covar=tensor([2.5265, 0.9735, 1.8294, 0.9528, 2.0784, 1.0660, 1.5137, 1.8724], + device='cuda:2'), in_proj_covar=tensor([0.0028, 0.0030, 0.0029, 0.0031, 0.0027, 0.0028, 0.0027, 0.0034], + device='cuda:2'), out_proj_covar=tensor([2.3037e-05, 2.3328e-05, 2.3049e-05, 2.4891e-05, 2.2394e-05, 2.3202e-05, + 2.2496e-05, 2.8493e-05], device='cuda:2') +2023-03-08 14:00:15,574 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=908.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-08 14:00:17,404 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=4.00 vs. limit=2.0 +2023-03-08 14:00:19,236 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.803e+02 2.708e+02 4.690e+02 7.118e+02 2.600e+03, threshold=9.379e+02, percent-clipped=5.0 +2023-03-08 14:00:24,221 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=918.0, num_to_drop=2, layers_to_drop={0, 2} +2023-03-08 14:00:28,927 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=924.0, num_to_drop=2, layers_to_drop={1, 3} +2023-03-08 14:00:47,936 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=946.0, num_to_drop=2, layers_to_drop={0, 3} +2023-03-08 14:00:51,746 INFO [train.py:898] (2/4) Epoch 1, batch 950, loss[loss=1.049, simple_loss=0.7672, pruned_loss=0.8784, over 18378.00 frames. ], tot_loss[loss=1.006, simple_loss=0.7586, pruned_loss=0.8923, over 3577905.14 frames. ], batch size: 50, lr: 4.96e-02, grad_scale: 0.25 +2023-03-08 14:00:52,789 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=952.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-08 14:01:03,053 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=3.02 vs. 
limit=2.0 +2023-03-08 14:01:22,712 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([6.0205, 6.0179, 6.0123, 5.9330, 6.0047, 6.0238, 5.9553, 6.0244], + device='cuda:2'), covar=tensor([0.0349, 0.0469, 0.0544, 0.1462, 0.0720, 0.0272, 0.1245, 0.0332], + device='cuda:2'), in_proj_covar=tensor([0.0026, 0.0027, 0.0026, 0.0026, 0.0027, 0.0026, 0.0027, 0.0027], + device='cuda:2'), out_proj_covar=tensor([2.6969e-05, 2.9003e-05, 2.6764e-05, 2.8878e-05, 2.7771e-05, 2.6720e-05, + 2.7151e-05, 2.7408e-05], device='cuda:2') +2023-03-08 14:01:28,968 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=6.55 vs. limit=2.0 +2023-03-08 14:01:30,427 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.9420, 5.9853, 5.9284, 5.9364, 5.9804, 5.9884, 6.0088, 6.0057], + device='cuda:2'), covar=tensor([0.0530, 0.0385, 0.0167, 0.0387, 0.0262, 0.0334, 0.0308, 0.0289], + device='cuda:2'), in_proj_covar=tensor([0.0013, 0.0013, 0.0014, 0.0013, 0.0012, 0.0013, 0.0012, 0.0013], + device='cuda:2'), out_proj_covar=tensor([1.2222e-05, 1.2748e-05, 1.2530e-05, 1.3296e-05, 1.2098e-05, 1.2168e-05, + 1.2010e-05, 1.3355e-05], device='cuda:2') +2023-03-08 14:01:33,196 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1000.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:01:33,771 INFO [train.py:898] (2/4) Epoch 1, batch 1000, loss[loss=1.029, simple_loss=0.7629, pruned_loss=0.83, over 18144.00 frames. ], tot_loss[loss=1.007, simple_loss=0.7552, pruned_loss=0.8794, over 3585959.21 frames. ], batch size: 62, lr: 4.95e-02, grad_scale: 0.5 +2023-03-08 14:01:43,698 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.541e+02 3.491e+02 4.812e+02 7.825e+02 1.437e+03, threshold=9.623e+02, percent-clipped=15.0 +2023-03-08 14:01:49,305 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=5.58 vs. limit=2.0 +2023-03-08 14:02:10,214 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1043.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-08 14:02:17,218 INFO [train.py:898] (2/4) Epoch 1, batch 1050, loss[loss=1.015, simple_loss=0.7615, pruned_loss=0.7924, over 12269.00 frames. ], tot_loss[loss=1.007, simple_loss=0.7529, pruned_loss=0.8624, over 3585931.19 frames. ], batch size: 130, lr: 4.95e-02, grad_scale: 0.5 +2023-03-08 14:02:22,713 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5535, 4.4399, 4.6082, 4.2265, 4.2558, 4.4419, 4.4418, 4.5706], + device='cuda:2'), covar=tensor([0.0350, 0.0606, 0.0405, 0.0703, 0.1579, 0.0410, 0.0534, 0.0454], + device='cuda:2'), in_proj_covar=tensor([0.0017, 0.0016, 0.0016, 0.0016, 0.0017, 0.0016, 0.0015, 0.0017], + device='cuda:2'), out_proj_covar=tensor([1.6213e-05, 1.6340e-05, 1.6268e-05, 1.6749e-05, 1.6221e-05, 1.5877e-05, + 1.5222e-05, 1.6115e-05], device='cuda:2') +2023-03-08 14:02:39,450 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=10.25 vs. 
limit=5.0 +2023-03-08 14:02:41,673 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4324, 5.4452, 5.4854, 5.4708, 5.5139, 5.4641, 5.4586, 5.4446], + device='cuda:2'), covar=tensor([0.0308, 0.0383, 0.0304, 0.0310, 0.0311, 0.0265, 0.0374, 0.0435], + device='cuda:2'), in_proj_covar=tensor([0.0016, 0.0016, 0.0016, 0.0016, 0.0016, 0.0015, 0.0016, 0.0017], + device='cuda:2'), out_proj_covar=tensor([1.6140e-05, 1.6992e-05, 1.5674e-05, 1.5983e-05, 1.6463e-05, 1.6332e-05, + 1.6207e-05, 1.5961e-05], device='cuda:2') +2023-03-08 14:02:52,450 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1091.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:03:01,210 INFO [train.py:898] (2/4) Epoch 1, batch 1100, loss[loss=1.001, simple_loss=0.7607, pruned_loss=0.7559, over 18558.00 frames. ], tot_loss[loss=0.9989, simple_loss=0.7481, pruned_loss=0.8368, over 3583240.61 frames. ], batch size: 54, lr: 4.94e-02, grad_scale: 1.0 +2023-03-08 14:03:07,518 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=5.97 vs. limit=2.0 +2023-03-08 14:03:10,799 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.010e+02 4.676e+02 7.290e+02 9.507e+02 1.781e+03, threshold=1.458e+03, percent-clipped=22.0 +2023-03-08 14:03:44,980 INFO [train.py:898] (2/4) Epoch 1, batch 1150, loss[loss=0.9755, simple_loss=0.7638, pruned_loss=0.6982, over 18286.00 frames. ], tot_loss[loss=0.9815, simple_loss=0.7404, pruned_loss=0.8001, over 3590151.56 frames. ], batch size: 57, lr: 4.94e-02, grad_scale: 1.0 +2023-03-08 14:03:46,917 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1153.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:04:28,511 INFO [train.py:898] (2/4) Epoch 1, batch 1200, loss[loss=0.875, simple_loss=0.7022, pruned_loss=0.599, over 18347.00 frames. ], tot_loss[loss=0.9559, simple_loss=0.7277, pruned_loss=0.7581, over 3581501.37 frames. ], batch size: 55, lr: 4.93e-02, grad_scale: 2.0 +2023-03-08 14:04:30,943 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1203.0, num_to_drop=2, layers_to_drop={0, 3} +2023-03-08 14:04:38,787 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.547e+02 6.963e+02 9.525e+02 1.418e+03 3.358e+03, threshold=1.905e+03, percent-clipped=24.0 +2023-03-08 14:04:39,943 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1214.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-08 14:04:43,213 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1218.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-08 14:04:47,883 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1224.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:05:06,347 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1246.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 14:05:11,319 INFO [train.py:898] (2/4) Epoch 1, batch 1250, loss[loss=0.8295, simple_loss=0.6823, pruned_loss=0.5439, over 18473.00 frames. ], tot_loss[loss=0.9238, simple_loss=0.7123, pruned_loss=0.7107, over 3583223.76 frames. 
], batch size: 59, lr: 4.92e-02, grad_scale: 2.0 +2023-03-08 14:05:24,174 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1266.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:05:28,927 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1272.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:05:39,810 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.46 vs. limit=2.0 +2023-03-08 14:05:48,370 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1294.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:05:49,498 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.9000, 5.4095, 5.5998, 5.1325, 5.9423, 5.1457, 5.8417, 4.9294], + device='cuda:2'), covar=tensor([0.0940, 0.2762, 0.2114, 0.3505, 0.0625, 0.5230, 0.0957, 0.6500], + device='cuda:2'), in_proj_covar=tensor([0.0037, 0.0030, 0.0036, 0.0036, 0.0030, 0.0034, 0.0032, 0.0034], + device='cuda:2'), out_proj_covar=tensor([2.4828e-05, 1.9932e-05, 2.4945e-05, 2.5007e-05, 1.8436e-05, 2.3614e-05, + 1.9713e-05, 2.4570e-05], device='cuda:2') +2023-03-08 14:05:53,954 INFO [train.py:898] (2/4) Epoch 1, batch 1300, loss[loss=0.6397, simple_loss=0.5405, pruned_loss=0.4012, over 18154.00 frames. ], tot_loss[loss=0.8867, simple_loss=0.693, pruned_loss=0.6617, over 3590828.28 frames. ], batch size: 44, lr: 4.92e-02, grad_scale: 2.0 +2023-03-08 14:06:04,914 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.724e+02 7.521e+02 1.026e+03 1.273e+03 2.275e+03, threshold=2.053e+03, percent-clipped=5.0 +2023-03-08 14:06:13,964 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([6.3017, 6.3430, 6.3076, 6.3537, 6.3561, 6.3314, 6.2789, 6.1753], + device='cuda:2'), covar=tensor([0.0355, 0.0264, 0.0372, 0.0319, 0.0253, 0.0290, 0.0581, 0.0504], + device='cuda:2'), in_proj_covar=tensor([0.0042, 0.0039, 0.0039, 0.0041, 0.0037, 0.0040, 0.0042, 0.0040], + device='cuda:2'), out_proj_covar=tensor([4.1398e-05, 4.0427e-05, 3.7970e-05, 3.8887e-05, 4.0479e-05, 3.8797e-05, + 4.2391e-05, 4.0207e-05], device='cuda:2') +2023-03-08 14:06:38,450 INFO [train.py:898] (2/4) Epoch 1, batch 1350, loss[loss=0.7157, simple_loss=0.6137, pruned_loss=0.4369, over 16358.00 frames. ], tot_loss[loss=0.8507, simple_loss=0.6748, pruned_loss=0.6156, over 3588514.47 frames. ], batch size: 94, lr: 4.91e-02, grad_scale: 2.0 +2023-03-08 14:07:22,135 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8401, 3.7155, 3.6734, 3.8452, 3.5999, 3.8756, 3.6707, 3.7871], + device='cuda:2'), covar=tensor([0.2596, 0.4175, 0.2231, 0.1877, 0.2110, 0.2060, 0.2686, 0.2689], + device='cuda:2'), in_proj_covar=tensor([0.0043, 0.0039, 0.0042, 0.0038, 0.0041, 0.0041, 0.0043, 0.0044], + device='cuda:2'), out_proj_covar=tensor([4.3509e-05, 4.1093e-05, 4.0595e-05, 4.0248e-05, 4.1140e-05, 4.0821e-05, + 4.0958e-05, 4.2926e-05], device='cuda:2') +2023-03-08 14:07:24,106 INFO [train.py:898] (2/4) Epoch 1, batch 1400, loss[loss=0.6522, simple_loss=0.571, pruned_loss=0.3849, over 18199.00 frames. ], tot_loss[loss=0.8113, simple_loss=0.6538, pruned_loss=0.5693, over 3590695.50 frames. 
], batch size: 60, lr: 4.91e-02, grad_scale: 2.0 +2023-03-08 14:07:35,946 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.189e+02 7.494e+02 9.042e+02 1.119e+03 2.290e+03, threshold=1.808e+03, percent-clipped=1.0 +2023-03-08 14:07:55,146 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1434.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:08:09,516 INFO [train.py:898] (2/4) Epoch 1, batch 1450, loss[loss=0.7953, simple_loss=0.668, pruned_loss=0.4909, over 12194.00 frames. ], tot_loss[loss=0.7774, simple_loss=0.6359, pruned_loss=0.5297, over 3587207.53 frames. ], batch size: 130, lr: 4.90e-02, grad_scale: 2.0 +2023-03-08 14:08:50,623 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1495.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-08 14:08:55,492 INFO [train.py:898] (2/4) Epoch 1, batch 1500, loss[loss=0.6646, simple_loss=0.5855, pruned_loss=0.3853, over 18360.00 frames. ], tot_loss[loss=0.7438, simple_loss=0.6173, pruned_loss=0.4927, over 3583999.45 frames. ], batch size: 56, lr: 4.89e-02, grad_scale: 2.0 +2023-03-08 14:08:57,723 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1503.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:09:03,875 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1509.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-08 14:09:07,634 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.367e+02 6.843e+02 9.252e+02 1.183e+03 2.667e+03, threshold=1.850e+03, percent-clipped=7.0 +2023-03-08 14:09:43,252 INFO [train.py:898] (2/4) Epoch 1, batch 1550, loss[loss=0.5914, simple_loss=0.5385, pruned_loss=0.3279, over 18483.00 frames. ], tot_loss[loss=0.7139, simple_loss=0.6017, pruned_loss=0.4598, over 3578476.73 frames. ], batch size: 53, lr: 4.89e-02, grad_scale: 2.0 +2023-03-08 14:09:43,428 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1551.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:09:57,956 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1566.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:10:30,626 INFO [train.py:898] (2/4) Epoch 1, batch 1600, loss[loss=0.5635, simple_loss=0.5074, pruned_loss=0.3159, over 18251.00 frames. ], tot_loss[loss=0.6832, simple_loss=0.5847, pruned_loss=0.4283, over 3592371.03 frames. 
], batch size: 45, lr: 4.88e-02, grad_scale: 4.0 +2023-03-08 14:10:39,820 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1611.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:10:41,383 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.398e+02 7.144e+02 8.708e+02 1.119e+03 2.244e+03, threshold=1.742e+03, percent-clipped=2.0 +2023-03-08 14:10:44,060 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.6569, 5.5243, 5.5153, 5.2578, 5.7328, 5.3582, 5.6162, 5.4147], + device='cuda:2'), covar=tensor([0.0294, 0.0403, 0.0377, 0.0673, 0.0161, 0.0695, 0.0308, 0.0549], + device='cuda:2'), in_proj_covar=tensor([0.0030, 0.0025, 0.0029, 0.0029, 0.0024, 0.0028, 0.0025, 0.0027], + device='cuda:2'), out_proj_covar=tensor([1.8539e-05, 1.5174e-05, 1.8263e-05, 1.8679e-05, 1.2993e-05, 1.7386e-05, + 1.4393e-05, 1.6718e-05], device='cuda:2') +2023-03-08 14:10:55,216 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1627.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-08 14:11:16,711 INFO [train.py:898] (2/4) Epoch 1, batch 1650, loss[loss=0.5179, simple_loss=0.4847, pruned_loss=0.2768, over 18382.00 frames. ], tot_loss[loss=0.6552, simple_loss=0.5691, pruned_loss=0.4004, over 3599397.59 frames. ], batch size: 46, lr: 4.87e-02, grad_scale: 4.0 +2023-03-08 14:11:36,764 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6741, 5.1863, 5.3375, 5.1142, 5.0268, 5.0794, 5.2201, 4.9353], + device='cuda:2'), covar=tensor([0.2147, 0.1137, 0.0907, 0.1214, 0.1478, 0.1208, 0.1346, 0.1374], + device='cuda:2'), in_proj_covar=tensor([0.0086, 0.0072, 0.0064, 0.0073, 0.0077, 0.0076, 0.0078, 0.0077], + device='cuda:2'), out_proj_covar=tensor([8.4519e-05, 7.1940e-05, 6.1103e-05, 6.6479e-05, 7.5074e-05, 7.2886e-05, + 7.9096e-05, 7.5087e-05], device='cuda:2') +2023-03-08 14:11:36,835 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1672.0, num_to_drop=2, layers_to_drop={1, 3} +2023-03-08 14:11:42,876 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.92 vs. limit=2.0 +2023-03-08 14:12:04,571 INFO [train.py:898] (2/4) Epoch 1, batch 1700, loss[loss=0.5049, simple_loss=0.4718, pruned_loss=0.27, over 18253.00 frames. ], tot_loss[loss=0.6305, simple_loss=0.5558, pruned_loss=0.376, over 3599287.36 frames. ], batch size: 45, lr: 4.86e-02, grad_scale: 4.0 +2023-03-08 14:12:15,865 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.590e+02 6.591e+02 8.765e+02 1.033e+03 1.987e+03, threshold=1.753e+03, percent-clipped=3.0 +2023-03-08 14:12:20,446 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=2.07 vs. limit=2.0 +2023-03-08 14:12:53,426 INFO [train.py:898] (2/4) Epoch 1, batch 1750, loss[loss=0.539, simple_loss=0.5147, pruned_loss=0.2811, over 18398.00 frames. ], tot_loss[loss=0.6071, simple_loss=0.543, pruned_loss=0.3538, over 3597381.75 frames. ], batch size: 50, lr: 4.86e-02, grad_scale: 4.0 +2023-03-08 14:13:30,720 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1790.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:13:41,359 INFO [train.py:898] (2/4) Epoch 1, batch 1800, loss[loss=0.5406, simple_loss=0.5131, pruned_loss=0.2839, over 17937.00 frames. ], tot_loss[loss=0.5923, simple_loss=0.5353, pruned_loss=0.3389, over 3589835.88 frames. 
], batch size: 65, lr: 4.85e-02, grad_scale: 4.0 +2023-03-08 14:13:49,033 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1809.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:13:52,145 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.336e+02 7.100e+02 8.751e+02 1.042e+03 2.911e+03, threshold=1.750e+03, percent-clipped=4.0 +2023-03-08 14:14:17,003 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1838.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:14:25,515 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.4316, 4.4351, 4.2181, 4.1948, 4.3202, 4.3037, 4.2256, 5.4842], + device='cuda:2'), covar=tensor([0.1341, 0.1365, 0.1350, 0.1540, 0.1891, 0.1139, 0.1690, 0.0182], + device='cuda:2'), in_proj_covar=tensor([0.0021, 0.0020, 0.0019, 0.0021, 0.0021, 0.0020, 0.0019, 0.0015], + device='cuda:2'), out_proj_covar=tensor([2.1969e-05, 1.8796e-05, 1.6449e-05, 1.8977e-05, 2.2844e-05, 2.0593e-05, + 1.7676e-05, 1.3942e-05], device='cuda:2') +2023-03-08 14:14:28,756 INFO [train.py:898] (2/4) Epoch 1, batch 1850, loss[loss=0.6041, simple_loss=0.5447, pruned_loss=0.3339, over 12286.00 frames. ], tot_loss[loss=0.5732, simple_loss=0.5249, pruned_loss=0.3217, over 3587087.87 frames. ], batch size: 130, lr: 4.84e-02, grad_scale: 4.0 +2023-03-08 14:14:35,119 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=1857.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:14:35,233 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1857.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:14:38,184 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7080, 5.1387, 5.0997, 5.1924, 5.0754, 5.4288, 4.9834, 4.7696], + device='cuda:2'), covar=tensor([0.0479, 0.0324, 0.0513, 0.0524, 0.0695, 0.0292, 0.0658, 0.0934], + device='cuda:2'), in_proj_covar=tensor([0.0035, 0.0031, 0.0037, 0.0033, 0.0038, 0.0032, 0.0036, 0.0036], + device='cuda:2'), out_proj_covar=tensor([3.2459e-05, 2.8623e-05, 3.4551e-05, 3.0671e-05, 3.4772e-05, 2.9666e-05, + 3.4197e-05, 3.1847e-05], device='cuda:2') +2023-03-08 14:14:46,631 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1941, 5.2740, 5.1770, 5.1238, 4.8847, 5.1798, 5.1601, 5.7305], + device='cuda:2'), covar=tensor([0.0317, 0.0216, 0.0354, 0.0291, 0.0522, 0.0192, 0.0248, 0.0032], + device='cuda:2'), in_proj_covar=tensor([0.0020, 0.0019, 0.0018, 0.0020, 0.0021, 0.0019, 0.0018, 0.0015], + device='cuda:2'), out_proj_covar=tensor([2.1295e-05, 1.8352e-05, 1.5711e-05, 1.8269e-05, 2.2499e-05, 1.9763e-05, + 1.6752e-05, 1.3361e-05], device='cuda:2') +2023-03-08 14:14:47,416 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1870.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:15:15,935 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1899.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-08 14:15:17,458 INFO [train.py:898] (2/4) Epoch 1, batch 1900, loss[loss=0.5309, simple_loss=0.5082, pruned_loss=0.2765, over 15995.00 frames. ], tot_loss[loss=0.5586, simple_loss=0.5181, pruned_loss=0.3078, over 3589719.04 frames. 
], batch size: 94, lr: 4.83e-02, grad_scale: 4.0 +2023-03-08 14:15:29,151 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.595e+02 6.983e+02 8.615e+02 1.111e+03 2.145e+03, threshold=1.723e+03, percent-clipped=1.0 +2023-03-08 14:15:34,113 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1918.0, num_to_drop=2, layers_to_drop={2, 3} +2023-03-08 14:15:37,703 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1922.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:15:46,042 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1931.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-08 14:16:06,106 INFO [train.py:898] (2/4) Epoch 1, batch 1950, loss[loss=0.5756, simple_loss=0.543, pruned_loss=0.3042, over 18337.00 frames. ], tot_loss[loss=0.5418, simple_loss=0.509, pruned_loss=0.2936, over 3594406.44 frames. ], batch size: 55, lr: 4.83e-02, grad_scale: 4.0 +2023-03-08 14:16:21,748 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1967.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:16:30,543 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-03-08 14:17:00,017 INFO [train.py:898] (2/4) Epoch 1, batch 2000, loss[loss=0.5854, simple_loss=0.54, pruned_loss=0.3154, over 12302.00 frames. ], tot_loss[loss=0.5299, simple_loss=0.5023, pruned_loss=0.2836, over 3590813.68 frames. ], batch size: 130, lr: 4.82e-02, grad_scale: 8.0 +2023-03-08 14:17:12,317 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.598e+02 6.794e+02 8.524e+02 1.041e+03 1.894e+03, threshold=1.705e+03, percent-clipped=5.0 +2023-03-08 14:17:22,506 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4750, 3.9411, 3.9761, 3.8154, 3.7305, 3.7044, 3.6722, 3.8018], + device='cuda:2'), covar=tensor([0.0960, 0.0597, 0.0536, 0.0681, 0.0842, 0.0736, 0.0777, 0.0867], + device='cuda:2'), in_proj_covar=tensor([0.0053, 0.0059, 0.0046, 0.0054, 0.0061, 0.0058, 0.0059, 0.0056], + device='cuda:2'), out_proj_covar=tensor([4.7392e-05, 5.1721e-05, 4.2668e-05, 4.8730e-05, 5.5007e-05, 5.2760e-05, + 5.5382e-05, 4.7261e-05], device='cuda:2') +2023-03-08 14:17:35,812 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3627, 5.0289, 5.1016, 5.3287, 5.1798, 4.8867, 5.1093, 4.6782], + device='cuda:2'), covar=tensor([0.0350, 0.0447, 0.0769, 0.0419, 0.0487, 0.0577, 0.0494, 0.0617], + device='cuda:2'), in_proj_covar=tensor([0.0076, 0.0087, 0.0090, 0.0081, 0.0085, 0.0106, 0.0103, 0.0104], + device='cuda:2'), out_proj_covar=tensor([7.4352e-05, 8.5901e-05, 9.1055e-05, 7.8550e-05, 7.8996e-05, 1.0993e-04, + 1.0519e-04, 1.0158e-04], device='cuda:2') +2023-03-08 14:17:53,239 INFO [train.py:898] (2/4) Epoch 1, batch 2050, loss[loss=0.5382, simple_loss=0.5107, pruned_loss=0.2829, over 12810.00 frames. ], tot_loss[loss=0.5133, simple_loss=0.4936, pruned_loss=0.2703, over 3572830.97 frames. 
], batch size: 130, lr: 4.81e-02, grad_scale: 8.0 +2023-03-08 14:18:31,912 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6236, 4.7661, 4.4250, 4.3724, 4.4841, 4.7380, 4.3878, 4.3971], + device='cuda:2'), covar=tensor([0.0689, 0.0720, 0.0792, 0.0668, 0.0796, 0.0728, 0.0709, 0.1258], + device='cuda:2'), in_proj_covar=tensor([0.0118, 0.0115, 0.0100, 0.0104, 0.0114, 0.0090, 0.0103, 0.0127], + device='cuda:2'), out_proj_covar=tensor([1.0595e-04, 1.1485e-04, 9.9258e-05, 9.5609e-05, 1.1487e-04, 8.5649e-05, + 9.4465e-05, 1.1714e-04], device='cuda:2') +2023-03-08 14:18:34,069 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2473, 5.0619, 5.1186, 4.7044, 4.5409, 4.7211, 4.6955, 4.7193], + device='cuda:2'), covar=tensor([0.1481, 0.0479, 0.0402, 0.0733, 0.0905, 0.0627, 0.0658, 0.0702], + device='cuda:2'), in_proj_covar=tensor([0.0052, 0.0057, 0.0045, 0.0051, 0.0059, 0.0056, 0.0057, 0.0053], + device='cuda:2'), out_proj_covar=tensor([4.7447e-05, 5.0089e-05, 4.1261e-05, 4.7124e-05, 5.3822e-05, 5.1776e-05, + 5.3165e-05, 4.5466e-05], device='cuda:2') +2023-03-08 14:18:34,092 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2090.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-08 14:18:45,465 INFO [train.py:898] (2/4) Epoch 1, batch 2100, loss[loss=0.4346, simple_loss=0.4668, pruned_loss=0.2012, over 18255.00 frames. ], tot_loss[loss=0.4957, simple_loss=0.4837, pruned_loss=0.2567, over 3580693.88 frames. ], batch size: 60, lr: 4.80e-02, grad_scale: 8.0 +2023-03-08 14:18:58,624 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.519e+02 5.789e+02 7.175e+02 9.165e+02 1.305e+03, threshold=1.435e+03, percent-clipped=0.0 +2023-03-08 14:19:12,525 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9723, 4.5365, 4.2463, 4.1358, 4.6254, 4.5944, 4.2426, 4.1514], + device='cuda:2'), covar=tensor([0.0452, 0.0403, 0.0349, 0.0641, 0.0279, 0.0315, 0.0693, 0.0543], + device='cuda:2'), in_proj_covar=tensor([0.0026, 0.0022, 0.0026, 0.0025, 0.0027, 0.0023, 0.0027, 0.0027], + device='cuda:2'), out_proj_covar=tensor([2.4448e-05, 1.9730e-05, 2.2723e-05, 2.2215e-05, 2.4362e-05, 1.9989e-05, + 2.5138e-05, 2.4190e-05], device='cuda:2') +2023-03-08 14:19:24,327 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2138.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:19:38,125 INFO [train.py:898] (2/4) Epoch 1, batch 2150, loss[loss=0.4269, simple_loss=0.4418, pruned_loss=0.206, over 18278.00 frames. ], tot_loss[loss=0.4802, simple_loss=0.475, pruned_loss=0.245, over 3589677.48 frames. ], batch size: 49, lr: 4.79e-02, grad_scale: 8.0 +2023-03-08 14:20:23,718 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2194.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:20:31,432 INFO [train.py:898] (2/4) Epoch 1, batch 2200, loss[loss=0.3867, simple_loss=0.4126, pruned_loss=0.1804, over 18278.00 frames. ], tot_loss[loss=0.4677, simple_loss=0.4677, pruned_loss=0.2357, over 3574654.92 frames. ], batch size: 45, lr: 4.78e-02, grad_scale: 8.0 +2023-03-08 14:20:39,209 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.39 vs. 
limit=5.0 +2023-03-08 14:20:44,692 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.109e+02 6.450e+02 8.005e+02 9.321e+02 1.802e+03, threshold=1.601e+03, percent-clipped=2.0 +2023-03-08 14:20:44,953 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2213.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:20:54,163 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2222.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-08 14:20:58,202 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2226.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:21:24,262 INFO [train.py:898] (2/4) Epoch 1, batch 2250, loss[loss=0.3926, simple_loss=0.4256, pruned_loss=0.1797, over 18558.00 frames. ], tot_loss[loss=0.4558, simple_loss=0.4605, pruned_loss=0.227, over 3585137.76 frames. ], batch size: 49, lr: 4.77e-02, grad_scale: 8.0 +2023-03-08 14:21:42,690 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2267.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 14:21:45,782 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2270.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:22:03,971 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8931, 4.9918, 5.0712, 4.5446, 4.2302, 4.6368, 4.3578, 4.7225], + device='cuda:2'), covar=tensor([0.1845, 0.0396, 0.0255, 0.0657, 0.0983, 0.0473, 0.0632, 0.0485], + device='cuda:2'), in_proj_covar=tensor([0.0059, 0.0064, 0.0050, 0.0057, 0.0067, 0.0062, 0.0066, 0.0061], + device='cuda:2'), out_proj_covar=tensor([5.7208e-05, 5.8558e-05, 4.7552e-05, 5.6957e-05, 6.5030e-05, 6.0522e-05, + 6.6698e-05, 5.5444e-05], device='cuda:2') +2023-03-08 14:22:17,953 INFO [train.py:898] (2/4) Epoch 1, batch 2300, loss[loss=0.4974, simple_loss=0.4895, pruned_loss=0.2526, over 13004.00 frames. ], tot_loss[loss=0.4464, simple_loss=0.4554, pruned_loss=0.2198, over 3582153.97 frames. ], batch size: 129, lr: 4.77e-02, grad_scale: 8.0 +2023-03-08 14:22:31,078 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.655e+02 6.513e+02 7.644e+02 9.367e+02 1.783e+03, threshold=1.529e+03, percent-clipped=1.0 +2023-03-08 14:22:33,986 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2315.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:22:37,066 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6175, 4.7727, 4.3601, 4.3995, 4.5127, 4.7760, 4.3886, 4.3727], + device='cuda:2'), covar=tensor([0.0502, 0.0559, 0.0635, 0.0517, 0.0671, 0.0521, 0.0606, 0.1070], + device='cuda:2'), in_proj_covar=tensor([0.0122, 0.0127, 0.0102, 0.0105, 0.0121, 0.0098, 0.0104, 0.0134], + device='cuda:2'), out_proj_covar=tensor([1.1080e-04, 1.3105e-04, 1.0543e-04, 9.8994e-05, 1.2249e-04, 9.5769e-05, + 9.8724e-05, 1.3118e-04], device='cuda:2') +2023-03-08 14:23:00,823 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2340.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:23:11,902 INFO [train.py:898] (2/4) Epoch 1, batch 2350, loss[loss=0.3694, simple_loss=0.404, pruned_loss=0.1674, over 17223.00 frames. ], tot_loss[loss=0.4395, simple_loss=0.4516, pruned_loss=0.2145, over 3582775.87 frames. 
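The grad_scale field tracks fp16 dynamic loss scaling: the scale doubles after a run of overflow-free steps (4.0 to 8.0 at batch 2000, 8.0 to 16.0 by batch 2350) and is halved when a step produces inf/nan gradients (back to 8.0 by batch 3000). The stock PyTorch scaler behaves this way; the init and growth settings below are assumptions for illustration:

```python
import torch

# Assumed settings; they only illustrate why grad_scale moves in powers of two.
scaler = torch.cuda.amp.GradScaler(init_scale=2.0, growth_factor=2.0,
                                   backoff_factor=0.5, growth_interval=500)

def fp16_step(model, optimizer, loss):
    # loss is assumed to have been computed under torch.cuda.amp.autocast().
    optimizer.zero_grad()
    scaler.scale(loss).backward()  # backward through the scaled loss
    scaler.step(optimizer)         # silently skips the update on overflow
    scaler.update()                # grows the scale, or backs it off
    return scaler.get_scale()      # the value logged as grad_scale
```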
], batch size: 38, lr: 4.76e-02, grad_scale: 16.0 +2023-03-08 14:23:56,260 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9932, 4.1319, 3.8328, 3.7053, 3.9547, 3.5703, 4.0597, 3.3501], + device='cuda:2'), covar=tensor([0.0685, 0.0526, 0.1307, 0.0646, 0.0314, 0.0926, 0.0423, 0.1003], + device='cuda:2'), in_proj_covar=tensor([0.0039, 0.0035, 0.0038, 0.0035, 0.0038, 0.0040, 0.0038, 0.0047], + device='cuda:2'), out_proj_covar=tensor([3.8737e-05, 3.5237e-05, 4.2430e-05, 3.5821e-05, 3.8318e-05, 4.0792e-05, + 3.7384e-05, 4.9980e-05], device='cuda:2') +2023-03-08 14:24:05,195 INFO [train.py:898] (2/4) Epoch 1, batch 2400, loss[loss=0.4455, simple_loss=0.4583, pruned_loss=0.2163, over 17702.00 frames. ], tot_loss[loss=0.4312, simple_loss=0.4468, pruned_loss=0.2084, over 3591835.76 frames. ], batch size: 70, lr: 4.75e-02, grad_scale: 16.0 +2023-03-08 14:24:05,523 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2401.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-08 14:24:08,014 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.86 vs. limit=2.0 +2023-03-08 14:24:17,837 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.759e+02 6.046e+02 7.526e+02 8.987e+02 1.408e+03, threshold=1.505e+03, percent-clipped=0.0 +2023-03-08 14:24:49,421 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.57 vs. limit=5.0 +2023-03-08 14:24:58,101 INFO [train.py:898] (2/4) Epoch 1, batch 2450, loss[loss=0.3665, simple_loss=0.3928, pruned_loss=0.1701, over 18254.00 frames. ], tot_loss[loss=0.4235, simple_loss=0.4421, pruned_loss=0.2029, over 3589579.11 frames. ], batch size: 45, lr: 4.74e-02, grad_scale: 16.0 +2023-03-08 14:25:45,522 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2494.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 14:25:53,140 INFO [train.py:898] (2/4) Epoch 1, batch 2500, loss[loss=0.4079, simple_loss=0.4445, pruned_loss=0.1857, over 18121.00 frames. ], tot_loss[loss=0.4179, simple_loss=0.439, pruned_loss=0.1989, over 3577091.03 frames. ], batch size: 62, lr: 4.73e-02, grad_scale: 16.0 +2023-03-08 14:25:54,906 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.03 vs. limit=2.0 +2023-03-08 14:26:05,170 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.211e+02 5.891e+02 7.187e+02 8.781e+02 1.767e+03, threshold=1.437e+03, percent-clipped=2.0 +2023-03-08 14:26:05,579 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2513.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:26:20,351 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2526.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:26:37,715 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2542.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:26:47,795 INFO [train.py:898] (2/4) Epoch 1, batch 2550, loss[loss=0.3045, simple_loss=0.3493, pruned_loss=0.1299, over 18461.00 frames. ], tot_loss[loss=0.4126, simple_loss=0.4357, pruned_loss=0.195, over 3564548.71 frames. 
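The scaling.py:679 Whitening lines compare a per-module anisotropy statistic against a limit (metric=1.86 vs. limit=2.0 above): a metric of 1.0 means the grouped channel covariance is already a multiple of the identity, and larger values mean the activations are far from white, at which point the module intervenes. One standard way to compute such a ratio, assuming the eigenvalue-spread definition; the codebase's exact formula and whatever correction it applies past the limit may differ:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    # x: (num_frames, num_channels); channels split into num_groups groups.
    n, c = x.shape
    d = c // num_groups
    xg = x.reshape(n, num_groups, d).transpose(0, 1)      # (groups, n, d)
    cov = torch.matmul(xg.transpose(1, 2), xg) / n        # per-group covariance
    mean_eig = cov.diagonal(dim1=1, dim2=2).mean(dim=1)   # trace/d = mean eigenvalue
    mean_sq_eig = (cov ** 2).sum(dim=(1, 2)) / d          # Frobenius^2/d = mean eig^2
    # E[eig^2] / (E[eig])^2 is 1.0 iff all eigenvalues are equal (white input).
    return (mean_sq_eig / (mean_eig ** 2 + 1e-20)).mean()
```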
], batch size: 43, lr: 4.72e-02, grad_scale: 16.0 +2023-03-08 14:26:58,316 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2561.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:27:12,157 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=2574.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:27:42,179 INFO [train.py:898] (2/4) Epoch 1, batch 2600, loss[loss=0.4122, simple_loss=0.4397, pruned_loss=0.1924, over 18503.00 frames. ], tot_loss[loss=0.4086, simple_loss=0.4338, pruned_loss=0.192, over 3560250.98 frames. ], batch size: 53, lr: 4.71e-02, grad_scale: 16.0 +2023-03-08 14:27:55,523 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.922e+02 6.237e+02 7.424e+02 9.705e+02 2.435e+03, threshold=1.485e+03, percent-clipped=1.0 +2023-03-08 14:27:56,890 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0791, 5.0988, 5.1569, 4.9033, 4.6548, 4.9133, 4.6024, 4.8421], + device='cuda:2'), covar=tensor([0.0651, 0.0253, 0.0217, 0.0245, 0.0405, 0.0219, 0.0463, 0.0295], + device='cuda:2'), in_proj_covar=tensor([0.0063, 0.0072, 0.0058, 0.0061, 0.0072, 0.0071, 0.0075, 0.0068], + device='cuda:2'), out_proj_covar=tensor([6.6146e-05, 7.1624e-05, 5.7405e-05, 6.9359e-05, 7.9908e-05, 7.5400e-05, + 8.7645e-05, 6.8811e-05], device='cuda:2') +2023-03-08 14:28:17,298 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.88 vs. limit=2.0 +2023-03-08 14:28:36,989 INFO [train.py:898] (2/4) Epoch 1, batch 2650, loss[loss=0.4089, simple_loss=0.4427, pruned_loss=0.1875, over 18314.00 frames. ], tot_loss[loss=0.4007, simple_loss=0.4286, pruned_loss=0.1865, over 3569266.14 frames. ], batch size: 54, lr: 4.70e-02, grad_scale: 16.0 +2023-03-08 14:29:00,871 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5519, 3.8205, 3.7993, 3.9321, 3.7358, 4.2188, 3.5213, 3.4623], + device='cuda:2'), covar=tensor([0.0320, 0.0309, 0.0217, 0.0183, 0.0277, 0.0147, 0.0203, 0.0354], + device='cuda:2'), in_proj_covar=tensor([0.0022, 0.0019, 0.0022, 0.0022, 0.0022, 0.0020, 0.0019, 0.0022], + device='cuda:2'), out_proj_covar=tensor([1.5604e-05, 1.2777e-05, 1.5085e-05, 1.6445e-05, 1.4670e-05, 1.3551e-05, + 1.2410e-05, 1.5192e-05], device='cuda:2') +2023-03-08 14:29:14,984 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2685.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:29:20,169 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-03-08 14:29:27,255 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2696.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:29:32,053 INFO [train.py:898] (2/4) Epoch 1, batch 2700, loss[loss=0.3997, simple_loss=0.4202, pruned_loss=0.1896, over 18530.00 frames. ], tot_loss[loss=0.3968, simple_loss=0.4263, pruned_loss=0.1837, over 3564224.69 frames. 
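The three numbers in every loss[...] record are tied together: once past warm-up, the reported loss is 0.5 * simple_loss + pruned_loss (for the batch-2600 example above, 0.5 * 0.4397 + 0.1924 = 0.4123, matching loss=0.4122 up to rounding). That is, the cheap full-sum transducer loss is kept at a fixed half weight next to the pruned-range loss. A sketch of the combination; the ramp applied to the pruned term during the earliest steps is an assumed detail:

```python
def combine_transducer_losses(simple_loss, pruned_loss, batch_idx,
                              simple_loss_scale=0.5, warm_step=2000):
    # Assumed warm-up: keep the pruned term small until training stabilises,
    # then give it full weight. After warm-up:
    #   loss = 0.5 * simple_loss + pruned_loss
    # e.g. 0.5 * 0.4397 + 0.1924 = 0.4123.
    pruned_scale = 0.1 if batch_idx < warm_step else 1.0
    return simple_loss_scale * simple_loss + pruned_scale * pruned_loss
```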
], batch size: 49, lr: 4.69e-02, grad_scale: 16.0 +2023-03-08 14:29:45,132 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.900e+02 5.817e+02 6.597e+02 8.414e+02 1.857e+03, threshold=1.319e+03, percent-clipped=1.0 +2023-03-08 14:30:01,033 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5538, 4.3906, 4.6135, 4.5936, 4.4225, 4.1996, 4.8270, 4.5020], + device='cuda:2'), covar=tensor([0.0181, 0.0316, 0.0252, 0.0230, 0.0256, 0.0339, 0.0213, 0.0264], + device='cuda:2'), in_proj_covar=tensor([0.0053, 0.0052, 0.0054, 0.0052, 0.0056, 0.0062, 0.0053, 0.0050], + device='cuda:2'), out_proj_covar=tensor([5.3885e-05, 4.8899e-05, 5.3992e-05, 4.9967e-05, 5.9582e-05, 7.1143e-05, + 5.4459e-05, 4.7250e-05], device='cuda:2') +2023-03-08 14:30:04,884 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.99 vs. limit=2.0 +2023-03-08 14:30:22,492 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2746.0, num_to_drop=2, layers_to_drop={1, 2} +2023-03-08 14:30:27,316 INFO [train.py:898] (2/4) Epoch 1, batch 2750, loss[loss=0.4075, simple_loss=0.4353, pruned_loss=0.1899, over 18622.00 frames. ], tot_loss[loss=0.392, simple_loss=0.4236, pruned_loss=0.1803, over 3564403.24 frames. ], batch size: 52, lr: 4.68e-02, grad_scale: 16.0 +2023-03-08 14:30:57,124 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5953, 2.9520, 3.2377, 3.8452, 3.6203, 3.2524, 3.5587, 3.5671], + device='cuda:2'), covar=tensor([0.0490, 0.1488, 0.5054, 0.0412, 0.0500, 0.0567, 0.0423, 0.1373], + device='cuda:2'), in_proj_covar=tensor([0.0067, 0.0066, 0.0065, 0.0054, 0.0061, 0.0068, 0.0064, 0.0049], + device='cuda:2'), out_proj_covar=tensor([5.3617e-05, 5.7946e-05, 6.6286e-05, 4.5969e-05, 5.1278e-05, 5.5136e-05, + 5.0102e-05, 5.3436e-05], device='cuda:2') +2023-03-08 14:31:22,077 INFO [train.py:898] (2/4) Epoch 1, batch 2800, loss[loss=0.3858, simple_loss=0.4256, pruned_loss=0.173, over 18349.00 frames. ], tot_loss[loss=0.3893, simple_loss=0.4216, pruned_loss=0.1786, over 3566084.62 frames. ], batch size: 56, lr: 4.67e-02, grad_scale: 16.0 +2023-03-08 14:31:35,367 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.828e+02 5.568e+02 7.020e+02 9.459e+02 2.422e+03, threshold=1.404e+03, percent-clipped=9.0 +2023-03-08 14:31:47,574 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2824.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:32:15,900 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0913, 4.8384, 4.8442, 4.7504, 4.7478, 4.6571, 5.3343, 5.1825], + device='cuda:2'), covar=tensor([0.0145, 0.0243, 0.0262, 0.0229, 0.0236, 0.0258, 0.0155, 0.0145], + device='cuda:2'), in_proj_covar=tensor([0.0053, 0.0052, 0.0052, 0.0052, 0.0055, 0.0060, 0.0052, 0.0048], + device='cuda:2'), out_proj_covar=tensor([5.6484e-05, 5.1351e-05, 5.4584e-05, 5.1514e-05, 6.2364e-05, 7.2572e-05, + 5.6082e-05, 4.7055e-05], device='cuda:2') +2023-03-08 14:32:16,545 INFO [train.py:898] (2/4) Epoch 1, batch 2850, loss[loss=0.3332, simple_loss=0.3714, pruned_loss=0.1475, over 18263.00 frames. ], tot_loss[loss=0.3842, simple_loss=0.4178, pruned_loss=0.1754, over 3574525.37 frames. ], batch size: 45, lr: 4.66e-02, grad_scale: 16.0 +2023-03-08 14:32:27,384 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.65 vs. limit=2.0 +2023-03-08 14:32:51,782 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.07 vs. 
limit=2.0 +2023-03-08 14:32:53,688 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2885.0, num_to_drop=2, layers_to_drop={0, 1} +2023-03-08 14:33:10,881 INFO [train.py:898] (2/4) Epoch 1, batch 2900, loss[loss=0.3354, simple_loss=0.3786, pruned_loss=0.1461, over 18281.00 frames. ], tot_loss[loss=0.3795, simple_loss=0.4143, pruned_loss=0.1724, over 3587985.46 frames. ], batch size: 49, lr: 4.65e-02, grad_scale: 16.0 +2023-03-08 14:33:23,641 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.949e+02 5.658e+02 6.946e+02 8.980e+02 2.248e+03, threshold=1.389e+03, percent-clipped=2.0 +2023-03-08 14:33:50,285 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2937.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:34:05,708 INFO [train.py:898] (2/4) Epoch 1, batch 2950, loss[loss=0.3719, simple_loss=0.4157, pruned_loss=0.1641, over 18610.00 frames. ], tot_loss[loss=0.3751, simple_loss=0.4115, pruned_loss=0.1694, over 3588137.91 frames. ], batch size: 52, lr: 4.64e-02, grad_scale: 16.0 +2023-03-08 14:34:25,988 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0103, 3.6926, 3.4778, 3.2195, 2.8499, 3.0444, 3.2356, 3.3965], + device='cuda:2'), covar=tensor([0.0671, 0.0364, 0.0269, 0.0336, 0.0977, 0.0645, 0.0398, 0.0099], + device='cuda:2'), in_proj_covar=tensor([0.0019, 0.0013, 0.0011, 0.0015, 0.0022, 0.0016, 0.0015, 0.0011], + device='cuda:2'), out_proj_covar=tensor([2.0132e-05, 1.4048e-05, 1.0725e-05, 1.5117e-05, 2.4069e-05, 1.7657e-05, + 1.5518e-05, 9.5385e-06], device='cuda:2') +2023-03-08 14:34:54,917 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2996.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:34:57,614 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2998.0, num_to_drop=2, layers_to_drop={1, 3} +2023-03-08 14:35:00,656 INFO [train.py:898] (2/4) Epoch 1, batch 3000, loss[loss=0.3591, simple_loss=0.3934, pruned_loss=0.1624, over 18258.00 frames. ], tot_loss[loss=0.3733, simple_loss=0.4105, pruned_loss=0.1681, over 3582639.80 frames. ], batch size: 45, lr: 4.63e-02, grad_scale: 8.0 +2023-03-08 14:35:00,657 INFO [train.py:923] (2/4) Computing validation loss +2023-03-08 14:35:07,077 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9419, 5.0215, 4.6844, 4.7972, 4.8450, 5.1528, 4.6614, 4.9441], + device='cuda:2'), covar=tensor([0.0516, 0.0467, 0.0757, 0.0514, 0.0718, 0.0524, 0.0758, 0.0898], + device='cuda:2'), in_proj_covar=tensor([0.0133, 0.0139, 0.0117, 0.0115, 0.0145, 0.0136, 0.0116, 0.0155], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-08 14:35:12,696 INFO [train.py:932] (2/4) Epoch 1, validation: loss=0.2954, simple_loss=0.387, pruned_loss=0.102, over 944034.00 frames. 
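Validation entries like the one above rescore the same 944034-frame dev set each time, with the model in eval mode and no gradients, after which the peak CUDA allocation is reported (the Maximum memory line that follows). A minimal sketch of that bookkeeping; compute_loss is a hypothetical helper standing in for the trainer's actual loss call:

```python
import torch

def run_validation(model, valid_loader, device):
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = compute_loss(model, batch)  # hypothetical helper
            tot_loss += loss.item() * num_frames
            tot_frames += num_frames
    model.train()
    peak_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    return tot_loss / tot_frames, peak_mb  # per-frame loss, "Maximum memory ... MB"
```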
+2023-03-08 14:35:12,697 INFO [train.py:933] (2/4) Maximum memory allocated so far is 18048MB +2023-03-08 14:35:26,915 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.611e+02 5.968e+02 7.272e+02 9.035e+02 2.166e+03, threshold=1.454e+03, percent-clipped=4.0 +2023-03-08 14:35:32,492 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3019.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:35:57,659 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3041.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:36:00,950 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3044.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:36:08,340 INFO [train.py:898] (2/4) Epoch 1, batch 3050, loss[loss=0.3413, simple_loss=0.393, pruned_loss=0.1448, over 18364.00 frames. ], tot_loss[loss=0.3716, simple_loss=0.4093, pruned_loss=0.167, over 3584927.80 frames. ], batch size: 50, lr: 4.62e-02, grad_scale: 8.0 +2023-03-08 14:36:18,073 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0841, 2.8841, 2.2716, 3.2621, 3.5284, 3.7586, 2.1368, 3.3012], + device='cuda:2'), covar=tensor([0.0170, 0.0616, 0.0925, 0.0324, 0.0270, 0.0234, 0.0883, 0.0287], + device='cuda:2'), in_proj_covar=tensor([0.0020, 0.0028, 0.0028, 0.0020, 0.0021, 0.0021, 0.0026, 0.0022], + device='cuda:2'), out_proj_covar=tensor([1.6495e-05, 2.5454e-05, 2.5660e-05, 1.8449e-05, 1.6560e-05, 1.6356e-05, + 2.4220e-05, 1.9047e-05], device='cuda:2') +2023-03-08 14:36:41,047 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3080.0, num_to_drop=2, layers_to_drop={0, 2} +2023-03-08 14:37:04,043 INFO [train.py:898] (2/4) Epoch 1, batch 3100, loss[loss=0.4527, simple_loss=0.4602, pruned_loss=0.2226, over 16143.00 frames. ], tot_loss[loss=0.3677, simple_loss=0.4071, pruned_loss=0.1641, over 3587281.11 frames. ], batch size: 94, lr: 4.61e-02, grad_scale: 8.0 +2023-03-08 14:37:18,601 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.360e+02 5.789e+02 7.194e+02 8.721e+02 2.161e+03, threshold=1.439e+03, percent-clipped=3.0 +2023-03-08 14:37:27,589 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.34 vs. limit=2.0 +2023-03-08 14:37:59,565 INFO [train.py:898] (2/4) Epoch 1, batch 3150, loss[loss=0.3876, simple_loss=0.4221, pruned_loss=0.1766, over 17112.00 frames. ], tot_loss[loss=0.3662, simple_loss=0.4063, pruned_loss=0.1631, over 3590937.42 frames. ], batch size: 78, lr: 4.60e-02, grad_scale: 8.0 +2023-03-08 14:38:14,798 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.82 vs. limit=5.0 +2023-03-08 14:38:31,220 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3180.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:38:54,779 INFO [train.py:898] (2/4) Epoch 1, batch 3200, loss[loss=0.3103, simple_loss=0.3559, pruned_loss=0.1323, over 18143.00 frames. ], tot_loss[loss=0.3655, simple_loss=0.4061, pruned_loss=0.1624, over 3583203.81 frames. ], batch size: 44, lr: 4.59e-02, grad_scale: 8.0 +2023-03-08 14:39:08,070 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 6.227e+02 7.762e+02 9.563e+02 2.131e+03, threshold=1.552e+03, percent-clipped=3.0 +2023-03-08 14:39:49,491 INFO [train.py:898] (2/4) Epoch 1, batch 3250, loss[loss=0.3602, simple_loss=0.4092, pruned_loss=0.1556, over 18494.00 frames. ], tot_loss[loss=0.3642, simple_loss=0.4052, pruned_loss=0.1616, over 3589385.09 frames. 
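The zipformer.py:1455 dumps are diagnostics rather than losses: each attn_weights_entropy value is the entropy of one attention head's weight distribution, averaged over query positions (the covar/in_proj_covar/out_proj_covar tensors are companion second-moment statistics). Entropy near log(sequence_length) means a head spreads its attention almost uniformly; values near zero mean it locks onto single positions. A sketch of the entropy part, with the tensor layout assumed for illustration:

```python
import torch

def attn_weights_entropy(attn: torch.Tensor) -> torch.Tensor:
    # attn: (num_heads, num_queries, num_keys), each row summing to 1.
    ent = -(attn * (attn + 1e-20).log()).sum(dim=-1)  # (heads, queries)
    return ent.mean(dim=-1)                           # mean entropy per head
```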
], batch size: 51, lr: 4.58e-02, grad_scale: 8.0 +2023-03-08 14:40:30,658 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3288.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:40:35,817 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3293.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:40:45,153 INFO [train.py:898] (2/4) Epoch 1, batch 3300, loss[loss=0.3508, simple_loss=0.4017, pruned_loss=0.15, over 18411.00 frames. ], tot_loss[loss=0.3603, simple_loss=0.4022, pruned_loss=0.1592, over 3595630.82 frames. ], batch size: 52, lr: 4.57e-02, grad_scale: 8.0 +2023-03-08 14:40:58,798 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.218e+02 5.885e+02 6.635e+02 8.607e+02 2.408e+03, threshold=1.327e+03, percent-clipped=3.0 +2023-03-08 14:41:24,336 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2019, 5.1202, 5.1144, 4.8955, 4.6331, 4.9145, 4.7269, 4.8328], + device='cuda:2'), covar=tensor([0.0419, 0.0255, 0.0174, 0.0209, 0.0539, 0.0225, 0.0394, 0.0257], + device='cuda:2'), in_proj_covar=tensor([0.0072, 0.0086, 0.0076, 0.0072, 0.0085, 0.0087, 0.0092, 0.0083], + device='cuda:2'), out_proj_covar=tensor([9.1113e-05, 1.0210e-04, 8.3944e-05, 1.0091e-04, 1.2169e-04, 1.1293e-04, + 1.3353e-04, 1.0639e-04], device='cuda:2') +2023-03-08 14:41:29,300 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3341.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:41:38,139 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3349.0, num_to_drop=2, layers_to_drop={0, 2} +2023-03-08 14:41:39,920 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8877, 4.3539, 4.2088, 3.9140, 4.4117, 3.6987, 3.7864, 3.9649], + device='cuda:2'), covar=tensor([0.0430, 0.0204, 0.0157, 0.0240, 0.0096, 0.0440, 0.0329, 0.0338], + device='cuda:2'), in_proj_covar=tensor([0.0026, 0.0022, 0.0019, 0.0021, 0.0014, 0.0022, 0.0017, 0.0022], + device='cuda:2'), out_proj_covar=tensor([1.5867e-05, 1.3703e-05, 1.0546e-05, 1.1968e-05, 7.6739e-06, 1.4148e-05, + 9.6418e-06, 1.3413e-05], device='cuda:2') +2023-03-08 14:41:40,520 INFO [train.py:898] (2/4) Epoch 1, batch 3350, loss[loss=0.3874, simple_loss=0.4274, pruned_loss=0.1737, over 17247.00 frames. ], tot_loss[loss=0.3553, simple_loss=0.3987, pruned_loss=0.156, over 3603371.43 frames. ], batch size: 78, lr: 4.56e-02, grad_scale: 8.0 +2023-03-08 14:41:51,127 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=6.28 vs. limit=5.0 +2023-03-08 14:42:03,862 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.89 vs. limit=2.0 +2023-03-08 14:42:06,611 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3375.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:42:17,342 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.91 vs. limit=2.0 +2023-03-08 14:42:22,286 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3389.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:42:36,013 INFO [train.py:898] (2/4) Epoch 1, batch 3400, loss[loss=0.2787, simple_loss=0.3339, pruned_loss=0.1118, over 18342.00 frames. ], tot_loss[loss=0.352, simple_loss=0.396, pruned_loss=0.154, over 3594333.32 frames. 
], batch size: 46, lr: 4.55e-02, grad_scale: 8.0 +2023-03-08 14:42:50,975 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.931e+02 5.703e+02 6.610e+02 8.336e+02 1.336e+03, threshold=1.322e+03, percent-clipped=1.0 +2023-03-08 14:42:51,615 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9291, 4.9264, 3.7675, 4.8399, 5.0779, 4.1957, 4.8482, 4.3834], + device='cuda:2'), covar=tensor([0.0059, 0.0081, 0.0922, 0.0047, 0.0053, 0.0362, 0.0109, 0.0225], + device='cuda:2'), in_proj_covar=tensor([0.0030, 0.0027, 0.0065, 0.0027, 0.0026, 0.0046, 0.0042, 0.0039], + device='cuda:2'), out_proj_covar=tensor([2.1037e-05, 2.0530e-05, 5.7742e-05, 1.8630e-05, 1.8319e-05, 4.1091e-05, + 3.7043e-05, 3.4040e-05], device='cuda:2') +2023-03-08 14:43:31,453 INFO [train.py:898] (2/4) Epoch 1, batch 3450, loss[loss=0.3181, simple_loss=0.3579, pruned_loss=0.1392, over 18166.00 frames. ], tot_loss[loss=0.3521, simple_loss=0.3963, pruned_loss=0.154, over 3585628.13 frames. ], batch size: 44, lr: 4.54e-02, grad_scale: 8.0 +2023-03-08 14:44:04,106 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3480.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:44:27,351 INFO [train.py:898] (2/4) Epoch 1, batch 3500, loss[loss=0.3335, simple_loss=0.3954, pruned_loss=0.1358, over 18403.00 frames. ], tot_loss[loss=0.3498, simple_loss=0.3946, pruned_loss=0.1525, over 3582458.74 frames. ], batch size: 52, lr: 4.53e-02, grad_scale: 8.0 +2023-03-08 14:44:42,685 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.394e+02 6.336e+02 7.785e+02 9.331e+02 2.014e+03, threshold=1.557e+03, percent-clipped=6.0 +2023-03-08 14:44:58,033 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3528.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:45:16,075 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3546.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:45:20,678 INFO [train.py:898] (2/4) Epoch 1, batch 3550, loss[loss=0.3286, simple_loss=0.3698, pruned_loss=0.1437, over 18430.00 frames. ], tot_loss[loss=0.3524, simple_loss=0.3966, pruned_loss=0.1541, over 3580077.38 frames. ], batch size: 43, lr: 4.51e-02, grad_scale: 8.0 +2023-03-08 14:45:55,530 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9106, 4.8997, 4.8517, 4.4479, 4.4599, 4.6177, 4.3907, 4.5849], + device='cuda:2'), covar=tensor([0.0377, 0.0255, 0.0233, 0.0286, 0.0523, 0.0278, 0.0534, 0.0338], + device='cuda:2'), in_proj_covar=tensor([0.0070, 0.0087, 0.0075, 0.0070, 0.0084, 0.0086, 0.0091, 0.0084], + device='cuda:2'), out_proj_covar=tensor([9.4654e-05, 1.1035e-04, 8.7908e-05, 1.0253e-04, 1.2516e-04, 1.1973e-04, + 1.3880e-04, 1.1609e-04], device='cuda:2') +2023-03-08 14:46:04,616 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3593.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:46:12,816 INFO [train.py:898] (2/4) Epoch 1, batch 3600, loss[loss=0.3033, simple_loss=0.359, pruned_loss=0.1238, over 18508.00 frames. ], tot_loss[loss=0.3528, simple_loss=0.3972, pruned_loss=0.1541, over 3565763.23 frames. ], batch size: 47, lr: 4.50e-02, grad_scale: 8.0 +2023-03-08 14:46:17,787 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. 
limit=2.0 +2023-03-08 14:46:19,774 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3607.0, num_to_drop=2, layers_to_drop={2, 3} +2023-03-08 14:46:26,691 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.777e+02 7.280e+02 9.972e+02 1.228e+03 2.916e+03, threshold=1.994e+03, percent-clipped=11.0 +2023-03-08 14:46:30,097 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2862, 4.2357, 3.6501, 3.2840, 3.1513, 3.8627, 4.3465, 2.9337], + device='cuda:2'), covar=tensor([0.0222, 0.0090, 0.0149, 0.0177, 0.0544, 0.0216, 0.0063, 0.0453], + device='cuda:2'), in_proj_covar=tensor([0.0027, 0.0023, 0.0025, 0.0025, 0.0040, 0.0024, 0.0022, 0.0036], + device='cuda:2'), out_proj_covar=tensor([2.3596e-05, 1.8460e-05, 2.0651e-05, 2.2360e-05, 3.2801e-05, 1.7999e-05, + 1.8201e-05, 3.0226e-05], device='cuda:2') +2023-03-08 14:46:34,714 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.70 vs. limit=5.0 +2023-03-08 14:46:42,929 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2445, 3.8903, 3.4147, 3.6739, 3.6375, 3.8976, 3.5715, 3.3930], + device='cuda:2'), covar=tensor([0.0330, 0.0087, 0.0163, 0.0121, 0.0104, 0.0089, 0.0210, 0.0221], + device='cuda:2'), in_proj_covar=tensor([0.0032, 0.0021, 0.0025, 0.0020, 0.0024, 0.0021, 0.0026, 0.0027], + device='cuda:2'), out_proj_covar=tensor([5.3162e-05, 3.3099e-05, 4.0189e-05, 3.4545e-05, 3.4470e-05, 3.1140e-05, + 4.0650e-05, 4.2925e-05], device='cuda:2') +2023-03-08 14:47:17,218 INFO [train.py:898] (2/4) Epoch 2, batch 0, loss[loss=0.3083, simple_loss=0.3519, pruned_loss=0.1323, over 18452.00 frames. ], tot_loss[loss=0.3083, simple_loss=0.3519, pruned_loss=0.1323, over 18452.00 frames. ], batch size: 43, lr: 4.41e-02, grad_scale: 8.0 +2023-03-08 14:47:17,218 INFO [train.py:923] (2/4) Computing validation loss +2023-03-08 14:47:29,035 INFO [train.py:932] (2/4) Epoch 2, validation: loss=0.2643, simple_loss=0.3556, pruned_loss=0.08646, over 944034.00 frames. +2023-03-08 14:47:29,036 INFO [train.py:933] (2/4) Maximum memory allocated so far is 18133MB +2023-03-08 14:47:32,846 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.43 vs. limit=2.0 +2023-03-08 14:47:35,866 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3641.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:47:39,194 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3644.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:48:07,659 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.97 vs. limit=2.0 +2023-03-08 14:48:15,964 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3675.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:48:17,226 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3676.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:48:26,901 INFO [train.py:898] (2/4) Epoch 2, batch 50, loss[loss=0.3478, simple_loss=0.4058, pruned_loss=0.1449, over 18392.00 frames. ], tot_loss[loss=0.3392, simple_loss=0.3872, pruned_loss=0.1457, over 797763.93 frames. ], batch size: 52, lr: 4.40e-02, grad_scale: 8.0 +2023-03-08 14:48:39,997 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.49 vs. 
limit=5.0 +2023-03-08 14:49:00,565 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 4.344e+02 6.253e+02 7.943e+02 9.806e+02 1.695e+03, threshold=1.589e+03, percent-clipped=0.0 +2023-03-08 14:49:11,436 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3723.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:49:25,297 INFO [train.py:898] (2/4) Epoch 2, batch 100, loss[loss=0.3375, simple_loss=0.3937, pruned_loss=0.1406, over 17044.00 frames. ], tot_loss[loss=0.3395, simple_loss=0.3888, pruned_loss=0.1451, over 1406704.39 frames. ], batch size: 78, lr: 4.39e-02, grad_scale: 8.0 +2023-03-08 14:49:27,870 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3737.0, num_to_drop=2, layers_to_drop={2, 3} +2023-03-08 14:50:22,838 INFO [train.py:898] (2/4) Epoch 2, batch 150, loss[loss=0.278, simple_loss=0.3337, pruned_loss=0.1111, over 17651.00 frames. ], tot_loss[loss=0.3387, simple_loss=0.3879, pruned_loss=0.1448, over 1889276.43 frames. ], batch size: 39, lr: 4.38e-02, grad_scale: 8.0 +2023-03-08 14:50:55,229 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.556e+02 6.308e+02 7.748e+02 1.009e+03 1.866e+03, threshold=1.550e+03, percent-clipped=3.0 +2023-03-08 14:51:21,243 INFO [train.py:898] (2/4) Epoch 2, batch 200, loss[loss=0.2996, simple_loss=0.3676, pruned_loss=0.1158, over 18384.00 frames. ], tot_loss[loss=0.3359, simple_loss=0.385, pruned_loss=0.1434, over 2259179.62 frames. ], batch size: 50, lr: 4.37e-02, grad_scale: 8.0 +2023-03-08 14:51:32,358 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-03-08 14:52:20,119 INFO [train.py:898] (2/4) Epoch 2, batch 250, loss[loss=0.3335, simple_loss=0.3903, pruned_loss=0.1383, over 18623.00 frames. ], tot_loss[loss=0.3358, simple_loss=0.3848, pruned_loss=0.1434, over 2559246.53 frames. ], batch size: 52, lr: 4.36e-02, grad_scale: 8.0 +2023-03-08 14:52:29,036 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. limit=2.0 +2023-03-08 14:52:39,898 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3902.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:52:53,093 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.269e+02 6.436e+02 8.265e+02 1.082e+03 2.310e+03, threshold=1.653e+03, percent-clipped=6.0 +2023-03-08 14:52:57,971 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3114, 3.1527, 2.4408, 2.9116, 3.0882, 3.3014, 2.7133, 2.9204], + device='cuda:2'), covar=tensor([0.0573, 0.0505, 0.0756, 0.0411, 0.1236, 0.0583, 0.0376, 0.0328], + device='cuda:2'), in_proj_covar=tensor([0.0042, 0.0038, 0.0065, 0.0054, 0.0049, 0.0039, 0.0043, 0.0050], + device='cuda:2'), out_proj_covar=tensor([5.2660e-05, 5.0969e-05, 6.9551e-05, 5.3643e-05, 5.9717e-05, 4.6744e-05, + 4.5564e-05, 4.8548e-05], device='cuda:2') +2023-03-08 14:53:08,359 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3926.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:53:18,703 INFO [train.py:898] (2/4) Epoch 2, batch 300, loss[loss=0.3065, simple_loss=0.3578, pruned_loss=0.1276, over 18159.00 frames. ], tot_loss[loss=0.3347, simple_loss=0.3845, pruned_loss=0.1425, over 2787577.72 frames. 
], batch size: 44, lr: 4.35e-02, grad_scale: 8.0 +2023-03-08 14:53:29,600 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3944.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:53:55,051 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.87 vs. limit=2.0 +2023-03-08 14:54:16,002 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.07 vs. limit=5.0 +2023-03-08 14:54:17,747 INFO [train.py:898] (2/4) Epoch 2, batch 350, loss[loss=0.3621, simple_loss=0.412, pruned_loss=0.156, over 18125.00 frames. ], tot_loss[loss=0.3321, simple_loss=0.3825, pruned_loss=0.1409, over 2968562.42 frames. ], batch size: 62, lr: 4.34e-02, grad_scale: 8.0 +2023-03-08 14:54:20,341 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3987.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:54:26,777 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=3992.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:54:56,247 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.948e+02 5.340e+02 6.954e+02 8.950e+02 2.037e+03, threshold=1.391e+03, percent-clipped=3.0 +2023-03-08 14:55:18,857 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4032.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 14:55:22,116 INFO [train.py:898] (2/4) Epoch 2, batch 400, loss[loss=0.2768, simple_loss=0.3278, pruned_loss=0.1129, over 18449.00 frames. ], tot_loss[loss=0.3285, simple_loss=0.3792, pruned_loss=0.1389, over 3119979.90 frames. ], batch size: 43, lr: 4.33e-02, grad_scale: 8.0 +2023-03-08 14:55:52,199 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-03-08 14:56:22,110 INFO [train.py:898] (2/4) Epoch 2, batch 450, loss[loss=0.2881, simple_loss=0.3399, pruned_loss=0.1182, over 18352.00 frames. ], tot_loss[loss=0.3268, simple_loss=0.3777, pruned_loss=0.1379, over 3230273.62 frames. ], batch size: 46, lr: 4.31e-02, grad_scale: 8.0 +2023-03-08 14:56:35,025 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.45 vs. limit=5.0 +2023-03-08 14:56:56,421 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.730e+02 6.143e+02 7.834e+02 1.006e+03 1.697e+03, threshold=1.567e+03, percent-clipped=3.0 +2023-03-08 14:57:10,920 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.41 vs. limit=5.0 +2023-03-08 14:57:17,697 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4532, 3.8384, 4.1608, 3.5168, 4.1667, 4.0992, 4.0059, 3.6022], + device='cuda:2'), covar=tensor([0.0717, 0.0347, 0.0143, 0.0413, 0.0104, 0.0362, 0.0189, 0.0571], + device='cuda:2'), in_proj_covar=tensor([0.0034, 0.0027, 0.0023, 0.0028, 0.0017, 0.0032, 0.0020, 0.0032], + device='cuda:2'), out_proj_covar=tensor([2.2413e-05, 1.7645e-05, 1.4007e-05, 1.7717e-05, 9.9439e-06, 2.1919e-05, + 1.2436e-05, 2.0460e-05], device='cuda:2') +2023-03-08 14:57:21,303 INFO [train.py:898] (2/4) Epoch 2, batch 500, loss[loss=0.3138, simple_loss=0.357, pruned_loss=0.1353, over 18513.00 frames. ], tot_loss[loss=0.3261, simple_loss=0.3771, pruned_loss=0.1376, over 3305603.21 frames. 
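The lr column decays smoothly across these entries, consistent with an Eden-style schedule in which the rate shrinks with both the global batch index and the epoch. A sketch under that assumption; base_lr and the two time constants are illustrative values chosen because they land close to the logged rates (about 4.84e-02 at batch 1850 of epoch 1, about 4.3e-02 early in epoch 2):

```python
def eden_lr(base_lr, batch_idx, epoch, lr_batches=5000.0, lr_epochs=3.5):
    # Assumed Eden-style decay (epoch counted from 0): both factors start
    # near 1.0 and shrink as training proceeds. With base_lr=0.05 this gives
    # ~4.84e-02 at batch 1850, close to the lr values logged above.
    batch_factor = ((batch_idx ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor
```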
], batch size: 47, lr: 4.30e-02, grad_scale: 8.0 +2023-03-08 14:57:28,820 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4141.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:58:19,897 INFO [train.py:898] (2/4) Epoch 2, batch 550, loss[loss=0.2686, simple_loss=0.3266, pruned_loss=0.1053, over 18189.00 frames. ], tot_loss[loss=0.3267, simple_loss=0.3775, pruned_loss=0.138, over 3366240.79 frames. ], batch size: 44, lr: 4.29e-02, grad_scale: 8.0 +2023-03-08 14:58:29,539 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5269, 4.3046, 2.9510, 3.9770, 4.4682, 2.5478, 3.9667, 3.7400], + device='cuda:2'), covar=tensor([0.0085, 0.0091, 0.2076, 0.0153, 0.0097, 0.1106, 0.0410, 0.0570], + device='cuda:2'), in_proj_covar=tensor([0.0049, 0.0043, 0.0119, 0.0049, 0.0042, 0.0084, 0.0076, 0.0071], + device='cuda:2'), out_proj_covar=tensor([3.8789e-05, 3.6474e-05, 1.0675e-04, 3.8687e-05, 3.3459e-05, 7.6474e-05, + 7.0546e-05, 6.8095e-05], device='cuda:2') +2023-03-08 14:58:42,159 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4202.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:58:42,244 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4202.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 14:58:56,105 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.937e+02 6.401e+02 7.607e+02 9.685e+02 1.732e+03, threshold=1.521e+03, percent-clipped=4.0 +2023-03-08 14:59:20,553 INFO [train.py:898] (2/4) Epoch 2, batch 600, loss[loss=0.3298, simple_loss=0.3813, pruned_loss=0.1391, over 18614.00 frames. ], tot_loss[loss=0.3263, simple_loss=0.3778, pruned_loss=0.1374, over 3419190.15 frames. ], batch size: 52, lr: 4.28e-02, grad_scale: 8.0 +2023-03-08 14:59:35,227 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7712, 3.7246, 4.1576, 3.6215, 2.1712, 3.7851, 4.1393, 2.3588], + device='cuda:2'), covar=tensor([0.0105, 0.0116, 0.0065, 0.0107, 0.0695, 0.0095, 0.0072, 0.0563], + device='cuda:2'), in_proj_covar=tensor([0.0033, 0.0030, 0.0030, 0.0030, 0.0054, 0.0030, 0.0026, 0.0049], + device='cuda:2'), out_proj_covar=tensor([3.1279e-05, 2.5464e-05, 2.6033e-05, 2.8439e-05, 4.7517e-05, 2.4509e-05, + 2.3663e-05, 4.4777e-05], device='cuda:2') +2023-03-08 14:59:40,187 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4250.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 14:59:55,894 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5441, 4.3298, 4.0564, 3.8877, 2.7374, 3.1216, 3.6419, 3.9693], + device='cuda:2'), covar=tensor([0.0503, 0.0219, 0.0065, 0.0158, 0.0689, 0.0632, 0.0151, 0.0045], + device='cuda:2'), in_proj_covar=tensor([0.0043, 0.0027, 0.0021, 0.0032, 0.0049, 0.0053, 0.0033, 0.0022], + device='cuda:2'), out_proj_covar=tensor([5.2433e-05, 3.5250e-05, 2.2841e-05, 3.7191e-05, 5.6529e-05, 6.2125e-05, + 3.8931e-05, 2.3331e-05], device='cuda:2') +2023-03-08 15:00:17,452 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4282.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:00:20,595 INFO [train.py:898] (2/4) Epoch 2, batch 650, loss[loss=0.2762, simple_loss=0.332, pruned_loss=0.1103, over 18394.00 frames. ], tot_loss[loss=0.3257, simple_loss=0.3773, pruned_loss=0.1371, over 3445749.10 frames. 
], batch size: 42, lr: 4.27e-02, grad_scale: 8.0 +2023-03-08 15:00:24,382 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4288.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:00:55,826 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.352e+02 6.066e+02 7.398e+02 9.036e+02 1.375e+03, threshold=1.480e+03, percent-clipped=0.0 +2023-03-08 15:01:02,538 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0372, 2.9994, 2.2097, 2.7165, 2.9998, 3.1679, 2.3624, 2.8916], + device='cuda:2'), covar=tensor([0.0547, 0.0649, 0.0962, 0.0491, 0.1043, 0.0437, 0.0576, 0.0333], + device='cuda:2'), in_proj_covar=tensor([0.0049, 0.0043, 0.0072, 0.0060, 0.0053, 0.0039, 0.0048, 0.0054], + device='cuda:2'), out_proj_covar=tensor([6.2796e-05, 5.8530e-05, 8.3390e-05, 6.4160e-05, 6.8832e-05, 4.6941e-05, + 5.3957e-05, 5.5213e-05], device='cuda:2') +2023-03-08 15:01:15,269 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4925, 3.2385, 3.2389, 2.9884, 2.7413, 2.7362, 2.9311, 2.4691], + device='cuda:2'), covar=tensor([0.0388, 0.0165, 0.0141, 0.0145, 0.0219, 0.0304, 0.0167, 0.0277], + device='cuda:2'), in_proj_covar=tensor([0.0028, 0.0026, 0.0026, 0.0026, 0.0034, 0.0026, 0.0029, 0.0031], + device='cuda:2'), out_proj_covar=tensor([6.1126e-05, 6.7658e-05, 5.0198e-05, 5.6054e-05, 7.2334e-05, 5.6414e-05, + 5.5261e-05, 6.5883e-05], device='cuda:2') +2023-03-08 15:01:17,591 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4332.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 15:01:20,629 INFO [train.py:898] (2/4) Epoch 2, batch 700, loss[loss=0.3365, simple_loss=0.3896, pruned_loss=0.1417, over 18308.00 frames. ], tot_loss[loss=0.3249, simple_loss=0.3765, pruned_loss=0.1366, over 3461795.45 frames. ], batch size: 57, lr: 4.26e-02, grad_scale: 8.0 +2023-03-08 15:01:37,865 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4349.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:02:14,424 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4380.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 15:02:19,780 INFO [train.py:898] (2/4) Epoch 2, batch 750, loss[loss=0.3381, simple_loss=0.3775, pruned_loss=0.1493, over 18261.00 frames. ], tot_loss[loss=0.3248, simple_loss=0.3764, pruned_loss=0.1365, over 3484385.09 frames. ], batch size: 45, lr: 4.25e-02, grad_scale: 8.0 +2023-03-08 15:02:54,754 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.989e+02 6.069e+02 8.253e+02 1.039e+03 2.142e+03, threshold=1.651e+03, percent-clipped=4.0 +2023-03-08 15:03:19,452 INFO [train.py:898] (2/4) Epoch 2, batch 800, loss[loss=0.2803, simple_loss=0.3478, pruned_loss=0.1064, over 18478.00 frames. ], tot_loss[loss=0.3248, simple_loss=0.3766, pruned_loss=0.1366, over 3518608.51 frames. ], batch size: 51, lr: 4.24e-02, grad_scale: 8.0 +2023-03-08 15:03:56,394 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.10 vs. limit=2.0 +2023-03-08 15:04:19,439 INFO [train.py:898] (2/4) Epoch 2, batch 850, loss[loss=0.3223, simple_loss=0.3849, pruned_loss=0.1299, over 18462.00 frames. ], tot_loss[loss=0.323, simple_loss=0.3756, pruned_loss=0.1352, over 3547347.49 frames. ], batch size: 59, lr: 4.23e-02, grad_scale: 8.0 +2023-03-08 15:04:23,325 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.94 vs. 
limit=5.0 +2023-03-08 15:04:33,250 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2772, 5.2197, 5.2644, 5.0713, 5.1085, 5.7193, 5.3006, 5.0788], + device='cuda:2'), covar=tensor([0.0538, 0.0548, 0.0540, 0.0463, 0.1015, 0.0520, 0.0516, 0.1591], + device='cuda:2'), in_proj_covar=tensor([0.0155, 0.0132, 0.0130, 0.0117, 0.0168, 0.0165, 0.0114, 0.0175], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002], + device='cuda:2') +2023-03-08 15:04:33,264 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4497.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:04:37,878 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0883, 4.2472, 4.3467, 3.7302, 3.6277, 2.7826, 3.1690, 2.7304], + device='cuda:2'), covar=tensor([0.0317, 0.0186, 0.0098, 0.0189, 0.0229, 0.0380, 0.0292, 0.0375], + device='cuda:2'), in_proj_covar=tensor([0.0026, 0.0025, 0.0026, 0.0025, 0.0033, 0.0024, 0.0028, 0.0030], + device='cuda:2'), out_proj_covar=tensor([6.0876e-05, 6.8938e-05, 4.9927e-05, 5.7573e-05, 7.3466e-05, 5.4433e-05, + 5.6025e-05, 6.6547e-05], device='cuda:2') +2023-03-08 15:04:53,488 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.476e+02 6.096e+02 7.477e+02 9.236e+02 1.546e+03, threshold=1.495e+03, percent-clipped=0.0 +2023-03-08 15:05:18,309 INFO [train.py:898] (2/4) Epoch 2, batch 900, loss[loss=0.2415, simple_loss=0.3029, pruned_loss=0.09001, over 18472.00 frames. ], tot_loss[loss=0.3193, simple_loss=0.373, pruned_loss=0.1329, over 3570689.68 frames. ], batch size: 43, lr: 4.22e-02, grad_scale: 8.0 +2023-03-08 15:06:09,708 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5902, 4.4789, 4.6066, 4.5899, 4.6171, 4.3944, 4.9803, 4.7865], + device='cuda:2'), covar=tensor([0.0125, 0.0187, 0.0183, 0.0128, 0.0164, 0.0143, 0.0127, 0.0134], + device='cuda:2'), in_proj_covar=tensor([0.0060, 0.0050, 0.0049, 0.0053, 0.0055, 0.0061, 0.0054, 0.0049], + device='cuda:2'), out_proj_covar=tensor([1.1702e-04, 8.2725e-05, 8.2606e-05, 8.3318e-05, 1.0154e-04, 1.1487e-04, + 9.2950e-05, 8.0020e-05], device='cuda:2') +2023-03-08 15:06:14,698 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4582.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:06:17,754 INFO [train.py:898] (2/4) Epoch 2, batch 950, loss[loss=0.3294, simple_loss=0.3849, pruned_loss=0.137, over 18355.00 frames. ], tot_loss[loss=0.3202, simple_loss=0.3728, pruned_loss=0.1337, over 3574124.59 frames. ], batch size: 56, lr: 4.21e-02, grad_scale: 8.0 +2023-03-08 15:06:52,449 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.369e+02 6.106e+02 7.676e+02 9.311e+02 1.838e+03, threshold=1.535e+03, percent-clipped=6.0 +2023-03-08 15:07:02,424 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.98 vs. limit=2.0 +2023-03-08 15:07:05,822 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.13 vs. 
limit=2.0 +2023-03-08 15:07:12,184 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4630.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:07:17,303 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2858, 5.0636, 4.5189, 5.3338, 5.2039, 4.7136, 5.1525, 4.5447], + device='cuda:2'), covar=tensor([0.0225, 0.0285, 0.1469, 0.0320, 0.0186, 0.0345, 0.0248, 0.0444], + device='cuda:2'), in_proj_covar=tensor([0.0157, 0.0167, 0.0257, 0.0142, 0.0142, 0.0175, 0.0169, 0.0189], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 15:07:18,134 INFO [train.py:898] (2/4) Epoch 2, batch 1000, loss[loss=0.3155, simple_loss=0.3698, pruned_loss=0.1306, over 18478.00 frames. ], tot_loss[loss=0.3191, simple_loss=0.3726, pruned_loss=0.1329, over 3588864.53 frames. ], batch size: 51, lr: 4.20e-02, grad_scale: 8.0 +2023-03-08 15:07:28,739 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4644.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:07:40,474 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.4490, 4.4151, 4.6041, 4.5166, 4.4806, 4.2279, 4.8244, 4.6676], + device='cuda:2'), covar=tensor([0.0101, 0.0147, 0.0113, 0.0097, 0.0123, 0.0138, 0.0089, 0.0118], + device='cuda:2'), in_proj_covar=tensor([0.0059, 0.0051, 0.0049, 0.0054, 0.0054, 0.0061, 0.0053, 0.0049], + device='cuda:2'), out_proj_covar=tensor([1.1747e-04, 8.5564e-05, 8.3325e-05, 8.6432e-05, 1.0055e-04, 1.1790e-04, + 9.2354e-05, 8.1247e-05], device='cuda:2') +2023-03-08 15:07:55,107 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.79 vs. limit=2.0 +2023-03-08 15:08:05,512 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.01 vs. limit=2.0 +2023-03-08 15:08:17,970 INFO [train.py:898] (2/4) Epoch 2, batch 1050, loss[loss=0.3602, simple_loss=0.4129, pruned_loss=0.1537, over 18276.00 frames. ], tot_loss[loss=0.3181, simple_loss=0.3718, pruned_loss=0.1322, over 3579381.36 frames. ], batch size: 57, lr: 4.19e-02, grad_scale: 8.0 +2023-03-08 15:08:52,051 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.788e+02 5.479e+02 6.722e+02 8.127e+02 1.317e+03, threshold=1.344e+03, percent-clipped=0.0 +2023-03-08 15:09:08,773 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.65 vs. limit=5.0 +2023-03-08 15:09:17,314 INFO [train.py:898] (2/4) Epoch 2, batch 1100, loss[loss=0.3365, simple_loss=0.3885, pruned_loss=0.1423, over 18347.00 frames. ], tot_loss[loss=0.3172, simple_loss=0.3715, pruned_loss=0.1314, over 3584655.77 frames. 
], batch size: 56, lr: 4.18e-02, grad_scale: 4.0 +2023-03-08 15:09:30,877 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5659, 3.5776, 3.6154, 3.5923, 1.9257, 3.6314, 4.1222, 2.5501], + device='cuda:2'), covar=tensor([0.0147, 0.0153, 0.0108, 0.0157, 0.0965, 0.0137, 0.0066, 0.0626], + device='cuda:2'), in_proj_covar=tensor([0.0040, 0.0036, 0.0037, 0.0035, 0.0068, 0.0036, 0.0028, 0.0064], + device='cuda:2'), out_proj_covar=tensor([3.8727e-05, 3.2418e-05, 3.4736e-05, 3.4152e-05, 6.2143e-05, 3.0426e-05, + 2.8044e-05, 6.0522e-05], device='cuda:2') +2023-03-08 15:10:01,886 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0246, 3.3905, 2.2827, 3.3753, 3.1320, 4.3540, 2.1473, 3.6987], + device='cuda:2'), covar=tensor([0.0133, 0.0772, 0.1630, 0.0373, 0.0591, 0.0092, 0.1507, 0.0280], + device='cuda:2'), in_proj_covar=tensor([0.0052, 0.0084, 0.0083, 0.0042, 0.0069, 0.0043, 0.0081, 0.0069], + device='cuda:2'), out_proj_covar=tensor([4.5183e-05, 8.9419e-05, 9.3042e-05, 5.1130e-05, 6.7937e-05, 3.9129e-05, + 8.2316e-05, 6.6088e-05], device='cuda:2') +2023-03-08 15:10:17,508 INFO [train.py:898] (2/4) Epoch 2, batch 1150, loss[loss=0.3555, simple_loss=0.4005, pruned_loss=0.1553, over 18015.00 frames. ], tot_loss[loss=0.3165, simple_loss=0.3707, pruned_loss=0.1312, over 3587020.12 frames. ], batch size: 65, lr: 4.17e-02, grad_scale: 4.0 +2023-03-08 15:10:31,862 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4797.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:10:35,973 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.76 vs. limit=2.0 +2023-03-08 15:10:52,311 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.612e+02 6.045e+02 7.965e+02 9.549e+02 1.896e+03, threshold=1.593e+03, percent-clipped=3.0 +2023-03-08 15:11:02,791 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.02 vs. limit=2.0 +2023-03-08 15:11:16,902 INFO [train.py:898] (2/4) Epoch 2, batch 1200, loss[loss=0.2725, simple_loss=0.3333, pruned_loss=0.1058, over 18513.00 frames. ], tot_loss[loss=0.3154, simple_loss=0.3694, pruned_loss=0.1307, over 3580942.31 frames. ], batch size: 44, lr: 4.16e-02, grad_scale: 8.0 +2023-03-08 15:11:29,239 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4845.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:12:16,084 INFO [train.py:898] (2/4) Epoch 2, batch 1250, loss[loss=0.3087, simple_loss=0.374, pruned_loss=0.1217, over 18310.00 frames. ], tot_loss[loss=0.3147, simple_loss=0.3689, pruned_loss=0.1302, over 3568796.40 frames. ], batch size: 54, lr: 4.15e-02, grad_scale: 8.0 +2023-03-08 15:12:16,981 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0910, 5.0041, 5.0080, 4.6281, 4.6332, 4.6758, 4.4264, 4.7562], + device='cuda:2'), covar=tensor([0.0406, 0.0266, 0.0176, 0.0267, 0.0402, 0.0295, 0.0603, 0.0274], + device='cuda:2'), in_proj_covar=tensor([0.0082, 0.0100, 0.0084, 0.0080, 0.0093, 0.0096, 0.0110, 0.0091], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 15:12:51,972 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.909e+02 5.792e+02 6.888e+02 8.674e+02 1.521e+03, threshold=1.378e+03, percent-clipped=0.0 +2023-03-08 15:13:02,944 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.99 vs. 
limit=2.0 +2023-03-08 15:13:15,916 INFO [train.py:898] (2/4) Epoch 2, batch 1300, loss[loss=0.33, simple_loss=0.3723, pruned_loss=0.1438, over 18249.00 frames. ], tot_loss[loss=0.3127, simple_loss=0.3673, pruned_loss=0.129, over 3575224.78 frames. ], batch size: 45, lr: 4.14e-02, grad_scale: 8.0 +2023-03-08 15:13:17,374 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4400, 5.2293, 4.6113, 5.4755, 5.3497, 4.6926, 5.2815, 4.5879], + device='cuda:2'), covar=tensor([0.0291, 0.0232, 0.1471, 0.0364, 0.0258, 0.0444, 0.0251, 0.0568], + device='cuda:2'), in_proj_covar=tensor([0.0161, 0.0172, 0.0270, 0.0144, 0.0146, 0.0181, 0.0175, 0.0192], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0004, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 15:13:27,103 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4944.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:13:42,150 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-08 15:14:15,426 INFO [train.py:898] (2/4) Epoch 2, batch 1350, loss[loss=0.2964, simple_loss=0.3648, pruned_loss=0.114, over 18360.00 frames. ], tot_loss[loss=0.3118, simple_loss=0.3669, pruned_loss=0.1284, over 3573121.86 frames. ], batch size: 50, lr: 4.13e-02, grad_scale: 8.0 +2023-03-08 15:14:24,038 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=4992.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:14:40,979 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=7.50 vs. limit=5.0 +2023-03-08 15:14:51,637 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.370e+02 5.891e+02 6.846e+02 8.415e+02 1.418e+03, threshold=1.369e+03, percent-clipped=1.0 +2023-03-08 15:15:02,490 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9475, 3.6215, 4.2271, 3.0336, 3.8581, 3.6144, 3.8978, 3.2815], + device='cuda:2'), covar=tensor([0.0550, 0.0273, 0.0059, 0.0351, 0.0115, 0.0401, 0.0228, 0.0494], + device='cuda:2'), in_proj_covar=tensor([0.0048, 0.0041, 0.0031, 0.0041, 0.0029, 0.0054, 0.0028, 0.0048], + device='cuda:2'), out_proj_covar=tensor([3.5985e-05, 3.1121e-05, 2.2131e-05, 3.0288e-05, 2.2404e-05, 4.0778e-05, + 2.2347e-05, 3.5133e-05], device='cuda:2') +2023-03-08 15:15:15,360 INFO [train.py:898] (2/4) Epoch 2, batch 1400, loss[loss=0.2952, simple_loss=0.3606, pruned_loss=0.1149, over 18401.00 frames. ], tot_loss[loss=0.3115, simple_loss=0.3669, pruned_loss=0.128, over 3576636.47 frames. ], batch size: 52, lr: 4.12e-02, grad_scale: 8.0 +2023-03-08 15:15:54,902 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([1.7867, 4.2464, 4.6590, 3.6275, 2.8442, 2.1035, 3.8770, 4.0908], + device='cuda:2'), covar=tensor([0.0769, 0.0273, 0.0052, 0.0205, 0.0734, 0.1044, 0.0168, 0.0038], + device='cuda:2'), in_proj_covar=tensor([0.0066, 0.0033, 0.0030, 0.0047, 0.0072, 0.0081, 0.0048, 0.0029], + device='cuda:2'), out_proj_covar=tensor([8.9304e-05, 5.1383e-05, 3.7768e-05, 6.1754e-05, 9.0760e-05, 1.0327e-04, + 6.1558e-05, 3.7298e-05], device='cuda:2') +2023-03-08 15:16:14,519 INFO [train.py:898] (2/4) Epoch 2, batch 1450, loss[loss=0.2923, simple_loss=0.3526, pruned_loss=0.116, over 18405.00 frames. ], tot_loss[loss=0.3116, simple_loss=0.3668, pruned_loss=0.1282, over 3570156.38 frames. 
], batch size: 48, lr: 4.11e-02, grad_scale: 8.0 +2023-03-08 15:16:50,488 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.583e+02 6.040e+02 7.064e+02 8.729e+02 1.298e+03, threshold=1.413e+03, percent-clipped=0.0 +2023-03-08 15:17:13,677 INFO [train.py:898] (2/4) Epoch 2, batch 1500, loss[loss=0.3285, simple_loss=0.3779, pruned_loss=0.1395, over 18628.00 frames. ], tot_loss[loss=0.313, simple_loss=0.3681, pruned_loss=0.1289, over 3576056.44 frames. ], batch size: 52, lr: 4.10e-02, grad_scale: 8.0 +2023-03-08 15:17:29,153 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7708, 4.6551, 4.8464, 4.7811, 4.5963, 4.4965, 5.1230, 5.1435], + device='cuda:2'), covar=tensor([0.0128, 0.0175, 0.0147, 0.0112, 0.0146, 0.0157, 0.0229, 0.0155], + device='cuda:2'), in_proj_covar=tensor([0.0057, 0.0048, 0.0046, 0.0051, 0.0051, 0.0058, 0.0051, 0.0046], + device='cuda:2'), out_proj_covar=tensor([1.2352e-04, 9.1455e-05, 8.5807e-05, 8.9582e-05, 1.0430e-04, 1.2249e-04, + 9.8572e-05, 8.4928e-05], device='cuda:2') +2023-03-08 15:17:36,969 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-08 15:18:11,877 INFO [train.py:898] (2/4) Epoch 2, batch 1550, loss[loss=0.38, simple_loss=0.4175, pruned_loss=0.1713, over 18341.00 frames. ], tot_loss[loss=0.314, simple_loss=0.3687, pruned_loss=0.1297, over 3568690.21 frames. ], batch size: 55, lr: 4.08e-02, grad_scale: 8.0 +2023-03-08 15:18:19,667 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7769, 4.5344, 4.8779, 4.7809, 4.7395, 4.5643, 5.1224, 5.0000], + device='cuda:2'), covar=tensor([0.0108, 0.0164, 0.0146, 0.0094, 0.0121, 0.0129, 0.0124, 0.0166], + device='cuda:2'), in_proj_covar=tensor([0.0057, 0.0048, 0.0045, 0.0051, 0.0051, 0.0059, 0.0050, 0.0046], + device='cuda:2'), out_proj_covar=tensor([1.2522e-04, 9.1382e-05, 8.4530e-05, 9.1084e-05, 1.0465e-04, 1.2573e-04, + 9.8495e-05, 8.6910e-05], device='cuda:2') +2023-03-08 15:18:48,177 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1899, 5.2373, 5.2955, 4.9356, 4.8522, 4.9005, 4.6544, 4.8318], + device='cuda:2'), covar=tensor([0.0450, 0.0295, 0.0186, 0.0281, 0.0530, 0.0310, 0.0616, 0.0371], + device='cuda:2'), in_proj_covar=tensor([0.0088, 0.0106, 0.0091, 0.0090, 0.0103, 0.0104, 0.0120, 0.0099], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 15:18:48,959 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.611e+02 6.188e+02 7.756e+02 9.428e+02 1.707e+03, threshold=1.551e+03, percent-clipped=5.0 +2023-03-08 15:19:11,985 INFO [train.py:898] (2/4) Epoch 2, batch 1600, loss[loss=0.3437, simple_loss=0.4014, pruned_loss=0.143, over 18366.00 frames. ], tot_loss[loss=0.3124, simple_loss=0.3676, pruned_loss=0.1286, over 3575681.91 frames. ], batch size: 55, lr: 4.07e-02, grad_scale: 8.0 +2023-03-08 15:19:35,607 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7624, 3.8387, 3.9121, 2.9254, 3.3405, 3.8263, 4.2044, 3.9346], + device='cuda:2'), covar=tensor([0.0381, 0.0294, 0.0137, 0.0795, 0.1361, 0.0078, 0.0194, 0.0201], + device='cuda:2'), in_proj_covar=tensor([0.0061, 0.0061, 0.0046, 0.0064, 0.0113, 0.0047, 0.0052, 0.0056], + device='cuda:2'), out_proj_covar=tensor([3.9071e-05, 4.0027e-05, 2.9894e-05, 4.3142e-05, 7.7583e-05, 2.7786e-05, + 3.1242e-05, 3.5202e-05], device='cuda:2') +2023-03-08 15:19:51,807 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.78 vs. 
limit=2.0 +2023-03-08 15:20:11,417 INFO [train.py:898] (2/4) Epoch 2, batch 1650, loss[loss=0.2835, simple_loss=0.3366, pruned_loss=0.1151, over 17634.00 frames. ], tot_loss[loss=0.3115, simple_loss=0.3669, pruned_loss=0.128, over 3574867.83 frames. ], batch size: 39, lr: 4.06e-02, grad_scale: 8.0 +2023-03-08 15:20:17,660 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4626, 4.1292, 3.0739, 3.9714, 4.0410, 4.0727, 4.0392, 2.3299], + device='cuda:2'), covar=tensor([0.0302, 0.0098, 0.0327, 0.0119, 0.0096, 0.0125, 0.0172, 0.0897], + device='cuda:2'), in_proj_covar=tensor([0.0038, 0.0024, 0.0037, 0.0026, 0.0031, 0.0026, 0.0030, 0.0048], + device='cuda:2'), out_proj_covar=tensor([9.8877e-05, 6.3128e-05, 9.8784e-05, 7.4644e-05, 7.2023e-05, 6.6048e-05, + 7.3783e-05, 1.1133e-04], device='cuda:2') +2023-03-08 15:20:47,171 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.457e+02 5.814e+02 6.706e+02 8.624e+02 1.400e+03, threshold=1.341e+03, percent-clipped=0.0 +2023-03-08 15:21:10,522 INFO [train.py:898] (2/4) Epoch 2, batch 1700, loss[loss=0.293, simple_loss=0.3608, pruned_loss=0.1126, over 18233.00 frames. ], tot_loss[loss=0.3096, simple_loss=0.3655, pruned_loss=0.1269, over 3565520.44 frames. ], batch size: 60, lr: 4.05e-02, grad_scale: 8.0 +2023-03-08 15:21:10,831 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5374, 4.4200, 3.6827, 4.5185, 4.4498, 4.0172, 4.3919, 3.7627], + device='cuda:2'), covar=tensor([0.0342, 0.0481, 0.2117, 0.0508, 0.0335, 0.0467, 0.0374, 0.0626], + device='cuda:2'), in_proj_covar=tensor([0.0163, 0.0177, 0.0288, 0.0140, 0.0150, 0.0180, 0.0177, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 15:21:40,954 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.87 vs. limit=5.0 +2023-03-08 15:22:10,168 INFO [train.py:898] (2/4) Epoch 2, batch 1750, loss[loss=0.3087, simple_loss=0.3588, pruned_loss=0.1293, over 16014.00 frames. ], tot_loss[loss=0.3077, simple_loss=0.3642, pruned_loss=0.1256, over 3573489.04 frames. ], batch size: 94, lr: 4.04e-02, grad_scale: 8.0 +2023-03-08 15:22:46,217 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.642e+02 5.505e+02 6.727e+02 8.573e+02 1.772e+03, threshold=1.345e+03, percent-clipped=4.0 +2023-03-08 15:23:08,320 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1088, 5.1899, 3.9344, 5.1246, 5.1742, 4.7521, 5.0441, 4.4580], + device='cuda:2'), covar=tensor([0.0635, 0.0511, 0.2920, 0.0820, 0.0417, 0.0479, 0.0496, 0.0697], + device='cuda:2'), in_proj_covar=tensor([0.0172, 0.0183, 0.0300, 0.0148, 0.0151, 0.0182, 0.0186, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 15:23:10,301 INFO [train.py:898] (2/4) Epoch 2, batch 1800, loss[loss=0.2975, simple_loss=0.3621, pruned_loss=0.1164, over 18375.00 frames. ], tot_loss[loss=0.3051, simple_loss=0.3621, pruned_loss=0.1241, over 3582385.92 frames. ], batch size: 50, lr: 4.03e-02, grad_scale: 8.0 +2023-03-08 15:24:09,617 INFO [train.py:898] (2/4) Epoch 2, batch 1850, loss[loss=0.3113, simple_loss=0.3809, pruned_loss=0.1208, over 18625.00 frames. ], tot_loss[loss=0.3055, simple_loss=0.3623, pruned_loss=0.1243, over 3582886.09 frames. ], batch size: 52, lr: 4.02e-02, grad_scale: 8.0 +2023-03-08 15:24:24,522 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.57 vs. 
limit=2.0 +2023-03-08 15:24:45,051 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.702e+02 5.666e+02 6.741e+02 8.273e+02 2.014e+03, threshold=1.348e+03, percent-clipped=2.0 +2023-03-08 15:25:08,326 INFO [train.py:898] (2/4) Epoch 2, batch 1900, loss[loss=0.3268, simple_loss=0.3833, pruned_loss=0.1352, over 18213.00 frames. ], tot_loss[loss=0.3067, simple_loss=0.3636, pruned_loss=0.1249, over 3592901.97 frames. ], batch size: 60, lr: 4.01e-02, grad_scale: 8.0 +2023-03-08 15:26:01,904 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.06 vs. limit=5.0 +2023-03-08 15:26:07,461 INFO [train.py:898] (2/4) Epoch 2, batch 1950, loss[loss=0.2677, simple_loss=0.336, pruned_loss=0.09969, over 18264.00 frames. ], tot_loss[loss=0.3063, simple_loss=0.3634, pruned_loss=0.1246, over 3585685.27 frames. ], batch size: 47, lr: 4.00e-02, grad_scale: 8.0 +2023-03-08 15:26:19,285 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1883, 3.2979, 4.0524, 2.7534, 3.0221, 4.0443, 3.9326, 3.5181], + device='cuda:2'), covar=tensor([0.0406, 0.0334, 0.0119, 0.0654, 0.1387, 0.0072, 0.0171, 0.0306], + device='cuda:2'), in_proj_covar=tensor([0.0066, 0.0065, 0.0048, 0.0070, 0.0125, 0.0050, 0.0057, 0.0060], + device='cuda:2'), out_proj_covar=tensor([4.4116e-05, 4.4704e-05, 3.2442e-05, 4.9037e-05, 8.7803e-05, 3.0262e-05, + 3.5669e-05, 4.0212e-05], device='cuda:2') +2023-03-08 15:26:38,145 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1917, 2.9690, 2.2914, 2.8118, 3.1250, 2.3861, 2.5565, 3.0439], + device='cuda:2'), covar=tensor([0.0225, 0.0148, 0.0453, 0.0225, 0.0199, 0.0451, 0.0332, 0.0525], + device='cuda:2'), in_proj_covar=tensor([0.0040, 0.0040, 0.0039, 0.0040, 0.0038, 0.0054, 0.0055, 0.0036], + device='cuda:2'), out_proj_covar=tensor([5.0130e-05, 4.9516e-05, 5.7017e-05, 4.7759e-05, 5.0535e-05, 6.6042e-05, + 6.6032e-05, 5.3622e-05], device='cuda:2') +2023-03-08 15:26:42,648 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.871e+02 5.827e+02 7.380e+02 9.024e+02 1.685e+03, threshold=1.476e+03, percent-clipped=2.0 +2023-03-08 15:26:53,833 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.07 vs. limit=2.0 +2023-03-08 15:27:06,886 INFO [train.py:898] (2/4) Epoch 2, batch 2000, loss[loss=0.3264, simple_loss=0.3863, pruned_loss=0.1333, over 18410.00 frames. ], tot_loss[loss=0.3057, simple_loss=0.3631, pruned_loss=0.1242, over 3594157.63 frames. ], batch size: 52, lr: 3.99e-02, grad_scale: 8.0 +2023-03-08 15:27:11,442 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5732, 5.5765, 5.2361, 5.1394, 4.4334, 5.4871, 5.6810, 5.4356], + device='cuda:2'), covar=tensor([0.1945, 0.0900, 0.0637, 0.1076, 0.2568, 0.0733, 0.0672, 0.0855], + device='cuda:2'), in_proj_covar=tensor([0.0246, 0.0218, 0.0172, 0.0226, 0.0306, 0.0212, 0.0215, 0.0196], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 15:27:24,065 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5650.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 15:27:51,952 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.85 vs. limit=2.0 +2023-03-08 15:28:05,536 INFO [train.py:898] (2/4) Epoch 2, batch 2050, loss[loss=0.2721, simple_loss=0.3461, pruned_loss=0.09911, over 18364.00 frames. ], tot_loss[loss=0.3058, simple_loss=0.3629, pruned_loss=0.1244, over 3589035.57 frames. 
], batch size: 56, lr: 3.98e-02, grad_scale: 8.0 +2023-03-08 15:28:35,656 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5711.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 15:28:42,462 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.372e+02 5.884e+02 6.946e+02 9.338e+02 2.315e+03, threshold=1.389e+03, percent-clipped=7.0 +2023-03-08 15:29:03,505 INFO [train.py:898] (2/4) Epoch 2, batch 2100, loss[loss=0.2963, simple_loss=0.3573, pruned_loss=0.1177, over 18361.00 frames. ], tot_loss[loss=0.3059, simple_loss=0.3628, pruned_loss=0.1245, over 3578602.63 frames. ], batch size: 55, lr: 3.97e-02, grad_scale: 2.0 +2023-03-08 15:29:11,324 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0 +2023-03-08 15:29:27,850 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5755.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:29:40,807 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.06 vs. limit=2.0 +2023-03-08 15:30:02,849 INFO [train.py:898] (2/4) Epoch 2, batch 2150, loss[loss=0.39, simple_loss=0.4182, pruned_loss=0.1809, over 12040.00 frames. ], tot_loss[loss=0.3055, simple_loss=0.3629, pruned_loss=0.1241, over 3561456.51 frames. ], batch size: 129, lr: 3.96e-02, grad_scale: 2.0 +2023-03-08 15:30:05,796 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.74 vs. limit=2.0 +2023-03-08 15:30:27,870 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.17 vs. limit=2.0 +2023-03-08 15:30:40,451 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5816.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:30:41,161 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.344e+02 5.674e+02 6.514e+02 8.678e+02 1.455e+03, threshold=1.303e+03, percent-clipped=1.0 +2023-03-08 15:31:01,994 INFO [train.py:898] (2/4) Epoch 2, batch 2200, loss[loss=0.3891, simple_loss=0.4145, pruned_loss=0.1819, over 13020.00 frames. ], tot_loss[loss=0.3034, simple_loss=0.3612, pruned_loss=0.1228, over 3571621.03 frames. ], batch size: 129, lr: 3.95e-02, grad_scale: 2.0 +2023-03-08 15:31:29,537 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5858.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:31:46,649 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1944, 4.1222, 4.4810, 3.6712, 3.8912, 3.8911, 4.5729, 3.0722], + device='cuda:2'), covar=tensor([0.0427, 0.0190, 0.0091, 0.0246, 0.0141, 0.0587, 0.0131, 0.0631], + device='cuda:2'), in_proj_covar=tensor([0.0061, 0.0054, 0.0037, 0.0053, 0.0041, 0.0079, 0.0035, 0.0069], + device='cuda:2'), out_proj_covar=tensor([5.1062e-05, 4.4985e-05, 2.9297e-05, 4.4797e-05, 3.5730e-05, 6.5330e-05, + 3.2298e-05, 5.5444e-05], device='cuda:2') +2023-03-08 15:31:50,803 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3319, 5.9105, 5.5730, 5.6280, 5.2841, 5.6902, 6.0181, 5.8601], + device='cuda:2'), covar=tensor([0.1129, 0.0587, 0.0343, 0.0609, 0.1797, 0.0463, 0.0422, 0.0604], + device='cuda:2'), in_proj_covar=tensor([0.0259, 0.0225, 0.0175, 0.0229, 0.0316, 0.0217, 0.0217, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 15:31:51,252 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. 
limit=2.0 +2023-03-08 15:32:00,586 INFO [train.py:898] (2/4) Epoch 2, batch 2250, loss[loss=0.2852, simple_loss=0.3541, pruned_loss=0.1082, over 18490.00 frames. ], tot_loss[loss=0.3023, simple_loss=0.3602, pruned_loss=0.1222, over 3581825.77 frames. ], batch size: 53, lr: 3.95e-02, grad_scale: 2.0 +2023-03-08 15:32:07,824 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5897, 6.0580, 5.5786, 5.7841, 5.4151, 5.8065, 6.2167, 5.9646], + device='cuda:2'), covar=tensor([0.0749, 0.0526, 0.0328, 0.0528, 0.1789, 0.0459, 0.0365, 0.0575], + device='cuda:2'), in_proj_covar=tensor([0.0258, 0.0224, 0.0178, 0.0230, 0.0319, 0.0220, 0.0218, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 15:32:38,782 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.697e+02 5.715e+02 7.226e+02 9.109e+02 2.021e+03, threshold=1.445e+03, percent-clipped=5.0 +2023-03-08 15:32:39,788 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.65 vs. limit=2.0 +2023-03-08 15:32:41,317 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5919.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:32:59,624 INFO [train.py:898] (2/4) Epoch 2, batch 2300, loss[loss=0.2866, simple_loss=0.3454, pruned_loss=0.1139, over 18488.00 frames. ], tot_loss[loss=0.3018, simple_loss=0.3602, pruned_loss=0.1217, over 3581962.71 frames. ], batch size: 47, lr: 3.94e-02, grad_scale: 2.0 +2023-03-08 15:33:26,028 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4782, 3.3432, 4.2870, 3.1579, 3.5042, 4.2951, 4.0234, 4.0297], + device='cuda:2'), covar=tensor([0.0348, 0.0382, 0.0183, 0.0548, 0.1095, 0.0055, 0.0160, 0.0182], + device='cuda:2'), in_proj_covar=tensor([0.0069, 0.0071, 0.0048, 0.0074, 0.0133, 0.0052, 0.0060, 0.0064], + device='cuda:2'), out_proj_covar=tensor([4.8119e-05, 4.9796e-05, 3.4440e-05, 5.2991e-05, 9.6101e-05, 3.2611e-05, + 4.0064e-05, 4.4728e-05], device='cuda:2') +2023-03-08 15:33:58,221 INFO [train.py:898] (2/4) Epoch 2, batch 2350, loss[loss=0.2977, simple_loss=0.3634, pruned_loss=0.1159, over 18479.00 frames. ], tot_loss[loss=0.3006, simple_loss=0.3593, pruned_loss=0.1209, over 3588488.02 frames. ], batch size: 53, lr: 3.93e-02, grad_scale: 2.0 +2023-03-08 15:34:04,396 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5990.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:34:27,131 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6006.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 15:34:39,272 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.355e+02 5.403e+02 7.312e+02 8.931e+02 1.402e+03, threshold=1.462e+03, percent-clipped=0.0 +2023-03-08 15:34:40,801 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.3790, 2.3304, 2.0244, 2.2342, 2.4125, 2.4706, 2.0472, 2.0159], + device='cuda:2'), covar=tensor([0.0350, 0.0273, 0.0658, 0.0234, 0.0370, 0.0283, 0.0361, 0.0236], + device='cuda:2'), in_proj_covar=tensor([0.0055, 0.0050, 0.0086, 0.0062, 0.0059, 0.0044, 0.0057, 0.0057], + device='cuda:2'), out_proj_covar=tensor([8.2177e-05, 7.3590e-05, 1.2322e-04, 8.2563e-05, 8.7449e-05, 6.2807e-05, + 7.7293e-05, 7.1531e-05], device='cuda:2') +2023-03-08 15:35:00,410 INFO [train.py:898] (2/4) Epoch 2, batch 2400, loss[loss=0.3096, simple_loss=0.3674, pruned_loss=0.1259, over 17978.00 frames. 
], tot_loss[loss=0.3012, simple_loss=0.3591, pruned_loss=0.1216, over 3585389.26 frames. ], batch size: 65, lr: 3.92e-02, grad_scale: 4.0 +2023-03-08 15:35:02,367 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.88 vs. limit=2.0 +2023-03-08 15:35:10,242 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6043.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:35:19,728 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6051.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:35:59,380 INFO [train.py:898] (2/4) Epoch 2, batch 2450, loss[loss=0.3008, simple_loss=0.3656, pruned_loss=0.118, over 18002.00 frames. ], tot_loss[loss=0.3014, simple_loss=0.3596, pruned_loss=0.1215, over 3586190.59 frames. ], batch size: 65, lr: 3.91e-02, grad_scale: 4.0 +2023-03-08 15:36:23,033 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6104.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:36:30,702 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6111.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:36:37,237 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.501e+02 5.639e+02 7.028e+02 8.696e+02 2.308e+03, threshold=1.406e+03, percent-clipped=2.0 +2023-03-08 15:36:58,327 INFO [train.py:898] (2/4) Epoch 2, batch 2500, loss[loss=0.3087, simple_loss=0.3701, pruned_loss=0.1237, over 17740.00 frames. ], tot_loss[loss=0.3008, simple_loss=0.3594, pruned_loss=0.1211, over 3585808.16 frames. ], batch size: 70, lr: 3.90e-02, grad_scale: 4.0 +2023-03-08 15:36:58,672 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6135.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:37:07,261 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6380, 5.1292, 3.4717, 4.4582, 4.6499, 5.1141, 4.9364, 2.9579], + device='cuda:2'), covar=tensor([0.0258, 0.0067, 0.0377, 0.0082, 0.0116, 0.0065, 0.0143, 0.0956], + device='cuda:2'), in_proj_covar=tensor([0.0043, 0.0026, 0.0048, 0.0029, 0.0034, 0.0028, 0.0035, 0.0061], + device='cuda:2'), out_proj_covar=tensor([1.3168e-04, 8.1154e-05, 1.4357e-04, 1.0251e-04, 9.4601e-05, 8.5458e-05, + 1.0282e-04, 1.6030e-04], device='cuda:2') +2023-03-08 15:37:10,498 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2286, 4.6525, 4.6388, 4.4278, 4.1765, 4.6189, 4.7471, 4.5794], + device='cuda:2'), covar=tensor([0.1328, 0.0740, 0.0652, 0.0725, 0.1852, 0.0591, 0.0577, 0.0795], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0222, 0.0178, 0.0229, 0.0323, 0.0223, 0.0223, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 15:37:39,756 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6170.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:37:54,882 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.84 vs. limit=5.0 +2023-03-08 15:37:56,495 INFO [train.py:898] (2/4) Epoch 2, batch 2550, loss[loss=0.2841, simple_loss=0.3343, pruned_loss=0.117, over 17685.00 frames. ], tot_loss[loss=0.3012, simple_loss=0.359, pruned_loss=0.1217, over 3573566.49 frames. 
], batch size: 39, lr: 3.89e-02, grad_scale: 4.0 +2023-03-08 15:38:10,601 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6196.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:38:18,797 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6203.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:38:29,041 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6211.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:38:32,292 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6214.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:38:35,435 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.309e+02 5.777e+02 7.159e+02 8.638e+02 1.810e+03, threshold=1.432e+03, percent-clipped=8.0 +2023-03-08 15:38:51,745 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6231.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:38:55,993 INFO [train.py:898] (2/4) Epoch 2, batch 2600, loss[loss=0.2433, simple_loss=0.3024, pruned_loss=0.09212, over 18540.00 frames. ], tot_loss[loss=0.2988, simple_loss=0.3571, pruned_loss=0.1202, over 3581395.18 frames. ], batch size: 45, lr: 3.88e-02, grad_scale: 4.0 +2023-03-08 15:39:31,860 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6264.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:39:34,117 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6266.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:39:40,827 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6272.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:39:55,317 INFO [train.py:898] (2/4) Epoch 2, batch 2650, loss[loss=0.2508, simple_loss=0.3107, pruned_loss=0.09545, over 18399.00 frames. ], tot_loss[loss=0.2987, simple_loss=0.3572, pruned_loss=0.1202, over 3578121.45 frames. 
], batch size: 42, lr: 3.87e-02, grad_scale: 4.0 +2023-03-08 15:40:15,231 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1217, 3.9894, 4.0923, 3.4945, 3.2289, 2.4911, 2.5560, 2.4895], + device='cuda:2'), covar=tensor([0.0409, 0.0327, 0.0178, 0.0137, 0.0335, 0.0545, 0.0412, 0.0399], + device='cuda:2'), in_proj_covar=tensor([0.0025, 0.0024, 0.0023, 0.0022, 0.0032, 0.0022, 0.0029, 0.0029], + device='cuda:2'), out_proj_covar=tensor([9.3308e-05, 9.9960e-05, 7.5629e-05, 8.1631e-05, 1.1852e-04, 7.9768e-05, + 9.2748e-05, 9.6762e-05], device='cuda:2') +2023-03-08 15:40:20,587 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6306.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 15:40:33,296 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.735e+02 5.474e+02 6.730e+02 8.945e+02 2.130e+03, threshold=1.346e+03, percent-clipped=7.0 +2023-03-08 15:40:45,883 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6327.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:40:51,466 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3032, 5.1238, 5.2788, 5.3331, 5.1021, 5.9050, 5.2711, 5.2421], + device='cuda:2'), covar=tensor([0.0608, 0.0569, 0.0654, 0.0485, 0.1247, 0.0670, 0.0565, 0.1645], + device='cuda:2'), in_proj_covar=tensor([0.0178, 0.0145, 0.0144, 0.0137, 0.0188, 0.0197, 0.0135, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 15:40:54,511 INFO [train.py:898] (2/4) Epoch 2, batch 2700, loss[loss=0.2962, simple_loss=0.3456, pruned_loss=0.1234, over 18220.00 frames. ], tot_loss[loss=0.2987, simple_loss=0.3574, pruned_loss=0.12, over 3592176.77 frames. ], batch size: 45, lr: 3.86e-02, grad_scale: 4.0 +2023-03-08 15:41:07,621 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6346.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:41:16,131 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.6292, 5.0647, 5.7027, 5.4556, 5.4759, 6.0479, 5.6124, 5.5365], + device='cuda:2'), covar=tensor([0.0503, 0.0546, 0.0479, 0.0462, 0.0861, 0.0539, 0.0538, 0.1003], + device='cuda:2'), in_proj_covar=tensor([0.0174, 0.0142, 0.0141, 0.0135, 0.0182, 0.0193, 0.0132, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 15:41:17,140 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6354.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 15:41:33,522 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5638, 2.9554, 4.1031, 2.6427, 3.3439, 3.7573, 3.9440, 3.7870], + device='cuda:2'), covar=tensor([0.0282, 0.0385, 0.0108, 0.0615, 0.1135, 0.0065, 0.0179, 0.0211], + device='cuda:2'), in_proj_covar=tensor([0.0074, 0.0078, 0.0050, 0.0081, 0.0145, 0.0056, 0.0068, 0.0071], + device='cuda:2'), out_proj_covar=tensor([5.3610e-05, 5.7217e-05, 3.7862e-05, 6.0208e-05, 1.0707e-04, 3.6172e-05, + 4.7166e-05, 5.2015e-05], device='cuda:2') +2023-03-08 15:41:40,721 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8711, 5.2782, 5.0958, 5.0237, 4.8180, 5.1783, 5.3535, 5.1917], + device='cuda:2'), covar=tensor([0.0777, 0.0528, 0.0421, 0.0550, 0.1436, 0.0423, 0.0476, 0.0556], + device='cuda:2'), in_proj_covar=tensor([0.0268, 0.0227, 0.0178, 0.0233, 0.0327, 0.0227, 0.0227, 0.0214], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 
0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 15:41:53,026 INFO [train.py:898] (2/4) Epoch 2, batch 2750, loss[loss=0.3328, simple_loss=0.3893, pruned_loss=0.1381, over 18282.00 frames. ], tot_loss[loss=0.2997, simple_loss=0.3581, pruned_loss=0.1207, over 3586939.23 frames. ], batch size: 57, lr: 3.85e-02, grad_scale: 4.0 +2023-03-08 15:42:10,004 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6399.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:42:24,367 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6411.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:42:31,300 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.682e+02 5.887e+02 7.195e+02 9.085e+02 1.925e+03, threshold=1.439e+03, percent-clipped=3.0 +2023-03-08 15:42:45,270 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.78 vs. limit=2.0 +2023-03-08 15:42:52,441 INFO [train.py:898] (2/4) Epoch 2, batch 2800, loss[loss=0.3034, simple_loss=0.3701, pruned_loss=0.1184, over 18628.00 frames. ], tot_loss[loss=0.2985, simple_loss=0.3574, pruned_loss=0.1198, over 3594930.49 frames. ], batch size: 52, lr: 3.84e-02, grad_scale: 8.0 +2023-03-08 15:43:21,455 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6459.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:43:25,052 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6462.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:43:30,899 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6467.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:43:51,662 INFO [train.py:898] (2/4) Epoch 2, batch 2850, loss[loss=0.3408, simple_loss=0.3931, pruned_loss=0.1443, over 17037.00 frames. ], tot_loss[loss=0.2978, simple_loss=0.3566, pruned_loss=0.1195, over 3585116.83 frames. 
], batch size: 78, lr: 3.83e-02, grad_scale: 8.0 +2023-03-08 15:43:58,848 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6491.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:44:07,803 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1993, 4.6869, 4.6159, 4.4018, 4.1733, 4.4701, 4.7814, 4.6422], + device='cuda:2'), covar=tensor([0.1070, 0.0564, 0.0739, 0.0639, 0.1595, 0.0626, 0.0431, 0.0536], + device='cuda:2'), in_proj_covar=tensor([0.0282, 0.0236, 0.0185, 0.0244, 0.0342, 0.0238, 0.0234, 0.0220], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 15:44:21,772 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5175, 2.5747, 4.2481, 2.8372, 2.8992, 3.9500, 3.9804, 3.8245], + device='cuda:2'), covar=tensor([0.0273, 0.0474, 0.0081, 0.0523, 0.1453, 0.0056, 0.0153, 0.0168], + device='cuda:2'), in_proj_covar=tensor([0.0074, 0.0080, 0.0050, 0.0081, 0.0148, 0.0058, 0.0068, 0.0072], + device='cuda:2'), out_proj_covar=tensor([5.4964e-05, 5.8946e-05, 3.8506e-05, 6.0676e-05, 1.0985e-04, 3.8306e-05, + 4.7788e-05, 5.3234e-05], device='cuda:2') +2023-03-08 15:44:26,224 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6514.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:44:29,245 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.462e+02 5.681e+02 6.833e+02 8.085e+02 2.080e+03, threshold=1.367e+03, percent-clipped=2.0 +2023-03-08 15:44:37,123 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6523.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 15:44:40,175 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6526.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:44:43,138 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6528.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:44:50,783 INFO [train.py:898] (2/4) Epoch 2, batch 2900, loss[loss=0.3306, simple_loss=0.3808, pruned_loss=0.1402, over 17201.00 frames. ], tot_loss[loss=0.2989, simple_loss=0.3575, pruned_loss=0.1201, over 3586799.51 frames. ], batch size: 78, lr: 3.82e-02, grad_scale: 8.0 +2023-03-08 15:44:57,170 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.79 vs. limit=2.0 +2023-03-08 15:45:19,411 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6559.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:45:22,844 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6562.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:45:28,533 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6567.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:45:49,894 INFO [train.py:898] (2/4) Epoch 2, batch 2950, loss[loss=0.2802, simple_loss=0.3408, pruned_loss=0.1098, over 18414.00 frames. ], tot_loss[loss=0.2976, simple_loss=0.3566, pruned_loss=0.1193, over 3567756.99 frames. 
], batch size: 48, lr: 3.81e-02, grad_scale: 8.0 +2023-03-08 15:46:17,003 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4027, 5.2861, 4.5711, 5.3553, 5.2787, 4.9064, 5.3304, 4.7663], + device='cuda:2'), covar=tensor([0.0239, 0.0263, 0.1702, 0.0461, 0.0296, 0.0315, 0.0212, 0.0494], + device='cuda:2'), in_proj_covar=tensor([0.0188, 0.0202, 0.0330, 0.0162, 0.0165, 0.0195, 0.0197, 0.0227], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 15:46:27,984 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.426e+02 5.603e+02 6.658e+02 8.574e+02 1.720e+03, threshold=1.332e+03, percent-clipped=3.0 +2023-03-08 15:46:33,934 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6622.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:46:35,648 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.83 vs. limit=2.0 +2023-03-08 15:46:42,530 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6629.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:46:47,473 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=6.05 vs. limit=5.0 +2023-03-08 15:46:49,602 INFO [train.py:898] (2/4) Epoch 2, batch 3000, loss[loss=0.249, simple_loss=0.3093, pruned_loss=0.09435, over 18420.00 frames. ], tot_loss[loss=0.296, simple_loss=0.3554, pruned_loss=0.1183, over 3577190.07 frames. ], batch size: 43, lr: 3.80e-02, grad_scale: 8.0 +2023-03-08 15:46:49,603 INFO [train.py:923] (2/4) Computing validation loss +2023-03-08 15:47:01,155 INFO [train.py:932] (2/4) Epoch 2, validation: loss=0.2202, simple_loss=0.3188, pruned_loss=0.06074, over 944034.00 frames. +2023-03-08 15:47:01,156 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19432MB +2023-03-08 15:47:15,100 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6646.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:47:59,753 INFO [train.py:898] (2/4) Epoch 2, batch 3050, loss[loss=0.2848, simple_loss=0.3439, pruned_loss=0.1129, over 18368.00 frames. ], tot_loss[loss=0.2965, simple_loss=0.3555, pruned_loss=0.1188, over 3585900.20 frames. 
], batch size: 46, lr: 3.79e-02, grad_scale: 8.0 +2023-03-08 15:48:05,979 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6690.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:48:10,430 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6694.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:48:17,429 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6699.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:48:37,955 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.591e+02 5.592e+02 6.340e+02 8.473e+02 1.907e+03, threshold=1.268e+03, percent-clipped=6.0 +2023-03-08 15:48:41,746 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1422, 3.7220, 3.8925, 4.0634, 2.6908, 4.3673, 4.0206, 2.5536], + device='cuda:2'), covar=tensor([0.0128, 0.0205, 0.0175, 0.0131, 0.1056, 0.0080, 0.0118, 0.0885], + device='cuda:2'), in_proj_covar=tensor([0.0065, 0.0062, 0.0058, 0.0053, 0.0110, 0.0053, 0.0049, 0.0108], + device='cuda:2'), out_proj_covar=tensor([6.3711e-05, 5.9200e-05, 5.8311e-05, 5.4749e-05, 1.0532e-04, 4.9097e-05, + 5.5474e-05, 1.0601e-04], device='cuda:2') +2023-03-08 15:48:58,812 INFO [train.py:898] (2/4) Epoch 2, batch 3100, loss[loss=0.3295, simple_loss=0.3713, pruned_loss=0.1438, over 18256.00 frames. ], tot_loss[loss=0.2956, simple_loss=0.3548, pruned_loss=0.1182, over 3597244.72 frames. ], batch size: 47, lr: 3.79e-02, grad_scale: 8.0 +2023-03-08 15:49:12,565 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6747.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:49:14,244 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-03-08 15:49:57,574 INFO [train.py:898] (2/4) Epoch 2, batch 3150, loss[loss=0.3141, simple_loss=0.3779, pruned_loss=0.1252, over 18307.00 frames. ], tot_loss[loss=0.2956, simple_loss=0.3548, pruned_loss=0.1182, over 3604804.44 frames. ], batch size: 54, lr: 3.78e-02, grad_scale: 8.0 +2023-03-08 15:50:04,859 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6791.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:50:26,381 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.06 vs. limit=2.0 +2023-03-08 15:50:36,058 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.441e+02 5.682e+02 6.955e+02 9.535e+02 1.991e+03, threshold=1.391e+03, percent-clipped=9.0 +2023-03-08 15:50:37,963 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6818.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 15:50:43,418 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6823.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:50:46,930 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6826.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:50:57,128 INFO [train.py:898] (2/4) Epoch 2, batch 3200, loss[loss=0.3075, simple_loss=0.3699, pruned_loss=0.1225, over 18340.00 frames. ], tot_loss[loss=0.2963, simple_loss=0.3556, pruned_loss=0.1185, over 3579368.10 frames. 
], batch size: 55, lr: 3.77e-02, grad_scale: 8.0 +2023-03-08 15:51:02,039 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6839.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:51:25,988 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6859.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:51:35,087 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6867.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:51:43,490 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6874.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:51:48,484 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6878.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 15:51:56,395 INFO [train.py:898] (2/4) Epoch 2, batch 3250, loss[loss=0.3532, simple_loss=0.3829, pruned_loss=0.1617, over 12163.00 frames. ], tot_loss[loss=0.2957, simple_loss=0.3551, pruned_loss=0.1181, over 3574250.25 frames. ], batch size: 130, lr: 3.76e-02, grad_scale: 8.0 +2023-03-08 15:52:06,092 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9614, 2.6885, 2.4932, 2.7092, 2.5522, 2.1508, 2.0484, 2.9722], + device='cuda:2'), covar=tensor([0.0190, 0.0188, 0.0456, 0.0181, 0.0299, 0.0386, 0.0483, 0.0345], + device='cuda:2'), in_proj_covar=tensor([0.0037, 0.0037, 0.0041, 0.0044, 0.0037, 0.0056, 0.0064, 0.0040], + device='cuda:2'), out_proj_covar=tensor([5.0592e-05, 5.0185e-05, 6.4291e-05, 5.9361e-05, 5.2649e-05, 7.8464e-05, + 8.9165e-05, 6.2298e-05], device='cuda:2') +2023-03-08 15:52:10,656 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1102, 3.9953, 3.8902, 3.1927, 2.9545, 2.7304, 1.9616, 1.8296], + device='cuda:2'), covar=tensor([0.0252, 0.0202, 0.0190, 0.0156, 0.0252, 0.0294, 0.0492, 0.0488], + device='cuda:2'), in_proj_covar=tensor([0.0025, 0.0024, 0.0023, 0.0023, 0.0034, 0.0022, 0.0032, 0.0032], + device='cuda:2'), out_proj_covar=tensor([1.0535e-04, 1.1051e-04, 8.5692e-05, 9.5258e-05, 1.3712e-04, 9.1954e-05, + 1.1710e-04, 1.1852e-04], device='cuda:2') +2023-03-08 15:52:22,338 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6907.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:52:24,277 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9392, 5.1289, 2.9968, 4.7838, 4.9261, 5.1518, 5.1036, 2.4129], + device='cuda:2'), covar=tensor([0.0198, 0.0041, 0.0552, 0.0059, 0.0042, 0.0070, 0.0057, 0.1152], + device='cuda:2'), in_proj_covar=tensor([0.0045, 0.0029, 0.0055, 0.0033, 0.0037, 0.0031, 0.0039, 0.0069], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-08 15:52:25,427 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6909.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:52:31,964 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6915.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:52:34,070 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.634e+02 5.392e+02 6.712e+02 8.838e+02 2.461e+03, threshold=1.342e+03, percent-clipped=3.0 +2023-03-08 15:52:39,932 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6922.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:52:54,798 INFO [train.py:898] (2/4) Epoch 2, batch 3300, loss[loss=0.2904, simple_loss=0.367, pruned_loss=0.1069, over 18309.00 frames. 
], tot_loss[loss=0.2954, simple_loss=0.3551, pruned_loss=0.1178, over 3574507.14 frames. ], batch size: 54, lr: 3.75e-02, grad_scale: 8.0 +2023-03-08 15:53:00,229 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6939.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 15:53:36,602 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=6970.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:53:36,826 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6970.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:53:54,223 INFO [train.py:898] (2/4) Epoch 2, batch 3350, loss[loss=0.2971, simple_loss=0.3589, pruned_loss=0.1176, over 18360.00 frames. ], tot_loss[loss=0.2935, simple_loss=0.3537, pruned_loss=0.1167, over 3578532.15 frames. ], batch size: 56, lr: 3.74e-02, grad_scale: 8.0 +2023-03-08 15:53:54,387 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6985.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:54:32,097 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0310, 4.0665, 4.0582, 3.8248, 2.3513, 3.9803, 3.3072, 2.3946], + device='cuda:2'), covar=tensor([0.0152, 0.0112, 0.0112, 0.0156, 0.1262, 0.0086, 0.0182, 0.0864], + device='cuda:2'), in_proj_covar=tensor([0.0068, 0.0062, 0.0057, 0.0054, 0.0113, 0.0055, 0.0052, 0.0106], + device='cuda:2'), out_proj_covar=tensor([6.6482e-05, 5.9743e-05, 5.7421e-05, 5.5332e-05, 1.0913e-04, 5.2964e-05, + 5.9064e-05, 1.0522e-04], device='cuda:2') +2023-03-08 15:54:32,865 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.449e+02 5.349e+02 6.526e+02 8.040e+02 1.400e+03, threshold=1.305e+03, percent-clipped=1.0 +2023-03-08 15:54:53,424 INFO [train.py:898] (2/4) Epoch 2, batch 3400, loss[loss=0.3965, simple_loss=0.4202, pruned_loss=0.1864, over 12604.00 frames. ], tot_loss[loss=0.2932, simple_loss=0.3532, pruned_loss=0.1166, over 3584628.52 frames. ], batch size: 129, lr: 3.73e-02, grad_scale: 8.0 +2023-03-08 15:55:09,212 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7048.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 15:55:52,778 INFO [train.py:898] (2/4) Epoch 2, batch 3450, loss[loss=0.2635, simple_loss=0.3328, pruned_loss=0.09713, over 18510.00 frames. ], tot_loss[loss=0.2915, simple_loss=0.3519, pruned_loss=0.1156, over 3585376.99 frames. ], batch size: 47, lr: 3.72e-02, grad_scale: 8.0 +2023-03-08 15:56:07,535 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.99 vs. 
limit=2.0 +2023-03-08 15:56:20,926 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7109.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 15:56:22,040 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6238, 2.3034, 2.1187, 2.4331, 2.3443, 2.0700, 1.7688, 2.8811], + device='cuda:2'), covar=tensor([0.0193, 0.0313, 0.0537, 0.0286, 0.0356, 0.0406, 0.0522, 0.0201], + device='cuda:2'), in_proj_covar=tensor([0.0036, 0.0038, 0.0040, 0.0045, 0.0037, 0.0054, 0.0064, 0.0036], + device='cuda:2'), out_proj_covar=tensor([5.0688e-05, 5.2126e-05, 6.2936e-05, 6.0767e-05, 5.2967e-05, 7.7457e-05, + 9.1166e-05, 5.5839e-05], device='cuda:2') +2023-03-08 15:56:30,819 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.505e+02 5.757e+02 6.768e+02 8.417e+02 2.561e+03, threshold=1.354e+03, percent-clipped=6.0 +2023-03-08 15:56:32,132 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7118.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 15:56:37,318 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7123.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:56:42,386 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.88 vs. limit=5.0 +2023-03-08 15:56:51,289 INFO [train.py:898] (2/4) Epoch 2, batch 3500, loss[loss=0.2536, simple_loss=0.3148, pruned_loss=0.0962, over 18240.00 frames. ], tot_loss[loss=0.2915, simple_loss=0.3514, pruned_loss=0.1158, over 3569034.32 frames. ], batch size: 45, lr: 3.71e-02, grad_scale: 8.0 +2023-03-08 15:57:23,528 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7163.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:57:26,593 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7166.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:57:31,669 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7171.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:57:46,610 INFO [train.py:898] (2/4) Epoch 2, batch 3550, loss[loss=0.2843, simple_loss=0.3558, pruned_loss=0.1064, over 18491.00 frames. ], tot_loss[loss=0.2911, simple_loss=0.3513, pruned_loss=0.1154, over 3575639.17 frames. ], batch size: 53, lr: 3.71e-02, grad_scale: 8.0 +2023-03-08 15:58:22,322 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.147e+02 5.640e+02 6.762e+02 8.858e+02 1.958e+03, threshold=1.352e+03, percent-clipped=2.0 +2023-03-08 15:58:30,207 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7224.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 15:58:40,535 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7234.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 15:58:41,454 INFO [train.py:898] (2/4) Epoch 2, batch 3600, loss[loss=0.288, simple_loss=0.3455, pruned_loss=0.1152, over 18344.00 frames. ], tot_loss[loss=0.2914, simple_loss=0.3517, pruned_loss=0.1156, over 3580468.12 frames. 
], batch size: 46, lr: 3.70e-02, grad_scale: 8.0 +2023-03-08 15:58:50,664 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7997, 2.7356, 2.4059, 2.6854, 2.5528, 2.1206, 2.0495, 2.8303], + device='cuda:2'), covar=tensor([0.0232, 0.0183, 0.0579, 0.0228, 0.0436, 0.0569, 0.0502, 0.0474], + device='cuda:2'), in_proj_covar=tensor([0.0037, 0.0039, 0.0044, 0.0046, 0.0038, 0.0057, 0.0067, 0.0039], + device='cuda:2'), out_proj_covar=tensor([5.2252e-05, 5.4498e-05, 6.8782e-05, 6.2921e-05, 5.4642e-05, 8.2353e-05, + 9.6440e-05, 6.1593e-05], device='cuda:2') +2023-03-08 15:59:12,882 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7265.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 15:59:46,481 INFO [train.py:898] (2/4) Epoch 3, batch 0, loss[loss=0.3023, simple_loss=0.363, pruned_loss=0.1208, over 16115.00 frames. ], tot_loss[loss=0.3023, simple_loss=0.363, pruned_loss=0.1208, over 16115.00 frames. ], batch size: 94, lr: 3.51e-02, grad_scale: 8.0 +2023-03-08 15:59:46,481 INFO [train.py:923] (2/4) Computing validation loss +2023-03-08 15:59:58,149 INFO [train.py:932] (2/4) Epoch 3, validation: loss=0.2228, simple_loss=0.3215, pruned_loss=0.06204, over 944034.00 frames. +2023-03-08 15:59:58,150 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19432MB +2023-03-08 16:00:05,218 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7275.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:00:16,767 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7285.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:00:25,239 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.8705, 5.2904, 5.7888, 5.5089, 5.5781, 6.3000, 5.8500, 5.5176], + device='cuda:2'), covar=tensor([0.0487, 0.0454, 0.0411, 0.0392, 0.1103, 0.0474, 0.0542, 0.1218], + device='cuda:2'), in_proj_covar=tensor([0.0180, 0.0139, 0.0141, 0.0128, 0.0186, 0.0193, 0.0131, 0.0192], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 16:00:54,864 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.526e+02 5.312e+02 6.673e+02 8.366e+02 1.427e+03, threshold=1.335e+03, percent-clipped=1.0 +2023-03-08 16:00:57,251 INFO [train.py:898] (2/4) Epoch 3, batch 50, loss[loss=0.2945, simple_loss=0.3564, pruned_loss=0.1163, over 18295.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.3423, pruned_loss=0.1095, over 821149.82 frames. ], batch size: 57, lr: 3.50e-02, grad_scale: 8.0 +2023-03-08 16:01:14,310 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7333.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:01:18,117 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7336.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:01:57,071 INFO [train.py:898] (2/4) Epoch 3, batch 100, loss[loss=0.2889, simple_loss=0.3542, pruned_loss=0.1118, over 18010.00 frames. ], tot_loss[loss=0.2836, simple_loss=0.3464, pruned_loss=0.1104, over 1427205.37 frames. 
], batch size: 65, lr: 3.49e-02, grad_scale: 8.0 +2023-03-08 16:02:39,749 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7404.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 16:02:43,246 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7407.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:02:54,251 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.382e+02 5.439e+02 6.621e+02 7.725e+02 1.513e+03, threshold=1.324e+03, percent-clipped=3.0 +2023-03-08 16:02:56,555 INFO [train.py:898] (2/4) Epoch 3, batch 150, loss[loss=0.277, simple_loss=0.3476, pruned_loss=0.1032, over 18269.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.345, pruned_loss=0.1089, over 1920710.36 frames. ], batch size: 57, lr: 3.48e-02, grad_scale: 8.0 +2023-03-08 16:03:04,047 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9766, 2.9477, 2.1304, 2.6110, 2.9173, 2.4147, 2.0962, 3.1999], + device='cuda:2'), covar=tensor([0.0230, 0.0221, 0.0613, 0.0215, 0.0228, 0.0288, 0.0408, 0.0332], + device='cuda:2'), in_proj_covar=tensor([0.0036, 0.0037, 0.0040, 0.0045, 0.0036, 0.0055, 0.0064, 0.0037], + device='cuda:2'), out_proj_covar=tensor([4.9783e-05, 5.2388e-05, 6.3367e-05, 6.1639e-05, 5.1967e-05, 8.1233e-05, + 9.4431e-05, 5.7972e-05], device='cuda:2') +2023-03-08 16:03:47,424 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.82 vs. limit=2.0 +2023-03-08 16:03:54,968 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7468.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:03:55,741 INFO [train.py:898] (2/4) Epoch 3, batch 200, loss[loss=0.25, simple_loss=0.3079, pruned_loss=0.09607, over 18502.00 frames. ], tot_loss[loss=0.2819, simple_loss=0.3452, pruned_loss=0.1092, over 2302318.62 frames. ], batch size: 44, lr: 3.47e-02, grad_scale: 8.0 +2023-03-08 16:04:35,693 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1193, 5.7043, 5.3656, 5.4896, 5.1064, 5.4838, 5.8147, 5.6449], + device='cuda:2'), covar=tensor([0.1025, 0.0466, 0.0401, 0.0508, 0.1409, 0.0525, 0.0431, 0.0518], + device='cuda:2'), in_proj_covar=tensor([0.0287, 0.0240, 0.0190, 0.0248, 0.0355, 0.0256, 0.0246, 0.0222], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 16:04:48,805 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1302, 5.1225, 2.7945, 4.8128, 5.0146, 2.4877, 4.3281, 3.7794], + device='cuda:2'), covar=tensor([0.0090, 0.0169, 0.1564, 0.0200, 0.0031, 0.1487, 0.0426, 0.0769], + device='cuda:2'), in_proj_covar=tensor([0.0070, 0.0079, 0.0157, 0.0109, 0.0063, 0.0142, 0.0139, 0.0134], + device='cuda:2'), out_proj_covar=tensor([7.7286e-05, 1.0742e-04, 1.6161e-04, 1.1816e-04, 6.9801e-05, 1.5148e-04, + 1.5186e-04, 1.5505e-04], device='cuda:2') +2023-03-08 16:04:51,797 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.337e+02 5.369e+02 6.599e+02 8.256e+02 1.502e+03, threshold=1.320e+03, percent-clipped=3.0 +2023-03-08 16:04:54,101 INFO [train.py:898] (2/4) Epoch 3, batch 250, loss[loss=0.289, simple_loss=0.3579, pruned_loss=0.1101, over 17779.00 frames. ], tot_loss[loss=0.2823, simple_loss=0.3453, pruned_loss=0.1096, over 2596172.73 frames. 
], batch size: 70, lr: 3.47e-02, grad_scale: 8.0 +2023-03-08 16:04:54,363 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7519.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 16:05:10,718 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7534.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 16:05:47,588 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7565.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:05:51,604 INFO [train.py:898] (2/4) Epoch 3, batch 300, loss[loss=0.2451, simple_loss=0.3155, pruned_loss=0.0874, over 18283.00 frames. ], tot_loss[loss=0.2834, simple_loss=0.3466, pruned_loss=0.1101, over 2825167.69 frames. ], batch size: 49, lr: 3.46e-02, grad_scale: 8.0 +2023-03-08 16:06:06,074 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7582.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 16:06:43,935 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7613.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:06:48,190 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.523e+02 5.233e+02 6.862e+02 8.770e+02 1.930e+03, threshold=1.372e+03, percent-clipped=4.0 +2023-03-08 16:06:50,551 INFO [train.py:898] (2/4) Epoch 3, batch 350, loss[loss=0.3103, simple_loss=0.3677, pruned_loss=0.1264, over 18238.00 frames. ], tot_loss[loss=0.2848, simple_loss=0.3476, pruned_loss=0.111, over 2992189.65 frames. ], batch size: 60, lr: 3.45e-02, grad_scale: 8.0 +2023-03-08 16:06:56,743 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7624.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:07:03,567 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7067, 2.8729, 2.0958, 2.2441, 2.6546, 2.1993, 2.2279, 3.0777], + device='cuda:2'), covar=tensor([0.0207, 0.0188, 0.0572, 0.0231, 0.0264, 0.0327, 0.0363, 0.0228], + device='cuda:2'), in_proj_covar=tensor([0.0037, 0.0038, 0.0043, 0.0049, 0.0037, 0.0058, 0.0068, 0.0039], + device='cuda:2'), out_proj_covar=tensor([5.3000e-05, 5.4675e-05, 6.7278e-05, 6.6612e-05, 5.3688e-05, 8.5160e-05, + 1.0336e-04, 6.2672e-05], device='cuda:2') +2023-03-08 16:07:04,397 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7631.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:07:49,577 INFO [train.py:898] (2/4) Epoch 3, batch 400, loss[loss=0.2316, simple_loss=0.2982, pruned_loss=0.08254, over 18578.00 frames. ], tot_loss[loss=0.2821, simple_loss=0.3453, pruned_loss=0.1094, over 3127087.58 frames. ], batch size: 45, lr: 3.44e-02, grad_scale: 8.0 +2023-03-08 16:08:00,335 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.43 vs. 
limit=5.0 +2023-03-08 16:08:07,873 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7685.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:08:17,058 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7331, 3.2938, 1.2034, 4.1387, 2.8062, 4.5121, 1.9166, 4.1648], + device='cuda:2'), covar=tensor([0.0492, 0.1110, 0.2151, 0.0269, 0.1171, 0.0056, 0.1514, 0.0223], + device='cuda:2'), in_proj_covar=tensor([0.0092, 0.0131, 0.0130, 0.0079, 0.0123, 0.0062, 0.0126, 0.0113], + device='cuda:2'), out_proj_covar=tensor([1.2412e-04, 1.5557e-04, 1.5118e-04, 1.2140e-04, 1.5240e-04, 7.6485e-05, + 1.4557e-04, 1.3474e-04], device='cuda:2') +2023-03-08 16:08:29,792 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7704.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 16:08:47,270 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.778e+02 4.774e+02 5.790e+02 7.165e+02 2.375e+03, threshold=1.158e+03, percent-clipped=2.0 +2023-03-08 16:08:48,402 INFO [train.py:898] (2/4) Epoch 3, batch 450, loss[loss=0.3258, simple_loss=0.3847, pruned_loss=0.1334, over 17172.00 frames. ], tot_loss[loss=0.2817, simple_loss=0.345, pruned_loss=0.1092, over 3228664.23 frames. ], batch size: 78, lr: 3.44e-02, grad_scale: 8.0 +2023-03-08 16:08:56,898 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9777, 4.8764, 4.8892, 4.4161, 4.3982, 4.5865, 4.1453, 4.6467], + device='cuda:2'), covar=tensor([0.0456, 0.0317, 0.0237, 0.0378, 0.0552, 0.0342, 0.1018, 0.0361], + device='cuda:2'), in_proj_covar=tensor([0.0094, 0.0123, 0.0105, 0.0093, 0.0119, 0.0118, 0.0159, 0.0108], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-03-08 16:09:15,905 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7743.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:09:26,348 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7752.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 16:09:37,610 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0137, 5.0873, 3.2996, 4.8624, 4.5638, 5.1031, 5.0088, 2.8998], + device='cuda:2'), covar=tensor([0.0202, 0.0073, 0.0536, 0.0065, 0.0086, 0.0062, 0.0093, 0.0903], + device='cuda:2'), in_proj_covar=tensor([0.0048, 0.0034, 0.0062, 0.0037, 0.0038, 0.0034, 0.0040, 0.0074], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-08 16:09:38,584 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7763.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:09:39,037 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.02 vs. limit=5.0 +2023-03-08 16:09:46,280 INFO [train.py:898] (2/4) Epoch 3, batch 500, loss[loss=0.2926, simple_loss=0.3665, pruned_loss=0.1093, over 18278.00 frames. ], tot_loss[loss=0.2836, simple_loss=0.3469, pruned_loss=0.1102, over 3307729.18 frames. ], batch size: 49, lr: 3.43e-02, grad_scale: 8.0 +2023-03-08 16:10:26,076 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7802.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:10:28,459 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7804.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:10:30,004 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.94 vs. 
limit=2.0 +2023-03-08 16:10:45,271 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.544e+02 5.001e+02 6.338e+02 8.345e+02 2.616e+03, threshold=1.268e+03, percent-clipped=10.0 +2023-03-08 16:10:46,496 INFO [train.py:898] (2/4) Epoch 3, batch 550, loss[loss=0.2273, simple_loss=0.2952, pruned_loss=0.0797, over 17708.00 frames. ], tot_loss[loss=0.2827, simple_loss=0.346, pruned_loss=0.1097, over 3368294.27 frames. ], batch size: 39, lr: 3.42e-02, grad_scale: 8.0 +2023-03-08 16:10:46,677 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7819.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 16:10:57,661 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-08 16:11:37,615 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7863.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:11:41,869 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7867.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:11:44,717 INFO [train.py:898] (2/4) Epoch 3, batch 600, loss[loss=0.2844, simple_loss=0.3557, pruned_loss=0.1066, over 18378.00 frames. ], tot_loss[loss=0.2826, simple_loss=0.3461, pruned_loss=0.1095, over 3412684.23 frames. ], batch size: 55, lr: 3.41e-02, grad_scale: 8.0 +2023-03-08 16:11:57,734 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0211, 5.1983, 2.9117, 4.6910, 4.6989, 5.0183, 4.8772, 2.4635], + device='cuda:2'), covar=tensor([0.0182, 0.0049, 0.0669, 0.0079, 0.0067, 0.0069, 0.0105, 0.1211], + device='cuda:2'), in_proj_covar=tensor([0.0049, 0.0035, 0.0063, 0.0037, 0.0040, 0.0034, 0.0041, 0.0076], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-08 16:12:31,235 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5526, 3.7294, 4.5264, 3.0277, 4.0016, 3.8519, 4.0546, 2.4520], + device='cuda:2'), covar=tensor([0.0551, 0.0280, 0.0051, 0.0405, 0.0281, 0.0664, 0.0371, 0.0949], + device='cuda:2'), in_proj_covar=tensor([0.0087, 0.0082, 0.0052, 0.0078, 0.0079, 0.0129, 0.0059, 0.0107], + device='cuda:2'), out_proj_covar=tensor([8.5025e-05, 7.9868e-05, 4.9088e-05, 7.7280e-05, 8.1429e-05, 1.2168e-04, + 6.7665e-05, 9.8933e-05], device='cuda:2') +2023-03-08 16:12:35,765 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8014, 3.6783, 3.8554, 3.7663, 3.7481, 3.5453, 4.0225, 3.9469], + device='cuda:2'), covar=tensor([0.0114, 0.0134, 0.0098, 0.0109, 0.0108, 0.0124, 0.0099, 0.0098], + device='cuda:2'), in_proj_covar=tensor([0.0058, 0.0047, 0.0043, 0.0053, 0.0048, 0.0059, 0.0051, 0.0047], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-08 16:12:37,022 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7173, 3.1537, 3.2939, 2.6178, 2.6796, 2.7414, 2.1108, 1.8993], + device='cuda:2'), covar=tensor([0.0171, 0.0225, 0.0082, 0.0167, 0.0223, 0.0180, 0.0427, 0.0506], + device='cuda:2'), in_proj_covar=tensor([0.0024, 0.0025, 0.0023, 0.0023, 0.0035, 0.0022, 0.0036, 0.0037], + device='cuda:2'), out_proj_covar=tensor([1.1368e-04, 1.2546e-04, 9.7223e-05, 1.0723e-04, 1.6415e-04, 1.0238e-04, + 1.4836e-04, 1.5592e-04], device='cuda:2') +2023-03-08 16:12:42,166 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.737e+02 5.313e+02 6.628e+02 8.433e+02 2.088e+03, threshold=1.326e+03, percent-clipped=7.0 +2023-03-08 16:12:43,820 
INFO [train.py:898] (2/4) Epoch 3, batch 650, loss[loss=0.3064, simple_loss=0.3727, pruned_loss=0.1201, over 18491.00 frames. ], tot_loss[loss=0.2829, simple_loss=0.346, pruned_loss=0.1098, over 3448977.34 frames. ], batch size: 59, lr: 3.40e-02, grad_scale: 8.0 +2023-03-08 16:12:50,571 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([1.7669, 4.5487, 4.5389, 4.3002, 2.8849, 2.3048, 4.0950, 4.5004], + device='cuda:2'), covar=tensor([0.1392, 0.0149, 0.0056, 0.0156, 0.0964, 0.1256, 0.0215, 0.0047], + device='cuda:2'), in_proj_covar=tensor([0.0102, 0.0048, 0.0045, 0.0079, 0.0114, 0.0121, 0.0080, 0.0040], + device='cuda:2'), out_proj_covar=tensor([1.5756e-04, 9.0258e-05, 7.1636e-05, 1.2216e-04, 1.6686e-04, 1.7929e-04, + 1.2215e-04, 6.2127e-05], device='cuda:2') +2023-03-08 16:12:59,146 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7931.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:13:01,034 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.08 vs. limit=5.0 +2023-03-08 16:13:12,064 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-03-08 16:13:16,897 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.10 vs. limit=5.0 +2023-03-08 16:13:26,601 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0675, 4.8251, 2.4299, 4.7038, 5.0415, 2.4756, 3.8992, 3.8500], + device='cuda:2'), covar=tensor([0.0056, 0.0325, 0.1663, 0.0256, 0.0037, 0.1431, 0.0620, 0.0788], + device='cuda:2'), in_proj_covar=tensor([0.0067, 0.0081, 0.0152, 0.0112, 0.0060, 0.0140, 0.0142, 0.0133], + device='cuda:2'), out_proj_covar=tensor([7.5853e-05, 1.1238e-04, 1.6152e-04, 1.2555e-04, 6.8794e-05, 1.5359e-04, + 1.5784e-04, 1.5538e-04], device='cuda:2') +2023-03-08 16:13:42,826 INFO [train.py:898] (2/4) Epoch 3, batch 700, loss[loss=0.2581, simple_loss=0.3217, pruned_loss=0.09728, over 18407.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3444, pruned_loss=0.1086, over 3485338.30 frames. ], batch size: 48, lr: 3.40e-02, grad_scale: 8.0 +2023-03-08 16:13:56,307 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=7979.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:13:57,412 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7980.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:14:11,130 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3359, 2.5707, 2.3229, 2.9388, 2.9234, 3.3025, 2.4791, 3.0291], + device='cuda:2'), covar=tensor([0.0899, 0.0487, 0.1503, 0.0561, 0.0648, 0.0276, 0.1088, 0.0296], + device='cuda:2'), in_proj_covar=tensor([0.0064, 0.0053, 0.0104, 0.0076, 0.0066, 0.0046, 0.0068, 0.0059], + device='cuda:2'), out_proj_covar=tensor([1.0629e-04, 9.0338e-05, 1.6099e-04, 1.1384e-04, 1.0975e-04, 7.3081e-05, + 1.0793e-04, 8.7789e-05], device='cuda:2') +2023-03-08 16:14:45,128 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.136e+02 5.602e+02 6.674e+02 7.907e+02 1.811e+03, threshold=1.335e+03, percent-clipped=1.0 +2023-03-08 16:14:46,261 INFO [train.py:898] (2/4) Epoch 3, batch 750, loss[loss=0.2673, simple_loss=0.3385, pruned_loss=0.09801, over 18427.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3443, pruned_loss=0.1086, over 3513133.35 frames. ], batch size: 48, lr: 3.39e-02, grad_scale: 8.0 +2023-03-08 16:15:09,945 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.92 vs. 
limit=5.0 +2023-03-08 16:15:38,388 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8063.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:15:44,291 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9465, 4.7104, 2.3256, 4.5680, 4.9467, 2.0904, 3.7895, 3.7033], + device='cuda:2'), covar=tensor([0.0089, 0.0348, 0.1668, 0.0246, 0.0051, 0.1658, 0.0627, 0.0751], + device='cuda:2'), in_proj_covar=tensor([0.0070, 0.0084, 0.0156, 0.0116, 0.0060, 0.0144, 0.0144, 0.0137], + device='cuda:2'), out_proj_covar=tensor([7.9240e-05, 1.1626e-04, 1.6628e-04, 1.3032e-04, 6.9095e-05, 1.5877e-04, + 1.6059e-04, 1.6070e-04], device='cuda:2') +2023-03-08 16:15:44,999 INFO [train.py:898] (2/4) Epoch 3, batch 800, loss[loss=0.2835, simple_loss=0.3557, pruned_loss=0.1057, over 18352.00 frames. ], tot_loss[loss=0.2806, simple_loss=0.3441, pruned_loss=0.1085, over 3538692.98 frames. ], batch size: 55, lr: 3.38e-02, grad_scale: 8.0 +2023-03-08 16:16:09,078 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2736, 5.0041, 5.1478, 4.8682, 4.6886, 5.0123, 4.2819, 4.8609], + device='cuda:2'), covar=tensor([0.0464, 0.0596, 0.0320, 0.0301, 0.0635, 0.0303, 0.1302, 0.0380], + device='cuda:2'), in_proj_covar=tensor([0.0096, 0.0126, 0.0107, 0.0094, 0.0120, 0.0121, 0.0163, 0.0110], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-03-08 16:16:21,797 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8099.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:16:23,509 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.93 vs. limit=2.0 +2023-03-08 16:16:35,651 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8111.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:16:43,310 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.074e+02 5.395e+02 6.687e+02 8.203e+02 1.547e+03, threshold=1.337e+03, percent-clipped=3.0 +2023-03-08 16:16:44,990 INFO [train.py:898] (2/4) Epoch 3, batch 850, loss[loss=0.3007, simple_loss=0.3585, pruned_loss=0.1214, over 17024.00 frames. ], tot_loss[loss=0.2804, simple_loss=0.3437, pruned_loss=0.1085, over 3538342.14 frames. ], batch size: 78, lr: 3.37e-02, grad_scale: 8.0 +2023-03-08 16:17:31,465 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8158.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:17:43,831 INFO [train.py:898] (2/4) Epoch 3, batch 900, loss[loss=0.3152, simple_loss=0.371, pruned_loss=0.1297, over 18120.00 frames. ], tot_loss[loss=0.2812, simple_loss=0.3444, pruned_loss=0.109, over 3541235.70 frames. ], batch size: 62, lr: 3.37e-02, grad_scale: 8.0 +2023-03-08 16:17:45,609 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.83 vs. limit=2.0 +2023-03-08 16:18:42,376 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.447e+02 5.230e+02 6.588e+02 8.350e+02 1.625e+03, threshold=1.318e+03, percent-clipped=3.0 +2023-03-08 16:18:43,445 INFO [train.py:898] (2/4) Epoch 3, batch 950, loss[loss=0.317, simple_loss=0.374, pruned_loss=0.13, over 16133.00 frames. ], tot_loss[loss=0.2799, simple_loss=0.3436, pruned_loss=0.1081, over 3549193.15 frames. 
], batch size: 94, lr: 3.36e-02, grad_scale: 8.0 +2023-03-08 16:19:29,227 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5122, 3.2196, 1.6179, 4.0291, 2.6707, 4.1569, 1.7428, 3.6826], + device='cuda:2'), covar=tensor([0.0574, 0.1129, 0.1946, 0.0378, 0.1279, 0.0098, 0.1598, 0.0397], + device='cuda:2'), in_proj_covar=tensor([0.0103, 0.0145, 0.0137, 0.0089, 0.0129, 0.0068, 0.0133, 0.0121], + device='cuda:2'), out_proj_covar=tensor([1.4129e-04, 1.7694e-04, 1.6288e-04, 1.4222e-04, 1.6303e-04, 9.0575e-05, + 1.5576e-04, 1.4731e-04], device='cuda:2') +2023-03-08 16:19:38,295 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8265.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:19:42,223 INFO [train.py:898] (2/4) Epoch 3, batch 1000, loss[loss=0.2867, simple_loss=0.3569, pruned_loss=0.1082, over 18494.00 frames. ], tot_loss[loss=0.2806, simple_loss=0.3445, pruned_loss=0.1084, over 3553433.26 frames. ], batch size: 53, lr: 3.35e-02, grad_scale: 8.0 +2023-03-08 16:19:55,492 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8280.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:20:40,394 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.475e+02 5.034e+02 6.200e+02 8.356e+02 1.633e+03, threshold=1.240e+03, percent-clipped=2.0 +2023-03-08 16:20:41,571 INFO [train.py:898] (2/4) Epoch 3, batch 1050, loss[loss=0.2726, simple_loss=0.3424, pruned_loss=0.1014, over 17090.00 frames. ], tot_loss[loss=0.2804, simple_loss=0.3443, pruned_loss=0.1082, over 3563009.37 frames. ], batch size: 78, lr: 3.34e-02, grad_scale: 8.0 +2023-03-08 16:20:49,619 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8326.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:20:51,659 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8328.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:21:40,129 INFO [train.py:898] (2/4) Epoch 3, batch 1100, loss[loss=0.2693, simple_loss=0.3335, pruned_loss=0.1025, over 18526.00 frames. ], tot_loss[loss=0.2814, simple_loss=0.3454, pruned_loss=0.1087, over 3564701.49 frames. ], batch size: 49, lr: 3.34e-02, grad_scale: 8.0 +2023-03-08 16:22:04,923 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0544, 3.6591, 4.0558, 4.2318, 2.3251, 4.4716, 4.0284, 2.9489], + device='cuda:2'), covar=tensor([0.0126, 0.0275, 0.0100, 0.0119, 0.1319, 0.0075, 0.0168, 0.0909], + device='cuda:2'), in_proj_covar=tensor([0.0079, 0.0088, 0.0065, 0.0065, 0.0131, 0.0068, 0.0069, 0.0127], + device='cuda:2'), out_proj_covar=tensor([7.6349e-05, 8.5940e-05, 6.8629e-05, 6.5280e-05, 1.2670e-04, 6.6092e-05, + 7.7555e-05, 1.2668e-04], device='cuda:2') +2023-03-08 16:22:16,118 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8399.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:22:38,112 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.304e+02 5.764e+02 6.854e+02 8.468e+02 1.709e+03, threshold=1.371e+03, percent-clipped=7.0 +2023-03-08 16:22:39,118 INFO [train.py:898] (2/4) Epoch 3, batch 1150, loss[loss=0.3445, simple_loss=0.3925, pruned_loss=0.1483, over 18261.00 frames. ], tot_loss[loss=0.2828, simple_loss=0.3464, pruned_loss=0.1096, over 3572298.63 frames. 
], batch size: 60, lr: 3.33e-02, grad_scale: 8.0 +2023-03-08 16:23:12,019 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8447.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:23:26,446 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8458.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:23:38,494 INFO [train.py:898] (2/4) Epoch 3, batch 1200, loss[loss=0.2819, simple_loss=0.3481, pruned_loss=0.1079, over 18334.00 frames. ], tot_loss[loss=0.2812, simple_loss=0.3449, pruned_loss=0.1088, over 3583612.01 frames. ], batch size: 55, lr: 3.32e-02, grad_scale: 8.0 +2023-03-08 16:23:57,601 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5726, 3.4105, 2.6973, 2.8397, 3.0737, 2.3188, 2.3678, 3.3403], + device='cuda:2'), covar=tensor([0.0104, 0.0131, 0.0490, 0.0170, 0.0193, 0.0361, 0.0374, 0.0201], + device='cuda:2'), in_proj_covar=tensor([0.0039, 0.0042, 0.0045, 0.0052, 0.0040, 0.0061, 0.0071, 0.0043], + device='cuda:2'), out_proj_covar=tensor([5.6888e-05, 6.1597e-05, 7.1864e-05, 7.3929e-05, 5.8545e-05, 9.2803e-05, + 1.1049e-04, 6.7699e-05], device='cuda:2') +2023-03-08 16:24:06,855 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1697, 4.1659, 5.0456, 3.8580, 4.4298, 3.8363, 3.7697, 2.2956], + device='cuda:2'), covar=tensor([0.0454, 0.0263, 0.0031, 0.0293, 0.0260, 0.0834, 0.0451, 0.1152], + device='cuda:2'), in_proj_covar=tensor([0.0096, 0.0091, 0.0054, 0.0086, 0.0093, 0.0146, 0.0069, 0.0119], + device='cuda:2'), out_proj_covar=tensor([9.5668e-05, 9.0971e-05, 5.2649e-05, 8.6544e-05, 9.7008e-05, 1.4056e-04, + 8.0270e-05, 1.1187e-04], device='cuda:2') +2023-03-08 16:24:22,308 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8506.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:24:36,202 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.117e+02 5.075e+02 6.913e+02 8.856e+02 3.555e+03, threshold=1.383e+03, percent-clipped=10.0 +2023-03-08 16:24:37,338 INFO [train.py:898] (2/4) Epoch 3, batch 1250, loss[loss=0.2835, simple_loss=0.3538, pruned_loss=0.1067, over 18294.00 frames. ], tot_loss[loss=0.2799, simple_loss=0.3439, pruned_loss=0.108, over 3596338.59 frames. ], batch size: 54, lr: 3.31e-02, grad_scale: 8.0 +2023-03-08 16:24:41,189 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5708, 3.0841, 2.6525, 3.1316, 3.0930, 3.2970, 2.9430, 3.2460], + device='cuda:2'), covar=tensor([0.0665, 0.0331, 0.0834, 0.0299, 0.0455, 0.0300, 0.0513, 0.0216], + device='cuda:2'), in_proj_covar=tensor([0.0066, 0.0052, 0.0106, 0.0072, 0.0066, 0.0049, 0.0067, 0.0060], + device='cuda:2'), out_proj_covar=tensor([1.1228e-04, 8.9290e-05, 1.6690e-04, 1.1168e-04, 1.1210e-04, 7.8725e-05, + 1.0850e-04, 9.2686e-05], device='cuda:2') +2023-03-08 16:25:36,295 INFO [train.py:898] (2/4) Epoch 3, batch 1300, loss[loss=0.253, simple_loss=0.3135, pruned_loss=0.09619, over 17738.00 frames. ], tot_loss[loss=0.2787, simple_loss=0.3429, pruned_loss=0.1072, over 3599901.56 frames. ], batch size: 39, lr: 3.31e-02, grad_scale: 8.0 +2023-03-08 16:25:58,948 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. 
limit=2.0 +2023-03-08 16:26:35,144 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.185e+02 5.027e+02 6.070e+02 7.698e+02 1.470e+03, threshold=1.214e+03, percent-clipped=1.0 +2023-03-08 16:26:36,226 INFO [train.py:898] (2/4) Epoch 3, batch 1350, loss[loss=0.3189, simple_loss=0.3805, pruned_loss=0.1287, over 18498.00 frames. ], tot_loss[loss=0.2773, simple_loss=0.3417, pruned_loss=0.1064, over 3606564.94 frames. ], batch size: 53, lr: 3.30e-02, grad_scale: 8.0 +2023-03-08 16:26:38,822 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8621.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:26:43,707 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.55 vs. limit=5.0 +2023-03-08 16:27:35,129 INFO [train.py:898] (2/4) Epoch 3, batch 1400, loss[loss=0.273, simple_loss=0.3377, pruned_loss=0.1042, over 18289.00 frames. ], tot_loss[loss=0.2789, simple_loss=0.343, pruned_loss=0.1074, over 3607042.04 frames. ], batch size: 49, lr: 3.29e-02, grad_scale: 8.0 +2023-03-08 16:27:43,640 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.09 vs. limit=5.0 +2023-03-08 16:28:08,112 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.4433, 5.2292, 5.2961, 4.9845, 5.0434, 5.0306, 4.5266, 4.9189], + device='cuda:2'), covar=tensor([0.0342, 0.0317, 0.0178, 0.0265, 0.0376, 0.0262, 0.0905, 0.0366], + device='cuda:2'), in_proj_covar=tensor([0.0096, 0.0131, 0.0110, 0.0098, 0.0125, 0.0125, 0.0172, 0.0115], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003], + device='cuda:2') +2023-03-08 16:28:32,214 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.853e+02 5.972e+02 7.488e+02 8.927e+02 1.917e+03, threshold=1.498e+03, percent-clipped=2.0 +2023-03-08 16:28:33,367 INFO [train.py:898] (2/4) Epoch 3, batch 1450, loss[loss=0.2263, simple_loss=0.2971, pruned_loss=0.07777, over 18271.00 frames. ], tot_loss[loss=0.2785, simple_loss=0.3426, pruned_loss=0.1072, over 3614798.26 frames. ], batch size: 45, lr: 3.29e-02, grad_scale: 8.0 +2023-03-08 16:29:21,396 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8760.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:29:32,456 INFO [train.py:898] (2/4) Epoch 3, batch 1500, loss[loss=0.2332, simple_loss=0.2977, pruned_loss=0.08436, over 18420.00 frames. ], tot_loss[loss=0.2781, simple_loss=0.3422, pruned_loss=0.1069, over 3597678.93 frames. 
], batch size: 43, lr: 3.28e-02, grad_scale: 8.0 +2023-03-08 16:29:56,181 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6001, 4.5544, 4.7255, 4.6712, 4.6528, 4.6212, 5.0162, 4.9297], + device='cuda:2'), covar=tensor([0.0090, 0.0127, 0.0106, 0.0097, 0.0093, 0.0119, 0.0084, 0.0118], + device='cuda:2'), in_proj_covar=tensor([0.0059, 0.0047, 0.0044, 0.0055, 0.0049, 0.0061, 0.0054, 0.0048], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-08 16:30:03,125 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7932, 3.3157, 4.3201, 4.0197, 1.9147, 4.2931, 4.0703, 2.4076], + device='cuda:2'), covar=tensor([0.0205, 0.0423, 0.0061, 0.0144, 0.1606, 0.0078, 0.0310, 0.0950], + device='cuda:2'), in_proj_covar=tensor([0.0083, 0.0090, 0.0063, 0.0066, 0.0132, 0.0069, 0.0068, 0.0126], + device='cuda:2'), out_proj_covar=tensor([8.0934e-05, 8.7907e-05, 6.6720e-05, 6.6486e-05, 1.2814e-04, 6.7239e-05, + 7.6450e-05, 1.2695e-04], device='cuda:2') +2023-03-08 16:30:29,511 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.789e+02 5.215e+02 6.562e+02 8.622e+02 2.061e+03, threshold=1.312e+03, percent-clipped=5.0 +2023-03-08 16:30:30,744 INFO [train.py:898] (2/4) Epoch 3, batch 1550, loss[loss=0.296, simple_loss=0.3617, pruned_loss=0.1151, over 17857.00 frames. ], tot_loss[loss=0.277, simple_loss=0.3409, pruned_loss=0.1065, over 3598527.30 frames. ], batch size: 70, lr: 3.27e-02, grad_scale: 8.0 +2023-03-08 16:30:34,102 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8821.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:31:28,695 INFO [train.py:898] (2/4) Epoch 3, batch 1600, loss[loss=0.2773, simple_loss=0.3442, pruned_loss=0.1052, over 18383.00 frames. ], tot_loss[loss=0.2771, simple_loss=0.3414, pruned_loss=0.1064, over 3600142.73 frames. ], batch size: 50, lr: 3.26e-02, grad_scale: 8.0 +2023-03-08 16:31:37,012 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.63 vs. limit=2.0 +2023-03-08 16:31:41,020 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.01 vs. limit=2.0 +2023-03-08 16:32:26,433 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.256e+02 5.119e+02 6.262e+02 8.326e+02 2.097e+03, threshold=1.252e+03, percent-clipped=4.0 +2023-03-08 16:32:27,486 INFO [train.py:898] (2/4) Epoch 3, batch 1650, loss[loss=0.3243, simple_loss=0.3791, pruned_loss=0.1347, over 18300.00 frames. ], tot_loss[loss=0.2758, simple_loss=0.3404, pruned_loss=0.1056, over 3602056.53 frames. ], batch size: 57, lr: 3.26e-02, grad_scale: 8.0 +2023-03-08 16:32:30,145 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8921.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:32:38,317 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8927.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:33:09,902 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.22 vs. 
limit=5.0 +2023-03-08 16:33:12,887 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5690, 3.0816, 4.1331, 3.7078, 1.9350, 4.0797, 3.8958, 2.3165], + device='cuda:2'), covar=tensor([0.0219, 0.0441, 0.0089, 0.0184, 0.1662, 0.0101, 0.0165, 0.1041], + device='cuda:2'), in_proj_covar=tensor([0.0086, 0.0091, 0.0065, 0.0069, 0.0139, 0.0074, 0.0071, 0.0131], + device='cuda:2'), out_proj_covar=tensor([8.3844e-05, 8.9372e-05, 6.8777e-05, 6.9241e-05, 1.3503e-04, 7.1938e-05, + 7.8960e-05, 1.3185e-04], device='cuda:2') +2023-03-08 16:33:26,978 INFO [train.py:898] (2/4) Epoch 3, batch 1700, loss[loss=0.2854, simple_loss=0.353, pruned_loss=0.1089, over 18242.00 frames. ], tot_loss[loss=0.277, simple_loss=0.3416, pruned_loss=0.1063, over 3591130.09 frames. ], batch size: 60, lr: 3.25e-02, grad_scale: 8.0 +2023-03-08 16:33:27,142 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=8969.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:33:38,856 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1955, 2.9482, 1.5858, 3.6076, 2.5280, 3.7555, 1.9161, 3.1704], + device='cuda:2'), covar=tensor([0.0515, 0.1076, 0.2024, 0.0403, 0.1101, 0.0100, 0.1498, 0.0544], + device='cuda:2'), in_proj_covar=tensor([0.0107, 0.0152, 0.0141, 0.0100, 0.0134, 0.0070, 0.0138, 0.0127], + device='cuda:2'), out_proj_covar=tensor([1.5017e-04, 1.9060e-04, 1.7382e-04, 1.6222e-04, 1.7255e-04, 9.5435e-05, + 1.6785e-04, 1.5771e-04], device='cuda:2') +2023-03-08 16:33:48,779 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8986.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:33:51,269 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8988.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:33:52,721 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.57 vs. limit=2.0 +2023-03-08 16:34:08,672 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4953, 3.0625, 1.3182, 3.9574, 2.4845, 4.2837, 2.1606, 3.8453], + device='cuda:2'), covar=tensor([0.0458, 0.1114, 0.2178, 0.0363, 0.1197, 0.0073, 0.1398, 0.0367], + device='cuda:2'), in_proj_covar=tensor([0.0107, 0.0154, 0.0141, 0.0101, 0.0134, 0.0070, 0.0140, 0.0126], + device='cuda:2'), out_proj_covar=tensor([1.5138e-04, 1.9260e-04, 1.7435e-04, 1.6505e-04, 1.7442e-04, 9.6318e-05, + 1.7000e-04, 1.5669e-04], device='cuda:2') +2023-03-08 16:34:20,038 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9013.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:34:25,792 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.324e+02 5.263e+02 6.332e+02 7.938e+02 1.916e+03, threshold=1.266e+03, percent-clipped=4.0 +2023-03-08 16:34:27,006 INFO [train.py:898] (2/4) Epoch 3, batch 1750, loss[loss=0.2556, simple_loss=0.3209, pruned_loss=0.09517, over 18550.00 frames. ], tot_loss[loss=0.2765, simple_loss=0.3414, pruned_loss=0.1058, over 3586565.41 frames. ], batch size: 49, lr: 3.24e-02, grad_scale: 8.0 +2023-03-08 16:34:59,635 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.01 vs. limit=2.0 +2023-03-08 16:35:00,529 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9047.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:35:25,208 INFO [train.py:898] (2/4) Epoch 3, batch 1800, loss[loss=0.3099, simple_loss=0.3755, pruned_loss=0.1221, over 18090.00 frames. 
], tot_loss[loss=0.2768, simple_loss=0.3415, pruned_loss=0.1061, over 3588161.15 frames. ], batch size: 62, lr: 3.24e-02, grad_scale: 8.0 +2023-03-08 16:35:31,963 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9074.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:36:20,739 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9116.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:36:22,835 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.642e+02 5.266e+02 6.349e+02 9.123e+02 1.669e+03, threshold=1.270e+03, percent-clipped=4.0 +2023-03-08 16:36:24,043 INFO [train.py:898] (2/4) Epoch 3, batch 1850, loss[loss=0.2492, simple_loss=0.3042, pruned_loss=0.09711, over 17710.00 frames. ], tot_loss[loss=0.2752, simple_loss=0.3402, pruned_loss=0.1051, over 3603474.66 frames. ], batch size: 39, lr: 3.23e-02, grad_scale: 8.0 +2023-03-08 16:37:04,449 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4496, 3.0881, 2.7712, 2.7944, 3.0287, 2.9321, 2.1846, 3.3997], + device='cuda:2'), covar=tensor([0.0056, 0.0107, 0.0316, 0.0156, 0.0134, 0.0186, 0.0321, 0.0106], + device='cuda:2'), in_proj_covar=tensor([0.0039, 0.0044, 0.0046, 0.0057, 0.0043, 0.0064, 0.0075, 0.0044], + device='cuda:2'), out_proj_covar=tensor([5.6630e-05, 6.5681e-05, 7.4348e-05, 8.3341e-05, 6.5744e-05, 9.8497e-05, + 1.2008e-04, 6.8975e-05], device='cuda:2') +2023-03-08 16:37:22,887 INFO [train.py:898] (2/4) Epoch 3, batch 1900, loss[loss=0.4379, simple_loss=0.4422, pruned_loss=0.2168, over 12946.00 frames. ], tot_loss[loss=0.2749, simple_loss=0.3401, pruned_loss=0.1049, over 3604794.27 frames. ], batch size: 129, lr: 3.22e-02, grad_scale: 8.0 +2023-03-08 16:37:43,245 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-03-08 16:38:07,227 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8344, 4.0026, 4.1929, 3.1986, 2.9800, 2.7876, 1.7570, 1.9256], + device='cuda:2'), covar=tensor([0.0267, 0.0268, 0.0091, 0.0229, 0.0346, 0.0307, 0.0777, 0.0816], + device='cuda:2'), in_proj_covar=tensor([0.0028, 0.0027, 0.0023, 0.0027, 0.0041, 0.0023, 0.0041, 0.0045], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 16:38:20,291 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.837e+02 5.068e+02 6.125e+02 7.856e+02 1.754e+03, threshold=1.225e+03, percent-clipped=6.0 +2023-03-08 16:38:21,520 INFO [train.py:898] (2/4) Epoch 3, batch 1950, loss[loss=0.3594, simple_loss=0.3982, pruned_loss=0.1603, over 12168.00 frames. ], tot_loss[loss=0.2765, simple_loss=0.3411, pruned_loss=0.106, over 3573432.16 frames. ], batch size: 129, lr: 3.22e-02, grad_scale: 8.0 +2023-03-08 16:39:16,261 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.90 vs. limit=2.0 +2023-03-08 16:39:21,327 INFO [train.py:898] (2/4) Epoch 3, batch 2000, loss[loss=0.322, simple_loss=0.3839, pruned_loss=0.13, over 18154.00 frames. ], tot_loss[loss=0.2754, simple_loss=0.3402, pruned_loss=0.1054, over 3572512.88 frames. ], batch size: 62, lr: 3.21e-02, grad_scale: 8.0 +2023-03-08 16:39:38,153 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9283.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:39:47,084 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. 
limit=2.0 +2023-03-08 16:40:08,550 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.68 vs. limit=2.0 +2023-03-08 16:40:11,898 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.3576, 3.9919, 4.9206, 4.1842, 2.6926, 2.3167, 4.5584, 4.9267], + device='cuda:2'), covar=tensor([0.0945, 0.0418, 0.0063, 0.0163, 0.0992, 0.1189, 0.0179, 0.0025], + device='cuda:2'), in_proj_covar=tensor([0.0109, 0.0064, 0.0047, 0.0088, 0.0125, 0.0130, 0.0092, 0.0045], + device='cuda:2'), out_proj_covar=tensor([1.7315e-04, 1.1552e-04, 7.5842e-05, 1.4014e-04, 1.8779e-04, 1.9853e-04, + 1.4692e-04, 7.1833e-05], device='cuda:2') +2023-03-08 16:40:19,551 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.362e+02 5.058e+02 6.498e+02 8.656e+02 1.894e+03, threshold=1.300e+03, percent-clipped=4.0 +2023-03-08 16:40:20,772 INFO [train.py:898] (2/4) Epoch 3, batch 2050, loss[loss=0.2809, simple_loss=0.351, pruned_loss=0.1054, over 17806.00 frames. ], tot_loss[loss=0.2739, simple_loss=0.3388, pruned_loss=0.1045, over 3579428.27 frames. ], batch size: 70, lr: 3.20e-02, grad_scale: 8.0 +2023-03-08 16:40:44,560 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7214, 4.6960, 4.6966, 4.6033, 4.6252, 4.6441, 4.9726, 5.1394], + device='cuda:2'), covar=tensor([0.0087, 0.0118, 0.0131, 0.0117, 0.0112, 0.0107, 0.0117, 0.0120], + device='cuda:2'), in_proj_covar=tensor([0.0059, 0.0047, 0.0043, 0.0055, 0.0051, 0.0062, 0.0053, 0.0049], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 16:40:47,888 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9342.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:41:10,086 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2839, 5.8708, 5.3594, 5.5526, 5.3052, 5.6002, 5.9757, 5.7937], + device='cuda:2'), covar=tensor([0.1098, 0.0557, 0.0360, 0.0669, 0.1518, 0.0521, 0.0422, 0.0598], + device='cuda:2'), in_proj_covar=tensor([0.0316, 0.0268, 0.0207, 0.0279, 0.0400, 0.0291, 0.0288, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:2') +2023-03-08 16:41:20,232 INFO [train.py:898] (2/4) Epoch 3, batch 2100, loss[loss=0.2477, simple_loss=0.3046, pruned_loss=0.09545, over 18478.00 frames. ], tot_loss[loss=0.2729, simple_loss=0.3381, pruned_loss=0.1039, over 3583840.49 frames. ], batch size: 44, lr: 3.20e-02, grad_scale: 8.0 +2023-03-08 16:41:20,497 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9369.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:41:22,949 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1441, 4.2813, 4.5467, 3.2527, 3.1031, 3.1319, 1.8648, 1.8596], + device='cuda:2'), covar=tensor([0.0251, 0.0152, 0.0056, 0.0200, 0.0360, 0.0165, 0.0640, 0.0780], + device='cuda:2'), in_proj_covar=tensor([0.0026, 0.0026, 0.0023, 0.0027, 0.0039, 0.0022, 0.0038, 0.0045], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 16:42:13,632 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9414.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:42:14,028 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.31 vs. 
limit=5.0 +2023-03-08 16:42:15,830 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9416.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:42:17,371 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=2.00 vs. limit=2.0 +2023-03-08 16:42:17,749 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.310e+02 4.905e+02 6.085e+02 7.510e+02 1.544e+03, threshold=1.217e+03, percent-clipped=2.0 +2023-03-08 16:42:18,946 INFO [train.py:898] (2/4) Epoch 3, batch 2150, loss[loss=0.2664, simple_loss=0.3333, pruned_loss=0.09973, over 18538.00 frames. ], tot_loss[loss=0.2728, simple_loss=0.3382, pruned_loss=0.1038, over 3584351.35 frames. ], batch size: 49, lr: 3.19e-02, grad_scale: 8.0 +2023-03-08 16:43:13,047 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9464.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:43:18,539 INFO [train.py:898] (2/4) Epoch 3, batch 2200, loss[loss=0.2898, simple_loss=0.3444, pruned_loss=0.1176, over 18343.00 frames. ], tot_loss[loss=0.2726, simple_loss=0.3381, pruned_loss=0.1036, over 3577819.31 frames. ], batch size: 46, lr: 3.18e-02, grad_scale: 8.0 +2023-03-08 16:43:20,248 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2350, 2.6559, 2.3975, 2.6820, 3.0782, 3.3586, 2.6759, 3.0516], + device='cuda:2'), covar=tensor([0.0323, 0.0355, 0.0941, 0.0489, 0.0490, 0.0169, 0.0407, 0.0216], + device='cuda:2'), in_proj_covar=tensor([0.0072, 0.0058, 0.0112, 0.0080, 0.0068, 0.0049, 0.0072, 0.0063], + device='cuda:2'), out_proj_covar=tensor([1.2571e-04, 1.0424e-04, 1.8550e-04, 1.3103e-04, 1.2004e-04, 8.2560e-05, + 1.2589e-04, 1.0269e-04], device='cuda:2') +2023-03-08 16:43:25,695 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9475.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:44:16,068 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.011e+02 5.021e+02 6.106e+02 7.632e+02 1.377e+03, threshold=1.221e+03, percent-clipped=3.0 +2023-03-08 16:44:17,247 INFO [train.py:898] (2/4) Epoch 3, batch 2250, loss[loss=0.2408, simple_loss=0.3111, pruned_loss=0.08531, over 18496.00 frames. ], tot_loss[loss=0.2717, simple_loss=0.3372, pruned_loss=0.1031, over 3577776.78 frames. ], batch size: 47, lr: 3.18e-02, grad_scale: 8.0 +2023-03-08 16:44:29,160 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-08 16:45:16,165 INFO [train.py:898] (2/4) Epoch 3, batch 2300, loss[loss=0.2929, simple_loss=0.3537, pruned_loss=0.1161, over 18349.00 frames. ], tot_loss[loss=0.2716, simple_loss=0.3372, pruned_loss=0.103, over 3581050.66 frames. ], batch size: 55, lr: 3.17e-02, grad_scale: 8.0 +2023-03-08 16:45:32,487 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9583.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:45:33,824 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9584.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:46:14,651 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.768e+02 4.849e+02 6.155e+02 7.957e+02 1.846e+03, threshold=1.231e+03, percent-clipped=3.0 +2023-03-08 16:46:15,812 INFO [train.py:898] (2/4) Epoch 3, batch 2350, loss[loss=0.2434, simple_loss=0.3154, pruned_loss=0.08572, over 18505.00 frames. ], tot_loss[loss=0.2724, simple_loss=0.3383, pruned_loss=0.1033, over 3586650.08 frames. 
], batch size: 51, lr: 3.16e-02, grad_scale: 8.0 +2023-03-08 16:46:29,871 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9631.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:46:31,488 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-03-08 16:46:42,575 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9642.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:46:46,086 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9645.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:47:00,903 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9658.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:47:14,988 INFO [train.py:898] (2/4) Epoch 3, batch 2400, loss[loss=0.2389, simple_loss=0.3091, pruned_loss=0.08429, over 18391.00 frames. ], tot_loss[loss=0.2725, simple_loss=0.3385, pruned_loss=0.1033, over 3594236.86 frames. ], batch size: 48, lr: 3.16e-02, grad_scale: 8.0 +2023-03-08 16:47:15,251 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9669.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:47:39,283 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9690.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:48:11,384 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=9717.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:48:13,582 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.796e+02 5.501e+02 6.533e+02 8.034e+02 1.544e+03, threshold=1.307e+03, percent-clipped=2.0 +2023-03-08 16:48:13,619 INFO [train.py:898] (2/4) Epoch 3, batch 2450, loss[loss=0.2696, simple_loss=0.3395, pruned_loss=0.09986, over 18298.00 frames. ], tot_loss[loss=0.2725, simple_loss=0.3382, pruned_loss=0.1034, over 3583596.66 frames. ], batch size: 57, lr: 3.15e-02, grad_scale: 8.0 +2023-03-08 16:48:14,021 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9719.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:48:41,403 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9743.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:49:11,284 INFO [train.py:898] (2/4) Epoch 3, batch 2500, loss[loss=0.3145, simple_loss=0.376, pruned_loss=0.1265, over 16198.00 frames. ], tot_loss[loss=0.2725, simple_loss=0.3386, pruned_loss=0.1032, over 3589331.94 frames. ], batch size: 94, lr: 3.14e-02, grad_scale: 8.0 +2023-03-08 16:49:13,106 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9770.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:49:35,095 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.79 vs. limit=2.0 +2023-03-08 16:49:51,949 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9804.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:49:54,570 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.67 vs. limit=2.0 +2023-03-08 16:50:09,361 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.524e+02 5.803e+02 7.293e+02 8.388e+02 1.924e+03, threshold=1.459e+03, percent-clipped=4.0 +2023-03-08 16:50:09,386 INFO [train.py:898] (2/4) Epoch 3, batch 2550, loss[loss=0.2908, simple_loss=0.3544, pruned_loss=0.1136, over 17072.00 frames. ], tot_loss[loss=0.2731, simple_loss=0.3387, pruned_loss=0.1038, over 3581398.43 frames. 
], batch size: 78, lr: 3.14e-02, grad_scale: 8.0 +2023-03-08 16:51:06,870 INFO [train.py:898] (2/4) Epoch 3, batch 2600, loss[loss=0.2155, simple_loss=0.2884, pruned_loss=0.07129, over 18399.00 frames. ], tot_loss[loss=0.2719, simple_loss=0.3375, pruned_loss=0.1031, over 3578334.76 frames. ], batch size: 42, lr: 3.13e-02, grad_scale: 8.0 +2023-03-08 16:51:10,852 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6202, 3.7505, 4.2193, 2.7335, 3.6300, 3.1611, 3.2801, 2.2457], + device='cuda:2'), covar=tensor([0.0453, 0.0268, 0.0056, 0.0358, 0.0342, 0.0906, 0.0662, 0.1065], + device='cuda:2'), in_proj_covar=tensor([0.0113, 0.0110, 0.0060, 0.0096, 0.0120, 0.0168, 0.0090, 0.0137], + device='cuda:2'), out_proj_covar=tensor([1.1497e-04, 1.1315e-04, 6.2643e-05, 9.8604e-05, 1.2543e-04, 1.6338e-04, + 1.0658e-04, 1.3316e-04], device='cuda:2') +2023-03-08 16:51:29,574 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.03 vs. limit=2.0 +2023-03-08 16:51:32,783 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-03-08 16:51:56,559 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4355, 4.8321, 2.3001, 4.7522, 5.4116, 2.5424, 4.3069, 3.8560], + device='cuda:2'), covar=tensor([0.0046, 0.0598, 0.1596, 0.0327, 0.0031, 0.1444, 0.0463, 0.0768], + device='cuda:2'), in_proj_covar=tensor([0.0076, 0.0105, 0.0165, 0.0137, 0.0066, 0.0155, 0.0163, 0.0154], + device='cuda:2'), out_proj_covar=tensor([9.4844e-05, 1.5287e-04, 1.8855e-04, 1.6436e-04, 8.4009e-05, 1.8390e-04, + 1.9356e-04, 1.9278e-04], device='cuda:2') +2023-03-08 16:52:05,255 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.617e+02 5.195e+02 6.356e+02 7.709e+02 1.531e+03, threshold=1.271e+03, percent-clipped=3.0 +2023-03-08 16:52:05,281 INFO [train.py:898] (2/4) Epoch 3, batch 2650, loss[loss=0.2978, simple_loss=0.3699, pruned_loss=0.1128, over 18067.00 frames. ], tot_loss[loss=0.2707, simple_loss=0.3365, pruned_loss=0.1024, over 3582193.04 frames. ], batch size: 62, lr: 3.13e-02, grad_scale: 8.0 +2023-03-08 16:52:11,133 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-08 16:52:31,389 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9940.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:53:03,797 INFO [train.py:898] (2/4) Epoch 3, batch 2700, loss[loss=0.349, simple_loss=0.387, pruned_loss=0.1554, over 12606.00 frames. ], tot_loss[loss=0.2705, simple_loss=0.3365, pruned_loss=0.1023, over 3575126.08 frames. ], batch size: 129, lr: 3.12e-02, grad_scale: 8.0 +2023-03-08 16:54:01,207 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10014.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:54:04,910 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.04 vs. limit=2.0 +2023-03-08 16:54:06,413 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.004e+02 5.454e+02 7.297e+02 9.817e+02 2.367e+03, threshold=1.459e+03, percent-clipped=11.0 +2023-03-08 16:54:06,438 INFO [train.py:898] (2/4) Epoch 3, batch 2750, loss[loss=0.2258, simple_loss=0.2912, pruned_loss=0.08018, over 18435.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.336, pruned_loss=0.1017, over 3583477.14 frames. ], batch size: 43, lr: 3.11e-02, grad_scale: 8.0 +2023-03-08 16:54:11,774 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. 
limit=2.0 +2023-03-08 16:54:27,764 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0681, 5.7360, 5.2704, 5.4270, 5.0904, 5.4251, 5.7609, 5.6879], + device='cuda:2'), covar=tensor([0.1162, 0.0474, 0.0460, 0.0615, 0.1597, 0.0572, 0.0463, 0.0506], + device='cuda:2'), in_proj_covar=tensor([0.0329, 0.0276, 0.0216, 0.0289, 0.0406, 0.0297, 0.0293, 0.0260], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:2') +2023-03-08 16:54:31,198 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10039.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:54:32,487 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4259, 1.8667, 4.6069, 2.7269, 3.4506, 4.7005, 4.4193, 4.1703], + device='cuda:2'), covar=tensor([0.0328, 0.0926, 0.0158, 0.0612, 0.1252, 0.0044, 0.0214, 0.0192], + device='cuda:2'), in_proj_covar=tensor([0.0102, 0.0130, 0.0068, 0.0133, 0.0215, 0.0079, 0.0110, 0.0102], + device='cuda:2'), out_proj_covar=tensor([8.4975e-05, 1.0711e-04, 5.9377e-05, 1.0378e-04, 1.7380e-04, 5.8229e-05, + 9.2033e-05, 8.1071e-05], device='cuda:2') +2023-03-08 16:54:57,037 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10062.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 16:55:04,437 INFO [train.py:898] (2/4) Epoch 3, batch 2800, loss[loss=0.2599, simple_loss=0.3228, pruned_loss=0.09852, over 18367.00 frames. ], tot_loss[loss=0.269, simple_loss=0.3355, pruned_loss=0.1012, over 3597192.88 frames. ], batch size: 50, lr: 3.11e-02, grad_scale: 8.0 +2023-03-08 16:55:05,806 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10070.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:55:40,459 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10099.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:55:41,764 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10100.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 16:56:01,591 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10118.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:56:02,535 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.762e+02 4.646e+02 5.829e+02 7.162e+02 1.376e+03, threshold=1.166e+03, percent-clipped=0.0 +2023-03-08 16:56:02,571 INFO [train.py:898] (2/4) Epoch 3, batch 2850, loss[loss=0.2995, simple_loss=0.3543, pruned_loss=0.1223, over 12466.00 frames. ], tot_loss[loss=0.2692, simple_loss=0.336, pruned_loss=0.1012, over 3597054.34 frames. ], batch size: 129, lr: 3.10e-02, grad_scale: 8.0 +2023-03-08 16:56:07,299 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10123.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 16:57:01,503 INFO [train.py:898] (2/4) Epoch 3, batch 2900, loss[loss=0.2632, simple_loss=0.3364, pruned_loss=0.09494, over 18291.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.3358, pruned_loss=0.101, over 3590369.19 frames. 
], batch size: 49, lr: 3.09e-02, grad_scale: 4.0 +2023-03-08 16:57:23,409 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0042, 4.9872, 2.8178, 4.7912, 4.7079, 4.9277, 4.7684, 2.2806], + device='cuda:2'), covar=tensor([0.0144, 0.0058, 0.0688, 0.0065, 0.0054, 0.0094, 0.0100, 0.1155], + device='cuda:2'), in_proj_covar=tensor([0.0055, 0.0040, 0.0072, 0.0046, 0.0048, 0.0041, 0.0051, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-08 16:57:41,408 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5294, 5.9821, 5.2363, 5.6615, 5.4799, 5.6844, 6.0867, 6.0123], + device='cuda:2'), covar=tensor([0.0942, 0.0521, 0.0427, 0.0623, 0.1348, 0.0597, 0.0432, 0.0483], + device='cuda:2'), in_proj_covar=tensor([0.0334, 0.0270, 0.0217, 0.0294, 0.0405, 0.0294, 0.0295, 0.0254], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:2') +2023-03-08 16:58:00,077 INFO [train.py:898] (2/4) Epoch 3, batch 2950, loss[loss=0.2612, simple_loss=0.3345, pruned_loss=0.0939, over 18377.00 frames. ], tot_loss[loss=0.2694, simple_loss=0.3361, pruned_loss=0.1014, over 3584971.15 frames. ], batch size: 50, lr: 3.09e-02, grad_scale: 4.0 +2023-03-08 16:58:01,193 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.457e+02 4.931e+02 6.360e+02 7.565e+02 1.819e+03, threshold=1.272e+03, percent-clipped=6.0 +2023-03-08 16:58:25,435 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10240.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:58:59,070 INFO [train.py:898] (2/4) Epoch 3, batch 3000, loss[loss=0.2586, simple_loss=0.3329, pruned_loss=0.09215, over 18247.00 frames. ], tot_loss[loss=0.2689, simple_loss=0.3359, pruned_loss=0.1009, over 3575165.11 frames. ], batch size: 60, lr: 3.08e-02, grad_scale: 4.0 +2023-03-08 16:58:59,071 INFO [train.py:923] (2/4) Computing validation loss +2023-03-08 16:59:05,640 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2101, 2.6575, 3.6798, 3.1338, 2.2102, 3.7457, 3.5186, 2.3972], + device='cuda:2'), covar=tensor([0.0256, 0.0497, 0.0092, 0.0236, 0.1143, 0.0098, 0.0219, 0.0874], + device='cuda:2'), in_proj_covar=tensor([0.0096, 0.0108, 0.0071, 0.0075, 0.0145, 0.0083, 0.0085, 0.0142], + device='cuda:2'), out_proj_covar=tensor([9.4591e-05, 1.0461e-04, 7.4626e-05, 7.4121e-05, 1.4094e-04, 8.1319e-05, + 9.3014e-05, 1.4317e-04], device='cuda:2') +2023-03-08 16:59:10,962 INFO [train.py:932] (2/4) Epoch 3, validation: loss=0.2015, simple_loss=0.3025, pruned_loss=0.05021, over 944034.00 frames. 
+2023-03-08 16:59:10,963 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-08 16:59:26,402 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.6508, 5.5246, 4.9547, 5.5096, 5.5610, 5.0880, 5.4406, 4.9789], + device='cuda:2'), covar=tensor([0.0235, 0.0310, 0.1823, 0.0628, 0.0278, 0.0284, 0.0331, 0.0565], + device='cuda:2'), in_proj_covar=tensor([0.0230, 0.0255, 0.0405, 0.0201, 0.0186, 0.0232, 0.0252, 0.0290], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004], + device='cuda:2') +2023-03-08 16:59:28,718 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3617, 2.5920, 2.4651, 2.8267, 3.3402, 3.3150, 2.6681, 3.0088], + device='cuda:2'), covar=tensor([0.0464, 0.0455, 0.0911, 0.0458, 0.0357, 0.0282, 0.0473, 0.0218], + device='cuda:2'), in_proj_covar=tensor([0.0075, 0.0059, 0.0112, 0.0082, 0.0066, 0.0048, 0.0075, 0.0065], + device='cuda:2'), out_proj_covar=tensor([1.3628e-04, 1.0749e-04, 1.9008e-04, 1.3595e-04, 1.1974e-04, 8.1251e-05, + 1.3201e-04, 1.1140e-04], device='cuda:2') +2023-03-08 16:59:34,255 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10288.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 16:59:38,113 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.67 vs. limit=2.0 +2023-03-08 17:00:03,800 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10314.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:00:08,961 INFO [train.py:898] (2/4) Epoch 3, batch 3050, loss[loss=0.3143, simple_loss=0.356, pruned_loss=0.1363, over 12304.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3358, pruned_loss=0.1016, over 3562003.45 frames. ], batch size: 130, lr: 3.08e-02, grad_scale: 4.0 +2023-03-08 17:00:10,063 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.678e+02 5.040e+02 6.054e+02 8.050e+02 1.536e+03, threshold=1.211e+03, percent-clipped=3.0 +2023-03-08 17:00:41,558 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([1.8679, 4.0207, 5.0119, 3.7148, 2.7528, 2.1802, 4.4287, 4.9172], + device='cuda:2'), covar=tensor([0.1174, 0.0596, 0.0033, 0.0314, 0.0968, 0.1198, 0.0193, 0.0030], + device='cuda:2'), in_proj_covar=tensor([0.0117, 0.0086, 0.0051, 0.0103, 0.0134, 0.0139, 0.0101, 0.0049], + device='cuda:2'), out_proj_covar=tensor([1.8733e-04, 1.5212e-04, 8.4002e-05, 1.6536e-04, 2.0360e-04, 2.1350e-04, + 1.6287e-04, 7.8705e-05], device='cuda:2') +2023-03-08 17:00:59,201 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10362.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 17:01:06,332 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7389, 4.6162, 2.6811, 4.3896, 4.4720, 4.6995, 4.5071, 2.5333], + device='cuda:2'), covar=tensor([0.0180, 0.0061, 0.0708, 0.0093, 0.0065, 0.0053, 0.0103, 0.1083], + device='cuda:2'), in_proj_covar=tensor([0.0058, 0.0042, 0.0076, 0.0048, 0.0051, 0.0042, 0.0053, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-08 17:01:07,132 INFO [train.py:898] (2/4) Epoch 3, batch 3100, loss[loss=0.2466, simple_loss=0.3162, pruned_loss=0.08854, over 16032.00 frames. ], tot_loss[loss=0.2695, simple_loss=0.3365, pruned_loss=0.1013, over 3571364.39 frames. 
], batch size: 94, lr: 3.07e-02, grad_scale: 4.0
+2023-03-08 17:01:33,390 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2413, 5.1787, 4.4979, 5.1173, 5.1721, 4.5576, 5.0971, 4.5438],
+ device='cuda:2'), covar=tensor([0.0307, 0.0327, 0.1631, 0.0611, 0.0312, 0.0375, 0.0284, 0.0584],
+ device='cuda:2'), in_proj_covar=tensor([0.0240, 0.0261, 0.0422, 0.0207, 0.0193, 0.0239, 0.0262, 0.0301],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:2')
+2023-03-08 17:01:38,742 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10395.0, num_to_drop=1, layers_to_drop={2}
+2023-03-08 17:01:43,367 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10399.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:01:52,713 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4080, 2.1884, 1.9507, 2.1709, 2.4723, 2.5779, 1.9975, 2.2189],
+ device='cuda:2'), covar=tensor([0.0323, 0.0292, 0.0696, 0.0301, 0.0273, 0.0121, 0.0458, 0.0226],
+ device='cuda:2'), in_proj_covar=tensor([0.0073, 0.0059, 0.0116, 0.0079, 0.0066, 0.0048, 0.0075, 0.0067],
+ device='cuda:2'), out_proj_covar=tensor([1.3307e-04, 1.0767e-04, 1.9623e-04, 1.3242e-04, 1.1959e-04, 8.1021e-05,
+ 1.3306e-04, 1.1654e-04], device='cuda:2')
+2023-03-08 17:01:55,020 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10409.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:02:05,054 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10418.0, num_to_drop=1, layers_to_drop={3}
+2023-03-08 17:02:06,000 INFO [train.py:898] (2/4) Epoch 3, batch 3150, loss[loss=0.2631, simple_loss=0.3364, pruned_loss=0.09485, over 16943.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.335, pruned_loss=0.1003, over 3568373.92 frames. ], batch size: 78, lr: 3.06e-02, grad_scale: 4.0
+2023-03-08 17:02:07,190 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.149e+02 4.695e+02 5.685e+02 7.343e+02 1.972e+03, threshold=1.137e+03, percent-clipped=4.0
+2023-03-08 17:02:28,952 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0889, 2.0984, 3.6423, 2.6826, 3.0814, 3.6625, 3.3315, 3.1577],
+ device='cuda:2'), covar=tensor([0.0174, 0.0568, 0.0136, 0.0410, 0.0703, 0.0063, 0.0182, 0.0152],
+ device='cuda:2'), in_proj_covar=tensor([0.0107, 0.0135, 0.0071, 0.0138, 0.0220, 0.0079, 0.0113, 0.0107],
+ device='cuda:2'), out_proj_covar=tensor([8.9282e-05, 1.1057e-04, 6.2244e-05, 1.0650e-04, 1.7719e-04, 5.9079e-05,
+ 9.4760e-05, 8.6286e-05], device='cuda:2')
+2023-03-08 17:02:30,025 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0289, 5.2354, 3.1620, 4.9955, 4.8693, 5.2108, 4.9775, 2.6805],
+ device='cuda:2'), covar=tensor([0.0157, 0.0034, 0.0557, 0.0056, 0.0060, 0.0043, 0.0085, 0.1000],
+ device='cuda:2'), in_proj_covar=tensor([0.0056, 0.0041, 0.0074, 0.0047, 0.0051, 0.0041, 0.0052, 0.0084],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003],
+ device='cuda:2')
+2023-03-08 17:02:40,171 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10447.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:02:48,075 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10454.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:03:02,687 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6772, 1.9119, 4.8048, 3.0488, 3.7250, 4.9003, 4.5393, 4.5756],
+ device='cuda:2'), covar=tensor([0.0226, 0.0737, 0.0128, 0.0473, 0.0952, 0.0027, 0.0157, 0.0115],
+ device='cuda:2'), in_proj_covar=tensor([0.0107, 0.0135, 0.0070, 0.0137, 0.0220, 0.0079, 0.0112, 0.0107],
+ device='cuda:2'), out_proj_covar=tensor([8.9119e-05, 1.1114e-04, 6.1597e-05, 1.0615e-04, 1.7654e-04, 5.9236e-05,
+ 9.4307e-05, 8.5827e-05], device='cuda:2')
+2023-03-08 17:03:03,966 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.28 vs. limit=5.0
+2023-03-08 17:03:04,452 INFO [train.py:898] (2/4) Epoch 3, batch 3200, loss[loss=0.273, simple_loss=0.3365, pruned_loss=0.1048, over 17715.00 frames. ], tot_loss[loss=0.2672, simple_loss=0.3343, pruned_loss=0.1, over 3579925.06 frames. ], batch size: 70, lr: 3.06e-02, grad_scale: 8.0
+2023-03-08 17:03:05,901 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10470.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:03:08,330 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.0983, 4.0239, 5.0031, 3.7943, 3.1228, 2.4479, 4.3616, 5.0069],
+ device='cuda:2'), covar=tensor([0.1084, 0.0539, 0.0036, 0.0275, 0.0772, 0.1125, 0.0222, 0.0027],
+ device='cuda:2'), in_proj_covar=tensor([0.0119, 0.0089, 0.0053, 0.0104, 0.0134, 0.0140, 0.0104, 0.0050],
+ device='cuda:2'), out_proj_covar=tensor([1.9082e-04, 1.5736e-04, 8.7018e-05, 1.6791e-04, 2.0448e-04, 2.1437e-04,
+ 1.6734e-04, 8.1304e-05], device='cuda:2')
+2023-03-08 17:03:50,052 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4295, 2.4739, 3.7105, 3.5852, 2.3079, 3.8672, 3.6318, 2.4892],
+ device='cuda:2'), covar=tensor([0.0198, 0.0660, 0.0095, 0.0148, 0.1118, 0.0089, 0.0159, 0.0839],
+ device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0117, 0.0074, 0.0081, 0.0147, 0.0085, 0.0086, 0.0144],
+ device='cuda:2'), out_proj_covar=tensor([9.8212e-05, 1.1351e-04, 7.7836e-05, 7.9559e-05, 1.4306e-04, 8.3661e-05,
+ 9.4077e-05, 1.4569e-04], device='cuda:2')
+2023-03-08 17:03:59,363 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10515.0, num_to_drop=1, layers_to_drop={3}
+2023-03-08 17:04:03,373 INFO [train.py:898] (2/4) Epoch 3, batch 3250, loss[loss=0.284, simple_loss=0.3489, pruned_loss=0.1095, over 18502.00 frames. ], tot_loss[loss=0.2674, simple_loss=0.3342, pruned_loss=0.1003, over 3573635.32 frames. ], batch size: 59, lr: 3.05e-02, grad_scale: 8.0
+2023-03-08 17:04:04,540 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.357e+02 4.994e+02 6.141e+02 8.166e+02 2.371e+03, threshold=1.228e+03, percent-clipped=6.0
+2023-03-08 17:04:55,759 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.20 vs. limit=5.0
+2023-03-08 17:05:01,932 INFO [train.py:898] (2/4) Epoch 3, batch 3300, loss[loss=0.2341, simple_loss=0.2976, pruned_loss=0.08528, over 18499.00 frames. ], tot_loss[loss=0.2679, simple_loss=0.335, pruned_loss=0.1004, over 3571913.46 frames. ], batch size: 44, lr: 3.05e-02, grad_scale: 8.0
+2023-03-08 17:05:30,833 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0
+2023-03-08 17:06:01,152 INFO [train.py:898] (2/4) Epoch 3, batch 3350, loss[loss=0.2599, simple_loss=0.3339, pruned_loss=0.09295, over 15982.00 frames. ], tot_loss[loss=0.2666, simple_loss=0.3338, pruned_loss=0.09964, over 3574391.37 frames. ], batch size: 94, lr: 3.04e-02, grad_scale: 8.0
+2023-03-08 17:06:02,284 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.520e+02 5.384e+02 6.304e+02 8.125e+02 1.835e+03, threshold=1.261e+03, percent-clipped=2.0
+2023-03-08 17:06:20,519 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6809, 3.1625, 3.3572, 2.6324, 2.5173, 2.5441, 2.0355, 1.9502],
+ device='cuda:2'), covar=tensor([0.0201, 0.0145, 0.0056, 0.0190, 0.0304, 0.0142, 0.0565, 0.0598],
+ device='cuda:2'), in_proj_covar=tensor([0.0028, 0.0029, 0.0023, 0.0030, 0.0044, 0.0024, 0.0048, 0.0050],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-08 17:06:58,359 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.0581, 3.9848, 4.9844, 3.8785, 2.6604, 2.3903, 4.4827, 5.0734],
+ device='cuda:2'), covar=tensor([0.1240, 0.0739, 0.0035, 0.0319, 0.1063, 0.1283, 0.0236, 0.0027],
+ device='cuda:2'), in_proj_covar=tensor([0.0119, 0.0093, 0.0053, 0.0104, 0.0135, 0.0140, 0.0107, 0.0049],
+ device='cuda:2'), out_proj_covar=tensor([1.9210e-04, 1.6370e-04, 8.8402e-05, 1.6840e-04, 2.0624e-04, 2.1626e-04,
+ 1.7184e-04, 8.0651e-05], device='cuda:2')
+2023-03-08 17:06:59,030 INFO [train.py:898] (2/4) Epoch 3, batch 3400, loss[loss=0.2426, simple_loss=0.3263, pruned_loss=0.07948, over 18474.00 frames. ], tot_loss[loss=0.2659, simple_loss=0.3334, pruned_loss=0.09923, over 3580554.25 frames. ], batch size: 53, lr: 3.03e-02, grad_scale: 8.0
+2023-03-08 17:07:30,294 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10695.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:07:30,746 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0
+2023-03-08 17:07:56,796 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10718.0, num_to_drop=1, layers_to_drop={0}
+2023-03-08 17:07:57,571 INFO [train.py:898] (2/4) Epoch 3, batch 3450, loss[loss=0.248, simple_loss=0.3135, pruned_loss=0.09126, over 18445.00 frames. ], tot_loss[loss=0.267, simple_loss=0.3339, pruned_loss=0.09999, over 3572011.52 frames. ], batch size: 43, lr: 3.03e-02, grad_scale: 8.0
+2023-03-08 17:07:58,709 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.826e+02 5.643e+02 6.682e+02 8.838e+02 1.430e+03, threshold=1.336e+03, percent-clipped=8.0
+2023-03-08 17:08:24,823 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10743.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:08:26,093 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10744.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:08:51,323 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10765.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:08:52,389 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=10766.0, num_to_drop=1, layers_to_drop={1}
+2023-03-08 17:08:55,406 INFO [train.py:898] (2/4) Epoch 3, batch 3500, loss[loss=0.2781, simple_loss=0.3427, pruned_loss=0.1068, over 18272.00 frames. ], tot_loss[loss=0.267, simple_loss=0.3342, pruned_loss=0.09992, over 3558266.78 frames. ], batch size: 57, lr: 3.02e-02, grad_scale: 8.0
+2023-03-08 17:09:01,644 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10774.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:09:31,078 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5264, 5.4167, 4.7436, 5.4732, 5.4500, 4.8124, 5.3756, 4.8980],
+ device='cuda:2'), covar=tensor([0.0287, 0.0295, 0.1789, 0.0585, 0.0272, 0.0383, 0.0282, 0.0463],
+ device='cuda:2'), in_proj_covar=tensor([0.0244, 0.0264, 0.0424, 0.0213, 0.0195, 0.0243, 0.0264, 0.0307],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:2')
+2023-03-08 17:09:36,665 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10805.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:09:41,878 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10810.0, num_to_drop=1, layers_to_drop={1}
+2023-03-08 17:09:51,965 INFO [train.py:898] (2/4) Epoch 3, batch 3550, loss[loss=0.2728, simple_loss=0.3376, pruned_loss=0.104, over 18238.00 frames. ], tot_loss[loss=0.264, simple_loss=0.3318, pruned_loss=0.09809, over 3574551.96 frames. ], batch size: 60, lr: 3.02e-02, grad_scale: 8.0
+2023-03-08 17:09:52,948 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.843e+02 4.878e+02 6.150e+02 7.630e+02 1.368e+03, threshold=1.230e+03, percent-clipped=1.0
+2023-03-08 17:10:09,081 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10835.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:10:19,915 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7320, 4.5995, 4.7354, 4.4956, 4.4711, 4.6635, 4.9269, 5.0362],
+ device='cuda:2'), covar=tensor([0.0067, 0.0090, 0.0107, 0.0091, 0.0094, 0.0087, 0.0076, 0.0077],
+ device='cuda:2'), in_proj_covar=tensor([0.0060, 0.0048, 0.0045, 0.0056, 0.0052, 0.0064, 0.0052, 0.0050],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-08 17:10:46,351 INFO [train.py:898] (2/4) Epoch 3, batch 3600, loss[loss=0.2638, simple_loss=0.3399, pruned_loss=0.09382, over 17953.00 frames. ], tot_loss[loss=0.2643, simple_loss=0.3318, pruned_loss=0.09838, over 3580511.61 frames. ], batch size: 65, lr: 3.01e-02, grad_scale: 8.0
+2023-03-08 17:10:50,143 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0
+2023-03-08 17:11:03,918 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10885.0, num_to_drop=1, layers_to_drop={0}
+2023-03-08 17:11:51,265 INFO [train.py:898] (2/4) Epoch 4, batch 0, loss[loss=0.2563, simple_loss=0.3246, pruned_loss=0.09401, over 18391.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3246, pruned_loss=0.09401, over 18391.00 frames. ], batch size: 50, lr: 2.81e-02, grad_scale: 8.0
+2023-03-08 17:11:51,265 INFO [train.py:923] (2/4) Computing validation loss
+2023-03-08 17:12:03,129 INFO [train.py:932] (2/4) Epoch 4, validation: loss=0.2018, simple_loss=0.3032, pruned_loss=0.05022, over 944034.00 frames.
+2023-03-08 17:12:03,130 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB
+2023-03-08 17:12:21,318 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0
+2023-03-08 17:12:22,935 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.062e+02 5.140e+02 6.071e+02 7.420e+02 1.697e+03, threshold=1.214e+03, percent-clipped=4.0
+2023-03-08 17:12:55,147 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10946.0, num_to_drop=1, layers_to_drop={1}
+2023-03-08 17:13:02,668 INFO [train.py:898] (2/4) Epoch 4, batch 50, loss[loss=0.2757, simple_loss=0.3513, pruned_loss=0.1001, over 18345.00 frames. ], tot_loss[loss=0.2638, simple_loss=0.332, pruned_loss=0.09786, over 813023.87 frames. ], batch size: 56, lr: 2.81e-02, grad_scale: 8.0
+2023-03-08 17:13:05,401 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.2255, 4.2540, 5.2016, 3.9785, 2.8025, 2.6508, 4.6028, 5.2717],
+ device='cuda:2'), covar=tensor([0.0980, 0.0496, 0.0036, 0.0279, 0.0876, 0.0983, 0.0197, 0.0020],
+ device='cuda:2'), in_proj_covar=tensor([0.0119, 0.0096, 0.0055, 0.0105, 0.0138, 0.0142, 0.0108, 0.0051],
+ device='cuda:2'), out_proj_covar=tensor([1.9227e-04, 1.6904e-04, 9.1365e-05, 1.7116e-04, 2.1167e-04, 2.1999e-04,
+ 1.7434e-04, 8.4419e-05], device='cuda:2')
+2023-03-08 17:14:00,535 INFO [train.py:898] (2/4) Epoch 4, batch 100, loss[loss=0.2852, simple_loss=0.3538, pruned_loss=0.1083, over 17804.00 frames. ], tot_loss[loss=0.2659, simple_loss=0.3337, pruned_loss=0.09908, over 1427837.38 frames. ], batch size: 70, lr: 2.80e-02, grad_scale: 8.0
+2023-03-08 17:14:19,670 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.738e+02 4.868e+02 5.817e+02 7.449e+02 2.139e+03, threshold=1.163e+03, percent-clipped=6.0
+2023-03-08 17:14:58,818 INFO [train.py:898] (2/4) Epoch 4, batch 150, loss[loss=0.2967, simple_loss=0.3607, pruned_loss=0.1164, over 17061.00 frames. ], tot_loss[loss=0.2629, simple_loss=0.3316, pruned_loss=0.09708, over 1904442.10 frames. ], batch size: 78, lr: 2.80e-02, grad_scale: 8.0
+2023-03-08 17:15:12,453 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11065.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:15:54,009 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11100.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:15:57,091 INFO [train.py:898] (2/4) Epoch 4, batch 200, loss[loss=0.2795, simple_loss=0.3518, pruned_loss=0.1036, over 18551.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3308, pruned_loss=0.09617, over 2290081.68 frames. ], batch size: 54, lr: 2.79e-02, grad_scale: 8.0
+2023-03-08 17:16:00,975 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5642, 1.9108, 4.5369, 3.0178, 3.5200, 4.6458, 4.4634, 4.0784],
+ device='cuda:2'), covar=tensor([0.0245, 0.0716, 0.0154, 0.0480, 0.0972, 0.0025, 0.0171, 0.0191],
+ device='cuda:2'), in_proj_covar=tensor([0.0112, 0.0145, 0.0075, 0.0146, 0.0228, 0.0081, 0.0117, 0.0109],
+ device='cuda:2'), out_proj_covar=tensor([9.2963e-05, 1.1918e-04, 6.5650e-05, 1.1222e-04, 1.8284e-04, 5.9810e-05,
+ 9.7052e-05, 8.7487e-05], device='cuda:2')
+2023-03-08 17:16:05,306 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11110.0, num_to_drop=1, layers_to_drop={1}
+2023-03-08 17:16:08,776 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11113.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:16:16,249 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.946e+02 4.745e+02 5.920e+02 7.633e+02 1.560e+03, threshold=1.184e+03, percent-clipped=4.0
+2023-03-08 17:16:27,671 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11130.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:16:55,263 INFO [train.py:898] (2/4) Epoch 4, batch 250, loss[loss=0.2828, simple_loss=0.3555, pruned_loss=0.1051, over 18475.00 frames. ], tot_loss[loss=0.259, simple_loss=0.3283, pruned_loss=0.09487, over 2588877.24 frames. ], batch size: 59, lr: 2.79e-02, grad_scale: 8.0
+2023-03-08 17:17:01,242 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11158.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:17:04,824 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1513, 4.4660, 1.8928, 4.4277, 5.0823, 2.2495, 3.5010, 3.5749],
+ device='cuda:2'), covar=tensor([0.0054, 0.0537, 0.1758, 0.0345, 0.0045, 0.1477, 0.0675, 0.0788],
+ device='cuda:2'), in_proj_covar=tensor([0.0071, 0.0116, 0.0161, 0.0139, 0.0064, 0.0150, 0.0161, 0.0154],
+ device='cuda:2'), out_proj_covar=tensor([9.3289e-05, 1.6916e-04, 1.9238e-04, 1.7335e-04, 8.5599e-05, 1.8495e-04,
+ 1.9731e-04, 1.9755e-04], device='cuda:2')
+2023-03-08 17:17:11,525 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8560, 2.8124, 3.9167, 4.3731, 2.3688, 4.4355, 3.7011, 2.5748],
+ device='cuda:2'), covar=tensor([0.0205, 0.0746, 0.0103, 0.0095, 0.1245, 0.0069, 0.0207, 0.1001],
+ device='cuda:2'), in_proj_covar=tensor([0.0110, 0.0134, 0.0080, 0.0085, 0.0158, 0.0096, 0.0096, 0.0152],
+ device='cuda:2'), out_proj_covar=tensor([1.0698e-04, 1.3095e-04, 8.3774e-05, 8.2727e-05, 1.5345e-04, 9.1964e-05,
+ 1.0304e-04, 1.5400e-04], device='cuda:2')
+2023-03-08 17:17:19,441 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11174.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:17:27,090 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0015, 4.8036, 4.8957, 4.9780, 4.7511, 4.8062, 5.3619, 5.3306],
+ device='cuda:2'), covar=tensor([0.0057, 0.0090, 0.0090, 0.0065, 0.0074, 0.0094, 0.0064, 0.0079],
+ device='cuda:2'), in_proj_covar=tensor([0.0060, 0.0047, 0.0045, 0.0057, 0.0052, 0.0064, 0.0053, 0.0050],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-08 17:17:51,146 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2647, 4.2087, 4.3475, 3.3015, 3.0166, 2.7318, 2.2094, 1.5951],
+ device='cuda:2'), covar=tensor([0.0229, 0.0120, 0.0049, 0.0194, 0.0335, 0.0152, 0.0663, 0.0836],
+ device='cuda:2'), in_proj_covar=tensor([0.0029, 0.0030, 0.0025, 0.0032, 0.0049, 0.0024, 0.0049, 0.0052],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0001, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-08 17:17:54,162 INFO [train.py:898] (2/4) Epoch 4, batch 300, loss[loss=0.2955, simple_loss=0.3632, pruned_loss=0.1139, over 15934.00 frames. ], tot_loss[loss=0.261, simple_loss=0.3302, pruned_loss=0.0959, over 2795204.03 frames. ], batch size: 94, lr: 2.78e-02, grad_scale: 8.0
+2023-03-08 17:18:13,960 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.374e+02 4.613e+02 5.593e+02 6.411e+02 1.138e+03, threshold=1.119e+03, percent-clipped=0.0
+2023-03-08 17:18:31,481 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11235.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:18:38,665 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11241.0, num_to_drop=1, layers_to_drop={2}
+2023-03-08 17:18:53,642 INFO [train.py:898] (2/4) Epoch 4, batch 350, loss[loss=0.2883, simple_loss=0.3573, pruned_loss=0.1097, over 17624.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.3294, pruned_loss=0.09486, over 2969247.19 frames. ], batch size: 70, lr: 2.78e-02, grad_scale: 8.0
+2023-03-08 17:19:18,281 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9951, 4.8710, 4.8762, 4.7804, 4.7959, 4.7545, 5.2065, 5.2177],
+ device='cuda:2'), covar=tensor([0.0059, 0.0087, 0.0106, 0.0076, 0.0082, 0.0110, 0.0090, 0.0098],
+ device='cuda:2'), in_proj_covar=tensor([0.0061, 0.0047, 0.0045, 0.0057, 0.0053, 0.0065, 0.0055, 0.0051],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-08 17:19:52,173 INFO [train.py:898] (2/4) Epoch 4, batch 400, loss[loss=0.2154, simple_loss=0.2841, pruned_loss=0.07333, over 18155.00 frames. ], tot_loss[loss=0.2577, simple_loss=0.3278, pruned_loss=0.09382, over 3105658.08 frames. ], batch size: 44, lr: 2.77e-02, grad_scale: 8.0
+2023-03-08 17:20:04,704 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.3082, 4.1760, 4.3796, 4.1437, 4.1033, 4.1887, 4.4283, 4.4563],
+ device='cuda:2'), covar=tensor([0.0078, 0.0105, 0.0083, 0.0088, 0.0094, 0.0106, 0.0097, 0.0084],
+ device='cuda:2'), in_proj_covar=tensor([0.0060, 0.0046, 0.0045, 0.0057, 0.0052, 0.0064, 0.0055, 0.0050],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-08 17:20:10,948 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.476e+02 4.514e+02 5.687e+02 6.910e+02 1.325e+03, threshold=1.137e+03, percent-clipped=4.0
+2023-03-08 17:20:25,364 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0
+2023-03-08 17:20:44,451 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6827, 5.2226, 5.3958, 5.0008, 5.0348, 5.1283, 4.5746, 5.1463],
+ device='cuda:2'), covar=tensor([0.0209, 0.0292, 0.0139, 0.0211, 0.0285, 0.0216, 0.0916, 0.0216],
+ device='cuda:2'), in_proj_covar=tensor([0.0109, 0.0144, 0.0130, 0.0118, 0.0139, 0.0141, 0.0203, 0.0126],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0004, 0.0004, 0.0004, 0.0005, 0.0004],
+ device='cuda:2')
+2023-03-08 17:20:50,473 INFO [train.py:898] (2/4) Epoch 4, batch 450, loss[loss=0.3116, simple_loss=0.3744, pruned_loss=0.1244, over 18457.00 frames. ], tot_loss[loss=0.2567, simple_loss=0.3269, pruned_loss=0.09329, over 3220445.44 frames. ], batch size: 59, lr: 2.77e-02, grad_scale: 8.0
+2023-03-08 17:21:46,161 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11400.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:21:49,041 INFO [train.py:898] (2/4) Epoch 4, batch 500, loss[loss=0.2724, simple_loss=0.342, pruned_loss=0.1014, over 17730.00 frames. ], tot_loss[loss=0.2565, simple_loss=0.3266, pruned_loss=0.09316, over 3308937.26 frames. ], batch size: 70, lr: 2.76e-02, grad_scale: 4.0
+2023-03-08 17:22:09,638 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.774e+02 5.147e+02 6.792e+02 8.631e+02 2.583e+03, threshold=1.358e+03, percent-clipped=9.0
+2023-03-08 17:22:20,435 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11430.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:22:40,089 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6872, 3.4508, 3.1512, 2.6927, 3.2403, 2.5926, 2.4249, 3.5640],
+ device='cuda:2'), covar=tensor([0.0036, 0.0085, 0.0077, 0.0147, 0.0077, 0.0183, 0.0241, 0.0058],
+ device='cuda:2'), in_proj_covar=tensor([0.0043, 0.0049, 0.0050, 0.0078, 0.0052, 0.0080, 0.0091, 0.0049],
+ device='cuda:2'), out_proj_covar=tensor([6.4416e-05, 7.8070e-05, 8.2358e-05, 1.2294e-04, 8.1075e-05, 1.2580e-04,
+ 1.4959e-04, 7.9236e-05], device='cuda:2')
+2023-03-08 17:22:40,931 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11448.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:22:46,349 INFO [train.py:898] (2/4) Epoch 4, batch 550, loss[loss=0.2932, simple_loss=0.354, pruned_loss=0.1162, over 17981.00 frames. ], tot_loss[loss=0.2569, simple_loss=0.327, pruned_loss=0.09343, over 3373422.71 frames. ], batch size: 65, lr: 2.76e-02, grad_scale: 4.0
+2023-03-08 17:22:49,171 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3079, 3.1680, 1.8007, 4.0827, 2.7920, 4.2835, 2.0003, 3.7281],
+ device='cuda:2'), covar=tensor([0.0645, 0.0954, 0.1623, 0.0335, 0.1035, 0.0094, 0.1326, 0.0313],
+ device='cuda:2'), in_proj_covar=tensor([0.0136, 0.0176, 0.0158, 0.0123, 0.0153, 0.0087, 0.0157, 0.0140],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-08 17:23:17,135 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11478.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:23:45,233 INFO [train.py:898] (2/4) Epoch 4, batch 600, loss[loss=0.2404, simple_loss=0.3111, pruned_loss=0.08486, over 18541.00 frames. ], tot_loss[loss=0.2572, simple_loss=0.3271, pruned_loss=0.09367, over 3422614.61 frames. ], batch size: 49, lr: 2.75e-02, grad_scale: 4.0
+2023-03-08 17:24:07,206 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.433e+02 4.507e+02 5.343e+02 7.594e+02 1.826e+03, threshold=1.069e+03, percent-clipped=2.0
+2023-03-08 17:24:17,749 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11530.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:24:20,114 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4854, 5.0569, 5.5995, 5.3985, 5.2731, 5.9911, 5.6035, 5.5069],
+ device='cuda:2'), covar=tensor([0.0648, 0.0472, 0.0562, 0.0432, 0.1133, 0.0623, 0.0558, 0.1200],
+ device='cuda:2'), in_proj_covar=tensor([0.0208, 0.0156, 0.0162, 0.0160, 0.0207, 0.0229, 0.0152, 0.0226],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003],
+ device='cuda:2')
+2023-03-08 17:24:20,236 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11532.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:24:30,023 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11541.0, num_to_drop=1, layers_to_drop={2}
+2023-03-08 17:24:43,295 INFO [train.py:898] (2/4) Epoch 4, batch 650, loss[loss=0.2518, simple_loss=0.3323, pruned_loss=0.08563, over 18487.00 frames. ], tot_loss[loss=0.2565, simple_loss=0.3266, pruned_loss=0.09326, over 3471702.93 frames. ], batch size: 53, lr: 2.75e-02, grad_scale: 4.0
+2023-03-08 17:25:25,675 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11589.0, num_to_drop=1, layers_to_drop={1}
+2023-03-08 17:25:30,272 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4375, 3.3039, 1.7800, 4.2442, 2.8624, 4.4565, 2.1536, 4.0267],
+ device='cuda:2'), covar=tensor([0.0564, 0.1085, 0.1832, 0.0390, 0.1098, 0.0127, 0.1373, 0.0319],
+ device='cuda:2'), in_proj_covar=tensor([0.0128, 0.0171, 0.0151, 0.0121, 0.0148, 0.0084, 0.0150, 0.0136],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-08 17:25:30,298 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11593.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:25:41,338 INFO [train.py:898] (2/4) Epoch 4, batch 700, loss[loss=0.245, simple_loss=0.3186, pruned_loss=0.08575, over 18363.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.326, pruned_loss=0.09327, over 3499410.52 frames. ], batch size: 46, lr: 2.74e-02, grad_scale: 4.0
+2023-03-08 17:25:46,727 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0
+2023-03-08 17:26:03,621 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 5.310e+02 6.723e+02 8.319e+02 1.846e+03, threshold=1.345e+03, percent-clipped=5.0
+2023-03-08 17:26:19,727 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11635.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:26:39,989 INFO [train.py:898] (2/4) Epoch 4, batch 750, loss[loss=0.237, simple_loss=0.2916, pruned_loss=0.09118, over 18411.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.3254, pruned_loss=0.09293, over 3520581.85 frames. ], batch size: 43, lr: 2.74e-02, grad_scale: 4.0
+2023-03-08 17:27:31,260 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11696.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:27:34,694 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11699.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:27:38,713 INFO [train.py:898] (2/4) Epoch 4, batch 800, loss[loss=0.2687, simple_loss=0.3368, pruned_loss=0.1003, over 18490.00 frames. ], tot_loss[loss=0.2557, simple_loss=0.3254, pruned_loss=0.09303, over 3525727.16 frames. ], batch size: 51, lr: 2.73e-02, grad_scale: 8.0
+2023-03-08 17:27:42,811 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.68 vs. limit=2.0
+2023-03-08 17:28:00,219 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.738e+02 5.059e+02 5.928e+02 8.034e+02 1.891e+03, threshold=1.186e+03, percent-clipped=2.0
+2023-03-08 17:28:37,659 INFO [train.py:898] (2/4) Epoch 4, batch 850, loss[loss=0.2636, simple_loss=0.336, pruned_loss=0.09563, over 18472.00 frames. ], tot_loss[loss=0.257, simple_loss=0.3266, pruned_loss=0.0937, over 3533572.07 frames. ], batch size: 53, lr: 2.73e-02, grad_scale: 8.0
+2023-03-08 17:28:46,038 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11760.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:29:37,490 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0
+2023-03-08 17:29:37,623 INFO [train.py:898] (2/4) Epoch 4, batch 900, loss[loss=0.2498, simple_loss=0.3273, pruned_loss=0.08612, over 18379.00 frames. ], tot_loss[loss=0.257, simple_loss=0.3266, pruned_loss=0.09368, over 3539234.03 frames. ], batch size: 55, lr: 2.72e-02, grad_scale: 8.0
+2023-03-08 17:29:52,607 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11816.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:29:58,020 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.973e+02 4.760e+02 5.740e+02 6.849e+02 1.554e+03, threshold=1.148e+03, percent-clipped=4.0
+2023-03-08 17:30:11,186 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11830.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:30:29,230 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11846.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:30:33,753 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2228, 5.2146, 4.4884, 5.1412, 5.1609, 4.5231, 5.1349, 4.6649],
+ device='cuda:2'), covar=tensor([0.0291, 0.0301, 0.1675, 0.0518, 0.0283, 0.0355, 0.0256, 0.0551],
+ device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0277, 0.0441, 0.0228, 0.0210, 0.0258, 0.0273, 0.0328],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:2')
+2023-03-08 17:30:36,937 INFO [train.py:898] (2/4) Epoch 4, batch 950, loss[loss=0.2256, simple_loss=0.2916, pruned_loss=0.07983, over 17634.00 frames. ], tot_loss[loss=0.2546, simple_loss=0.3248, pruned_loss=0.09225, over 3556132.77 frames. ], batch size: 39, lr: 2.72e-02, grad_scale: 8.0
+2023-03-08 17:31:05,066 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11877.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:31:06,064 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=11878.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:31:18,406 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11888.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:31:35,206 INFO [train.py:898] (2/4) Epoch 4, batch 1000, loss[loss=0.2223, simple_loss=0.2871, pruned_loss=0.07879, over 18502.00 frames. ], tot_loss[loss=0.2556, simple_loss=0.326, pruned_loss=0.09259, over 3556802.96 frames. ], batch size: 44, lr: 2.71e-02, grad_scale: 8.0
+2023-03-08 17:31:40,096 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11907.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:31:49,680 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.96 vs. limit=2.0
+2023-03-08 17:31:54,942 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2195, 5.3253, 2.8275, 4.9896, 5.0095, 5.3193, 5.1341, 2.4141],
+ device='cuda:2'), covar=tensor([0.0146, 0.0056, 0.0682, 0.0066, 0.0064, 0.0066, 0.0099, 0.1231],
+ device='cuda:2'), in_proj_covar=tensor([0.0057, 0.0044, 0.0076, 0.0052, 0.0053, 0.0045, 0.0056, 0.0084],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-08 17:31:54,978 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11920.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:31:55,671 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.070e+02 4.938e+02 5.692e+02 6.896e+02 1.055e+03, threshold=1.138e+03, percent-clipped=0.0
+2023-03-08 17:32:28,208 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11948.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:32:33,399 INFO [train.py:898] (2/4) Epoch 4, batch 1050, loss[loss=0.2428, simple_loss=0.3114, pruned_loss=0.08705, over 18392.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3249, pruned_loss=0.092, over 3575057.42 frames. ], batch size: 48, lr: 2.71e-02, grad_scale: 8.0
+2023-03-08 17:33:05,208 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11981.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:33:08,842 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11984.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:33:14,214 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.94 vs. limit=5.0
+2023-03-08 17:33:17,594 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11991.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:33:35,194 INFO [train.py:898] (2/4) Epoch 4, batch 1100, loss[loss=0.2515, simple_loss=0.3291, pruned_loss=0.08698, over 18490.00 frames. ], tot_loss[loss=0.2547, simple_loss=0.3249, pruned_loss=0.09227, over 3576216.78 frames. ], batch size: 53, lr: 2.70e-02, grad_scale: 4.0
+2023-03-08 17:33:42,514 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12009.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:33:56,849 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.224e+02 5.074e+02 6.054e+02 6.990e+02 2.904e+03, threshold=1.211e+03, percent-clipped=5.0
+2023-03-08 17:34:25,535 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12045.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:34:34,310 INFO [train.py:898] (2/4) Epoch 4, batch 1150, loss[loss=0.2113, simple_loss=0.2835, pruned_loss=0.06961, over 18374.00 frames. ], tot_loss[loss=0.2538, simple_loss=0.324, pruned_loss=0.09182, over 3579622.20 frames. ], batch size: 46, lr: 2.70e-02, grad_scale: 4.0
+2023-03-08 17:34:36,785 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12055.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:34:40,478 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2741, 5.2637, 4.5790, 5.2335, 5.2404, 4.6899, 5.2185, 4.7529],
+ device='cuda:2'), covar=tensor([0.0304, 0.0288, 0.1519, 0.0529, 0.0327, 0.0342, 0.0292, 0.0480],
+ device='cuda:2'), in_proj_covar=tensor([0.0262, 0.0280, 0.0430, 0.0231, 0.0211, 0.0259, 0.0279, 0.0327],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:2')
+2023-03-08 17:35:00,892 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12076.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:35:23,377 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.2427, 3.9441, 4.8476, 3.8719, 2.9836, 2.5693, 4.1341, 4.9900],
+ device='cuda:2'), covar=tensor([0.0865, 0.0625, 0.0062, 0.0298, 0.0762, 0.0956, 0.0226, 0.0027],
+ device='cuda:2'), in_proj_covar=tensor([0.0122, 0.0113, 0.0058, 0.0113, 0.0142, 0.0147, 0.0113, 0.0056],
+ device='cuda:2'), out_proj_covar=tensor([1.9845e-04, 1.9451e-04, 9.9382e-05, 1.8396e-04, 2.1915e-04, 2.2989e-04,
+ 1.8275e-04, 9.0611e-05], device='cuda:2')
+2023-03-08 17:35:32,889 INFO [train.py:898] (2/4) Epoch 4, batch 1200, loss[loss=0.2029, simple_loss=0.2759, pruned_loss=0.06499, over 17702.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3243, pruned_loss=0.09227, over 3577363.50 frames. ], batch size: 39, lr: 2.69e-02, grad_scale: 8.0
+2023-03-08 17:35:34,793 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0
+2023-03-08 17:35:54,813 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.657e+02 4.866e+02 5.977e+02 7.705e+02 1.703e+03, threshold=1.195e+03, percent-clipped=4.0
+2023-03-08 17:36:12,379 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12137.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:36:31,683 INFO [train.py:898] (2/4) Epoch 4, batch 1250, loss[loss=0.2582, simple_loss=0.3353, pruned_loss=0.0906, over 18352.00 frames. ], tot_loss[loss=0.2563, simple_loss=0.3262, pruned_loss=0.09323, over 3585556.95 frames. ], batch size: 50, lr: 2.69e-02, grad_scale: 8.0
+2023-03-08 17:36:40,591 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0
+2023-03-08 17:36:49,009 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3188, 4.5617, 2.1566, 4.8478, 5.4152, 2.4687, 4.2899, 4.1631],
+ device='cuda:2'), covar=tensor([0.0068, 0.0723, 0.1592, 0.0290, 0.0035, 0.1481, 0.0490, 0.0629],
+ device='cuda:2'), in_proj_covar=tensor([0.0076, 0.0129, 0.0166, 0.0150, 0.0070, 0.0158, 0.0171, 0.0161],
+ device='cuda:2'), out_proj_covar=tensor([1.0059e-04, 1.8677e-04, 2.0409e-04, 1.8997e-04, 9.2801e-05, 2.0101e-04,
+ 2.1425e-04, 2.0977e-04], device='cuda:2')
+2023-03-08 17:36:53,297 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12172.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:36:59,378 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0
+2023-03-08 17:37:11,259 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12188.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:37:29,499 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12202.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:37:30,437 INFO [train.py:898] (2/4) Epoch 4, batch 1300, loss[loss=0.2228, simple_loss=0.2966, pruned_loss=0.07453, over 18377.00 frames. ], tot_loss[loss=0.2572, simple_loss=0.3269, pruned_loss=0.09372, over 3576615.48 frames. ], batch size: 46, lr: 2.68e-02, grad_scale: 4.0
+2023-03-08 17:37:52,909 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.709e+02 4.849e+02 5.902e+02 7.729e+02 1.516e+03, threshold=1.180e+03, percent-clipped=2.0
+2023-03-08 17:38:07,783 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12236.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:38:29,658 INFO [train.py:898] (2/4) Epoch 4, batch 1350, loss[loss=0.2182, simple_loss=0.291, pruned_loss=0.07276, over 18361.00 frames. ], tot_loss[loss=0.2567, simple_loss=0.327, pruned_loss=0.09326, over 3590922.34 frames. ], batch size: 46, lr: 2.68e-02, grad_scale: 4.0
+2023-03-08 17:38:34,654 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5919, 3.2773, 1.8347, 4.1919, 2.9756, 4.4918, 2.2979, 4.0539],
+ device='cuda:2'), covar=tensor([0.0448, 0.1008, 0.1741, 0.0303, 0.0934, 0.0090, 0.1210, 0.0325],
+ device='cuda:2'), in_proj_covar=tensor([0.0136, 0.0181, 0.0159, 0.0132, 0.0158, 0.0090, 0.0159, 0.0143],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-08 17:38:56,148 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12276.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:39:13,239 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12291.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:39:27,798 INFO [train.py:898] (2/4) Epoch 4, batch 1400, loss[loss=0.2514, simple_loss=0.3123, pruned_loss=0.09525, over 18128.00 frames. ], tot_loss[loss=0.2544, simple_loss=0.3248, pruned_loss=0.09202, over 3604062.35 frames. ], batch size: 44, lr: 2.67e-02, grad_scale: 4.0
+2023-03-08 17:39:29,715 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12304.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:39:51,202 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.094e+02 5.084e+02 6.027e+02 7.946e+02 1.309e+03, threshold=1.205e+03, percent-clipped=1.0
+2023-03-08 17:40:09,880 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12339.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:40:11,037 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12340.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:40:26,788 INFO [train.py:898] (2/4) Epoch 4, batch 1450, loss[loss=0.2379, simple_loss=0.3061, pruned_loss=0.08485, over 18415.00 frames. ], tot_loss[loss=0.2532, simple_loss=0.3235, pruned_loss=0.09141, over 3592903.06 frames. ], batch size: 48, lr: 2.67e-02, grad_scale: 4.0
+2023-03-08 17:40:29,929 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12355.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:41:23,193 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. limit=2.0
+2023-03-08 17:41:24,333 INFO [train.py:898] (2/4) Epoch 4, batch 1500, loss[loss=0.281, simple_loss=0.351, pruned_loss=0.1055, over 18365.00 frames. ], tot_loss[loss=0.2536, simple_loss=0.3244, pruned_loss=0.09141, over 3586829.40 frames. ], batch size: 55, lr: 2.66e-02, grad_scale: 4.0
+2023-03-08 17:41:24,586 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12403.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:41:48,830 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.804e+02 4.633e+02 5.942e+02 7.166e+02 1.765e+03, threshold=1.188e+03, percent-clipped=4.0
+2023-03-08 17:41:59,116 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12432.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:42:03,822 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3585, 3.2566, 2.9400, 2.7138, 3.0540, 2.6723, 2.4770, 3.3475],
+ device='cuda:2'), covar=tensor([0.0042, 0.0051, 0.0086, 0.0147, 0.0107, 0.0146, 0.0203, 0.0065],
+ device='cuda:2'), in_proj_covar=tensor([0.0045, 0.0054, 0.0055, 0.0081, 0.0052, 0.0083, 0.0095, 0.0050],
+ device='cuda:2'), out_proj_covar=tensor([6.7547e-05, 8.6334e-05, 9.0464e-05, 1.3028e-04, 8.0974e-05, 1.3152e-04,
+ 1.5547e-04, 8.0580e-05], device='cuda:2')
+2023-03-08 17:42:15,181 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12446.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:42:22,746 INFO [train.py:898] (2/4) Epoch 4, batch 1550, loss[loss=0.213, simple_loss=0.2852, pruned_loss=0.07046, over 18142.00 frames. ], tot_loss[loss=0.2536, simple_loss=0.3246, pruned_loss=0.09126, over 3598661.51 frames. ], batch size: 44, lr: 2.66e-02, grad_scale: 4.0
+2023-03-08 17:42:45,810 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12472.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:43:06,272 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5628, 3.4844, 4.4191, 2.9679, 3.6398, 2.8982, 2.9069, 1.9950],
+ device='cuda:2'), covar=tensor([0.0595, 0.0424, 0.0057, 0.0385, 0.0458, 0.1205, 0.0898, 0.1309],
+ device='cuda:2'), in_proj_covar=tensor([0.0141, 0.0142, 0.0070, 0.0118, 0.0158, 0.0195, 0.0141, 0.0163],
+ device='cuda:2'), out_proj_covar=tensor([1.4069e-04, 1.4673e-04, 7.3828e-05, 1.1998e-04, 1.6348e-04, 1.9604e-04,
+ 1.5601e-04, 1.6416e-04], device='cuda:2')
+2023-03-08 17:43:19,604 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12502.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:43:20,382 INFO [train.py:898] (2/4) Epoch 4, batch 1600, loss[loss=0.2491, simple_loss=0.3161, pruned_loss=0.091, over 18365.00 frames. ], tot_loss[loss=0.2538, simple_loss=0.3243, pruned_loss=0.09164, over 3600319.38 frames. ], batch size: 46, lr: 2.65e-02, grad_scale: 8.0
+2023-03-08 17:43:25,803 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12507.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:43:26,770 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2527, 5.2201, 4.6618, 5.2421, 5.1331, 4.5360, 5.1359, 4.6471],
+ device='cuda:2'), covar=tensor([0.0280, 0.0325, 0.1337, 0.0484, 0.0388, 0.0385, 0.0263, 0.0628],
+ device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0281, 0.0434, 0.0231, 0.0216, 0.0266, 0.0287, 0.0343],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0003, 0.0003, 0.0004, 0.0004, 0.0005],
+ device='cuda:2')
+2023-03-08 17:43:41,778 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12520.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:43:44,958 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.642e+02 4.900e+02 6.087e+02 7.463e+02 1.966e+03, threshold=1.217e+03, percent-clipped=9.0
+2023-03-08 17:44:15,881 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12550.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:44:19,019 INFO [train.py:898] (2/4) Epoch 4, batch 1650, loss[loss=0.304, simple_loss=0.3549, pruned_loss=0.1265, over 12797.00 frames. ], tot_loss[loss=0.254, simple_loss=0.3247, pruned_loss=0.09168, over 3594610.34 frames. ], batch size: 129, lr: 2.65e-02, grad_scale: 8.0
+2023-03-08 17:44:48,188 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12576.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:44:50,520 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.4412, 4.2723, 4.4833, 4.4278, 4.2356, 4.3416, 4.7093, 4.6899],
+ device='cuda:2'), covar=tensor([0.0069, 0.0126, 0.0102, 0.0079, 0.0109, 0.0122, 0.0091, 0.0085],
+ device='cuda:2'), in_proj_covar=tensor([0.0060, 0.0047, 0.0044, 0.0057, 0.0051, 0.0065, 0.0054, 0.0052],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-08 17:45:18,528 INFO [train.py:898] (2/4) Epoch 4, batch 1700, loss[loss=0.2096, simple_loss=0.2825, pruned_loss=0.06835, over 18152.00 frames. ], tot_loss[loss=0.2528, simple_loss=0.3239, pruned_loss=0.09084, over 3604374.54 frames. ], batch size: 44, lr: 2.65e-02, grad_scale: 8.0
+2023-03-08 17:45:19,953 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12604.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:45:43,560 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.156e+02 4.705e+02 5.777e+02 7.062e+02 1.643e+03, threshold=1.155e+03, percent-clipped=4.0
+2023-03-08 17:45:44,936 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12624.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:45:55,392 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0
+2023-03-08 17:46:02,692 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12640.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:46:15,976 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12652.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:46:16,913 INFO [train.py:898] (2/4) Epoch 4, batch 1750, loss[loss=0.2667, simple_loss=0.3425, pruned_loss=0.09541, over 17980.00 frames. ], tot_loss[loss=0.2531, simple_loss=0.3241, pruned_loss=0.09108, over 3594363.36 frames. ], batch size: 65, lr: 2.64e-02, grad_scale: 8.0
+2023-03-08 17:46:58,849 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12688.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:47:15,807 INFO [train.py:898] (2/4) Epoch 4, batch 1800, loss[loss=0.2436, simple_loss=0.3215, pruned_loss=0.08285, over 17838.00 frames. ], tot_loss[loss=0.2527, simple_loss=0.3236, pruned_loss=0.09094, over 3586550.44 frames. ], batch size: 70, lr: 2.64e-02, grad_scale: 8.0
+2023-03-08 17:47:39,919 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.294e+02 5.100e+02 5.998e+02 7.451e+02 2.129e+03, threshold=1.200e+03, percent-clipped=3.0
+2023-03-08 17:47:47,112 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.60 vs. limit=2.0
+2023-03-08 17:47:51,116 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12732.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:48:15,110 INFO [train.py:898] (2/4) Epoch 4, batch 1850, loss[loss=0.2811, simple_loss=0.3489, pruned_loss=0.1067, over 17264.00 frames. ], tot_loss[loss=0.253, simple_loss=0.3239, pruned_loss=0.09106, over 3583815.21 frames. ], batch size: 78, lr: 2.63e-02, grad_scale: 8.0
+2023-03-08 17:48:26,865 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12763.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:48:47,862 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=12780.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:49:02,957 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.65 vs. limit=2.0
+2023-03-08 17:49:12,645 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12802.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:49:13,594 INFO [train.py:898] (2/4) Epoch 4, batch 1900, loss[loss=0.2186, simple_loss=0.2926, pruned_loss=0.07229, over 18498.00 frames. ], tot_loss[loss=0.2521, simple_loss=0.3228, pruned_loss=0.09071, over 3594696.87 frames. ], batch size: 47, lr: 2.63e-02, grad_scale: 8.0
+2023-03-08 17:49:36,777 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.387e+02 4.784e+02 5.928e+02 6.975e+02 1.301e+03, threshold=1.186e+03, percent-clipped=1.0
+2023-03-08 17:49:38,342 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12824.0, num_to_drop=1, layers_to_drop={1}
+2023-03-08 17:50:11,780 INFO [train.py:898] (2/4) Epoch 4, batch 1950, loss[loss=0.2568, simple_loss=0.3327, pruned_loss=0.09044, over 18481.00 frames. ], tot_loss[loss=0.2526, simple_loss=0.3234, pruned_loss=0.09088, over 3596927.92 frames. ], batch size: 59, lr: 2.62e-02, grad_scale: 8.0
+2023-03-08 17:50:18,720 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.3218, 5.5306, 3.2497, 5.1277, 5.1097, 5.5153, 5.3571, 2.5528],
+ device='cuda:2'), covar=tensor([0.0121, 0.0040, 0.0580, 0.0061, 0.0077, 0.0050, 0.0073, 0.1058],
+ device='cuda:2'), in_proj_covar=tensor([0.0061, 0.0046, 0.0078, 0.0055, 0.0055, 0.0047, 0.0058, 0.0086],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-08 17:50:27,691 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4665, 5.0961, 5.4766, 5.4609, 5.2060, 5.9490, 5.5944, 5.2668],
+ device='cuda:2'), covar=tensor([0.0528, 0.0501, 0.0494, 0.0329, 0.1082, 0.0531, 0.0400, 0.1301],
+ device='cuda:2'), in_proj_covar=tensor([0.0221, 0.0165, 0.0173, 0.0168, 0.0221, 0.0246, 0.0162, 0.0237],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:2')
+2023-03-08 17:51:10,772 INFO [train.py:898] (2/4) Epoch 4, batch 2000, loss[loss=0.2438, simple_loss=0.3221, pruned_loss=0.08279, over 18569.00 frames. ], tot_loss[loss=0.2521, simple_loss=0.3234, pruned_loss=0.09042, over 3597945.96 frames. ], batch size: 54, lr: 2.62e-02, grad_scale: 8.0
+2023-03-08 17:51:33,456 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.610e+02 4.786e+02 5.613e+02 6.495e+02 1.703e+03, threshold=1.123e+03, percent-clipped=3.0
+2023-03-08 17:52:08,666 INFO [train.py:898] (2/4) Epoch 4, batch 2050, loss[loss=0.2264, simple_loss=0.2916, pruned_loss=0.08054, over 18514.00 frames. ], tot_loss[loss=0.252, simple_loss=0.3227, pruned_loss=0.09064, over 3586948.76 frames. ], batch size: 44, lr: 2.61e-02, grad_scale: 8.0
+2023-03-08 17:53:07,612 INFO [train.py:898] (2/4) Epoch 4, batch 2100, loss[loss=0.2751, simple_loss=0.3402, pruned_loss=0.105, over 18544.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.3219, pruned_loss=0.09017, over 3592629.62 frames. ], batch size: 49, lr: 2.61e-02, grad_scale: 8.0
+2023-03-08 17:53:08,013 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4790, 4.0626, 4.0397, 3.6214, 3.3041, 3.0184, 2.2511, 1.7140],
+ device='cuda:2'), covar=tensor([0.0178, 0.0207, 0.0073, 0.0154, 0.0271, 0.0226, 0.0685, 0.0923],
+ device='cuda:2'), in_proj_covar=tensor([0.0031, 0.0033, 0.0025, 0.0036, 0.0052, 0.0029, 0.0053, 0.0058],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-08 17:53:29,883 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.835e+02 4.911e+02 6.247e+02 7.223e+02 1.206e+03, threshold=1.249e+03, percent-clipped=2.0
+2023-03-08 17:54:06,017 INFO [train.py:898] (2/4) Epoch 4, batch 2150, loss[loss=0.2597, simple_loss=0.3335, pruned_loss=0.09295, over 17075.00 frames. ], tot_loss[loss=0.2521, simple_loss=0.3229, pruned_loss=0.09071, over 3577588.47 frames. ], batch size: 78, lr: 2.61e-02, grad_scale: 8.0
+2023-03-08 17:54:23,246 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6767, 3.6752, 2.1912, 3.9227, 4.6177, 2.4883, 3.5573, 3.3859],
+ device='cuda:2'), covar=tensor([0.0068, 0.1154, 0.1452, 0.0373, 0.0059, 0.1336, 0.0626, 0.0767],
+ device='cuda:2'), in_proj_covar=tensor([0.0076, 0.0138, 0.0170, 0.0153, 0.0070, 0.0159, 0.0174, 0.0169],
+ device='cuda:2'), out_proj_covar=tensor([1.0239e-04, 1.9817e-04, 2.1298e-04, 1.9827e-04, 9.4448e-05, 2.0584e-04,
+ 2.2046e-04, 2.2140e-04], device='cuda:2')
+2023-03-08 17:55:03,442 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=13102.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:55:04,404 INFO [train.py:898] (2/4) Epoch 4, batch 2200, loss[loss=0.2405, simple_loss=0.3174, pruned_loss=0.08176, over 18096.00 frames. ], tot_loss[loss=0.252, simple_loss=0.3227, pruned_loss=0.09063, over 3587852.58 frames. ], batch size: 62, lr: 2.60e-02, grad_scale: 8.0
+2023-03-08 17:55:22,901 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=13119.0, num_to_drop=1, layers_to_drop={3}
+2023-03-08 17:55:27,150 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.452e+02 4.789e+02 5.834e+02 7.042e+02 2.257e+03, threshold=1.167e+03, percent-clipped=4.0
+2023-03-08 17:55:56,344 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2482, 1.6328, 3.8118, 2.6440, 3.3711, 4.7603, 4.1323, 4.0707],
+ device='cuda:2'), covar=tensor([0.0336, 0.1026, 0.0330, 0.0662, 0.1161, 0.0034, 0.0233, 0.0201],
+ device='cuda:2'), in_proj_covar=tensor([0.0128, 0.0170, 0.0105, 0.0163, 0.0256, 0.0094, 0.0139, 0.0126],
+ device='cuda:2'), out_proj_covar=tensor([1.0290e-04, 1.3553e-04, 9.2250e-05, 1.1995e-04, 2.0111e-04, 6.7843e-05,
+ 1.1087e-04, 9.9910e-05], device='cuda:2')
+2023-03-08 17:55:59,505 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=13150.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 17:56:00,906 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6055, 3.1864, 1.6966, 4.1184, 2.7855, 4.5181, 1.8077, 3.8041],
+ device='cuda:2'), covar=tensor([0.0466, 0.0965, 0.1617, 0.0317, 0.0942, 0.0076, 0.1342, 0.0327],
+ device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0178, 0.0160, 0.0140, 0.0157, 0.0097, 0.0160, 0.0146],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-08 17:56:02,814 INFO [train.py:898] (2/4) Epoch 4, batch 2250, loss[loss=0.3087, simple_loss=0.3597, pruned_loss=0.1288, over 12764.00 frames. ], tot_loss[loss=0.2521, simple_loss=0.3228, pruned_loss=0.09074, over 3584176.62 frames. ], batch size: 129, lr: 2.60e-02, grad_scale: 8.0
+2023-03-08 17:56:09,785 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.66 vs. limit=5.0
+2023-03-08 17:57:01,386 INFO [train.py:898] (2/4) Epoch 4, batch 2300, loss[loss=0.2435, simple_loss=0.3152, pruned_loss=0.08594, over 18555.00 frames. ], tot_loss[loss=0.251, simple_loss=0.3217, pruned_loss=0.09012, over 3578151.46 frames. ], batch size: 49, lr: 2.59e-02, grad_scale: 8.0
+2023-03-08 17:57:24,003 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6966, 5.2331, 5.3812, 5.1654, 4.8797, 5.1026, 4.5378, 5.1260],
+ device='cuda:2'), covar=tensor([0.0212, 0.0255, 0.0148, 0.0161, 0.0379, 0.0206, 0.1211, 0.0205],
+ device='cuda:2'), in_proj_covar=tensor([0.0119, 0.0159, 0.0138, 0.0129, 0.0151, 0.0155, 0.0225, 0.0140],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004],
+ device='cuda:2')
+2023-03-08 17:57:24,771 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.647e+02 4.949e+02 6.048e+02 7.391e+02 1.554e+03, threshold=1.210e+03, percent-clipped=6.0
+2023-03-08 17:58:00,615 INFO [train.py:898] (2/4) Epoch 4, batch 2350, loss[loss=0.2366, simple_loss=0.3124, pruned_loss=0.08038, over 18479.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.3204, pruned_loss=0.08925, over 3589383.79 frames. ], batch size: 51, lr: 2.59e-02, grad_scale: 8.0
+2023-03-08 17:58:58,985 INFO [train.py:898] (2/4) Epoch 4, batch 2400, loss[loss=0.2349, simple_loss=0.3019, pruned_loss=0.08389, over 18495.00 frames. ], tot_loss[loss=0.2508, simple_loss=0.3221, pruned_loss=0.08977, over 3593989.83 frames. ], batch size: 47, lr: 2.58e-02, grad_scale: 8.0
+2023-03-08 17:59:23,024 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.316e+02 4.704e+02 5.884e+02 7.120e+02 1.441e+03, threshold=1.177e+03, percent-clipped=2.0
+2023-03-08 17:59:58,182 INFO [train.py:898] (2/4) Epoch 4, batch 2450, loss[loss=0.2641, simple_loss=0.334, pruned_loss=0.09713, over 15994.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3214, pruned_loss=0.08972, over 3591484.45 frames. ], batch size: 94, lr: 2.58e-02, grad_scale: 8.0
+2023-03-08 18:00:00,795 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4115, 5.3076, 4.6933, 5.3009, 5.3096, 4.6150, 5.2659, 4.8144],
+ device='cuda:2'), covar=tensor([0.0356, 0.0322, 0.1581, 0.0670, 0.0379, 0.0436, 0.0321, 0.0625],
+ device='cuda:2'), in_proj_covar=tensor([0.0274, 0.0296, 0.0451, 0.0238, 0.0224, 0.0271, 0.0298, 0.0359],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0003, 0.0003, 0.0004, 0.0004, 0.0005],
+ device='cuda:2')
+2023-03-08 18:00:16,873 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2634, 2.4787, 2.1494, 2.6763, 3.0220, 3.0375, 2.5984, 2.9658],
+ device='cuda:2'), covar=tensor([0.0381, 0.0233, 0.1051, 0.0484, 0.0277, 0.0217, 0.0529, 0.0245],
+ device='cuda:2'), in_proj_covar=tensor([0.0087, 0.0064, 0.0127, 0.0096, 0.0070, 0.0053, 0.0084, 0.0080],
+ device='cuda:2'), out_proj_covar=tensor([1.6779e-04, 1.2740e-04, 2.2769e-04, 1.7785e-04, 1.4009e-04, 9.7048e-05,
+ 1.6245e-04, 1.5396e-04], device='cuda:2')
+2023-03-08 18:00:54,106 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0
+2023-03-08 18:00:56,431 INFO [train.py:898] (2/4) Epoch 4, batch 2500, loss[loss=0.2175, simple_loss=0.2871, pruned_loss=0.07401, over 18256.00 frames. ], tot_loss[loss=0.2507, simple_loss=0.3215, pruned_loss=0.08992, over 3578344.72 frames. ], batch size: 45, lr: 2.58e-02, grad_scale: 8.0
+2023-03-08 18:01:16,466 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=13419.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 18:01:20,736 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.373e+02 5.229e+02 6.294e+02 7.709e+02 1.447e+03, threshold=1.259e+03, percent-clipped=4.0
+2023-03-08 18:01:55,105 INFO [train.py:898] (2/4) Epoch 4, batch 2550, loss[loss=0.2784, simple_loss=0.3463, pruned_loss=0.1053, over 17980.00 frames. ], tot_loss[loss=0.2504, simple_loss=0.3217, pruned_loss=0.08956, over 3585286.67 frames. ], batch size: 65, lr: 2.57e-02, grad_scale: 8.0
+2023-03-08 18:02:12,163 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=13467.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 18:02:53,613 INFO [train.py:898] (2/4) Epoch 4, batch 2600, loss[loss=0.2095, simple_loss=0.2794, pruned_loss=0.06982, over 18576.00 frames. ], tot_loss[loss=0.2502, simple_loss=0.3217, pruned_loss=0.08931, over 3585363.70 frames. ], batch size: 45, lr: 2.57e-02, grad_scale: 8.0
+2023-03-08 18:03:17,314 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.093e+02 4.722e+02 5.886e+02 7.226e+02 1.343e+03, threshold=1.177e+03, percent-clipped=1.0
+2023-03-08 18:03:51,845 INFO [train.py:898] (2/4) Epoch 4, batch 2650, loss[loss=0.2935, simple_loss=0.3587, pruned_loss=0.1141, over 18492.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3209, pruned_loss=0.0888, over 3597594.35 frames. ], batch size: 53, lr: 2.56e-02, grad_scale: 8.0
+2023-03-08 18:03:58,591 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.2655, 3.7970, 5.0252, 4.0114, 2.8743, 2.7288, 4.2721, 5.0602],
+ device='cuda:2'), covar=tensor([0.0950, 0.0849, 0.0041, 0.0292, 0.0851, 0.0984, 0.0257, 0.0031],
+ device='cuda:2'), in_proj_covar=tensor([0.0128, 0.0139, 0.0061, 0.0122, 0.0150, 0.0155, 0.0125, 0.0062],
+ device='cuda:2'), out_proj_covar=tensor([2.1015e-04, 2.3249e-04, 1.0970e-04, 2.0107e-04, 2.3254e-04, 2.4507e-04,
+ 2.0337e-04, 9.9746e-05], device='cuda:2')
+2023-03-08 18:04:43,262 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.52 vs. limit=5.0
+2023-03-08 18:04:50,572 INFO [train.py:898] (2/4) Epoch 4, batch 2700, loss[loss=0.2203, simple_loss=0.296, pruned_loss=0.07224, over 18480.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.3211, pruned_loss=0.089, over 3587984.33 frames. ], batch size: 47, lr: 2.56e-02, grad_scale: 8.0
+2023-03-08 18:05:14,651 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.306e+02 5.163e+02 6.316e+02 7.971e+02 1.590e+03, threshold=1.263e+03, percent-clipped=4.0
+2023-03-08 18:05:48,659 INFO [train.py:898] (2/4) Epoch 4, batch 2750, loss[loss=0.2404, simple_loss=0.316, pruned_loss=0.0824, over 18028.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.321, pruned_loss=0.08925, over 3587645.64 frames. ], batch size: 65, lr: 2.55e-02, grad_scale: 8.0
+2023-03-08 18:06:47,206 INFO [train.py:898] (2/4) Epoch 4, batch 2800, loss[loss=0.2732, simple_loss=0.3492, pruned_loss=0.0986, over 16100.00 frames. ], tot_loss[loss=0.25, simple_loss=0.3217, pruned_loss=0.08914, over 3589267.23 frames. ], batch size: 94, lr: 2.55e-02, grad_scale: 8.0
+2023-03-08 18:07:03,857 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8852, 4.8416, 4.9925, 4.6813, 4.6600, 4.7167, 5.2339, 5.2173],
+ device='cuda:2'), covar=tensor([0.0061, 0.0074, 0.0069, 0.0088, 0.0087, 0.0103, 0.0055, 0.0078],
+ device='cuda:2'), in_proj_covar=tensor([0.0062, 0.0047, 0.0045, 0.0060, 0.0052, 0.0068, 0.0055, 0.0054],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-08 18:07:11,558 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.506e+02 4.982e+02 6.101e+02 7.616e+02 1.473e+03, threshold=1.220e+03, percent-clipped=5.0
+2023-03-08 18:07:28,135 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=13737.0, num_to_drop=0, layers_to_drop=set()
+2023-03-08 18:07:45,676 INFO [train.py:898] (2/4) Epoch 4, batch 2850, loss[loss=0.2428, simple_loss=0.3239, pruned_loss=0.08083, over 18356.00 frames. ], tot_loss[loss=0.2495, simple_loss=0.3211, pruned_loss=0.08893, over 3600040.40 frames. ], batch size: 55, lr: 2.55e-02, grad_scale: 8.0
+2023-03-08 18:08:16,443 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1665, 4.6206, 4.7567, 4.5205, 4.2863, 4.4536, 3.9551, 4.5340],
+ device='cuda:2'), covar=tensor([0.0250, 0.0286, 0.0225, 0.0264, 0.0443, 0.0248, 0.1190, 0.0290],
+ device='cuda:2'), in_proj_covar=tensor([0.0118, 0.0158, 0.0139, 0.0127, 0.0152, 0.0157, 0.0223, 0.0142],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0004, 0.0004, 0.0005, 0.0005, 0.0004],
+ device='cuda:2')
+2023-03-08 18:08:38,612 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=13798.0, num_to_drop=1, layers_to_drop={2}
+2023-03-08 18:08:43,998 INFO [train.py:898] (2/4) Epoch 4, batch 2900, loss[loss=0.2803, simple_loss=0.3477, pruned_loss=0.1065, over 18581.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.3196, pruned_loss=0.08793, over 3600014.03 frames. ], batch size: 54, lr: 2.54e-02, grad_scale: 8.0
+2023-03-08 18:09:05,810 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0
+2023-03-08 18:09:07,286 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.361e+02 5.191e+02 6.530e+02 8.692e+02 2.187e+03, threshold=1.306e+03, percent-clipped=7.0
+2023-03-08 18:09:43,235 INFO [train.py:898] (2/4) Epoch 4, batch 2950, loss[loss=0.293, simple_loss=0.3509, pruned_loss=0.1176, over 18357.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.3196, pruned_loss=0.08779, over 3598258.24 frames. ], batch size: 55, lr: 2.54e-02, grad_scale: 8.0
+2023-03-08 18:10:28,686 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0
+2023-03-08 18:10:34,309 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9872, 5.0961, 5.1915, 4.8703, 4.8667, 4.9104, 5.3096, 5.2263],
+ device='cuda:2'), covar=tensor([0.0053, 0.0059, 0.0060, 0.0080, 0.0058, 0.0081, 0.0050, 0.0078],
+ device='cuda:2'), in_proj_covar=tensor([0.0062, 0.0046, 0.0046, 0.0060, 0.0052, 0.0068, 0.0054, 0.0055],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-08 18:10:40,647 INFO [train.py:898] (2/4) Epoch 4, batch 3000, loss[loss=0.2555, simple_loss=0.3366, pruned_loss=0.08723, over 18388.00 frames. ], tot_loss[loss=0.2477, simple_loss=0.3194, pruned_loss=0.08796, over 3594945.62 frames.
], batch size: 52, lr: 2.53e-02, grad_scale: 4.0 +2023-03-08 18:10:40,647 INFO [train.py:923] (2/4) Computing validation loss +2023-03-08 18:10:45,475 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0217, 4.2512, 4.3622, 4.2148, 3.8959, 4.0593, 4.3478, 4.2657], + device='cuda:2'), covar=tensor([0.1028, 0.0837, 0.0324, 0.0740, 0.2058, 0.0752, 0.0768, 0.0835], + device='cuda:2'), in_proj_covar=tensor([0.0360, 0.0298, 0.0222, 0.0321, 0.0455, 0.0326, 0.0352, 0.0300], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:2') +2023-03-08 18:10:52,597 INFO [train.py:932] (2/4) Epoch 4, validation: loss=0.1898, simple_loss=0.292, pruned_loss=0.04378, over 944034.00 frames. +2023-03-08 18:10:52,598 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-08 18:11:17,314 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.366e+02 5.126e+02 6.271e+02 8.521e+02 2.590e+03, threshold=1.254e+03, percent-clipped=11.0 +2023-03-08 18:11:34,648 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-08 18:11:50,804 INFO [train.py:898] (2/4) Epoch 4, batch 3050, loss[loss=0.2221, simple_loss=0.2887, pruned_loss=0.07781, over 18543.00 frames. ], tot_loss[loss=0.2471, simple_loss=0.319, pruned_loss=0.08755, over 3605529.73 frames. ], batch size: 45, lr: 2.53e-02, grad_scale: 4.0 +2023-03-08 18:12:18,869 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6036, 3.8510, 4.8978, 3.2386, 4.1735, 2.9376, 3.0325, 2.1397], + device='cuda:2'), covar=tensor([0.0594, 0.0401, 0.0042, 0.0354, 0.0369, 0.1258, 0.1325, 0.1141], + device='cuda:2'), in_proj_covar=tensor([0.0149, 0.0152, 0.0074, 0.0123, 0.0168, 0.0201, 0.0159, 0.0167], + device='cuda:2'), out_proj_covar=tensor([1.4626e-04, 1.5517e-04, 7.9142e-05, 1.2544e-04, 1.7114e-04, 2.0103e-04, + 1.7196e-04, 1.6684e-04], device='cuda:2') +2023-03-08 18:12:45,370 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-03-08 18:12:53,755 INFO [train.py:898] (2/4) Epoch 4, batch 3100, loss[loss=0.2193, simple_loss=0.2961, pruned_loss=0.07121, over 18176.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.3192, pruned_loss=0.08797, over 3589451.87 frames. ], batch size: 44, lr: 2.53e-02, grad_scale: 4.0 +2023-03-08 18:13:18,992 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.897e+02 4.672e+02 5.881e+02 7.283e+02 1.291e+03, threshold=1.176e+03, percent-clipped=1.0 +2023-03-08 18:13:52,252 INFO [train.py:898] (2/4) Epoch 4, batch 3150, loss[loss=0.2397, simple_loss=0.3103, pruned_loss=0.08456, over 18530.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.3182, pruned_loss=0.08708, over 3596678.91 frames. 
], batch size: 49, lr: 2.52e-02, grad_scale: 4.0 +2023-03-08 18:14:15,379 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14072.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:14:18,871 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1847, 2.5356, 2.1829, 2.7121, 3.1190, 3.2612, 2.5225, 2.7238], + device='cuda:2'), covar=tensor([0.0458, 0.0344, 0.1001, 0.0398, 0.0310, 0.0184, 0.0567, 0.0355], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0066, 0.0123, 0.0091, 0.0071, 0.0053, 0.0084, 0.0084], + device='cuda:2'), out_proj_covar=tensor([1.7232e-04, 1.3054e-04, 2.2446e-04, 1.7077e-04, 1.4259e-04, 9.9548e-05, + 1.6235e-04, 1.6153e-04], device='cuda:2') +2023-03-08 18:14:27,572 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-08 18:14:40,064 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14093.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 18:14:51,081 INFO [train.py:898] (2/4) Epoch 4, batch 3200, loss[loss=0.2524, simple_loss=0.3301, pruned_loss=0.08738, over 18377.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.318, pruned_loss=0.0871, over 3601095.94 frames. ], batch size: 50, lr: 2.52e-02, grad_scale: 8.0 +2023-03-08 18:14:51,432 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14103.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:14:58,563 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-03-08 18:15:10,625 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14120.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 18:15:14,750 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.167e+02 5.012e+02 5.863e+02 7.076e+02 1.938e+03, threshold=1.173e+03, percent-clipped=6.0 +2023-03-08 18:15:23,239 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.02 vs. limit=5.0 +2023-03-08 18:15:26,386 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14133.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:15:49,267 INFO [train.py:898] (2/4) Epoch 4, batch 3250, loss[loss=0.2441, simple_loss=0.3262, pruned_loss=0.08103, over 18614.00 frames. ], tot_loss[loss=0.2468, simple_loss=0.3186, pruned_loss=0.08751, over 3592332.31 frames. ], batch size: 52, lr: 2.51e-02, grad_scale: 8.0 +2023-03-08 18:16:01,735 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14164.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:16:22,270 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14181.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 18:16:47,537 INFO [train.py:898] (2/4) Epoch 4, batch 3300, loss[loss=0.214, simple_loss=0.288, pruned_loss=0.06998, over 17674.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3189, pruned_loss=0.08749, over 3580793.21 frames. 
], batch size: 39, lr: 2.51e-02, grad_scale: 8.0 +2023-03-08 18:17:04,446 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2906, 5.1214, 5.5261, 5.2456, 5.1559, 6.0653, 5.6189, 5.4917], + device='cuda:2'), covar=tensor([0.0767, 0.0511, 0.0519, 0.0495, 0.1298, 0.0638, 0.0504, 0.1066], + device='cuda:2'), in_proj_covar=tensor([0.0231, 0.0173, 0.0178, 0.0171, 0.0222, 0.0252, 0.0163, 0.0240], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-08 18:17:10,860 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.117e+02 4.913e+02 6.024e+02 7.201e+02 1.529e+03, threshold=1.205e+03, percent-clipped=5.0 +2023-03-08 18:17:45,354 INFO [train.py:898] (2/4) Epoch 4, batch 3350, loss[loss=0.2338, simple_loss=0.3113, pruned_loss=0.07812, over 18374.00 frames. ], tot_loss[loss=0.2469, simple_loss=0.3187, pruned_loss=0.08755, over 3578094.43 frames. ], batch size: 56, lr: 2.51e-02, grad_scale: 8.0 +2023-03-08 18:17:55,383 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4706, 3.6385, 2.0050, 4.5018, 3.2000, 4.8505, 2.3141, 4.0949], + device='cuda:2'), covar=tensor([0.0542, 0.0764, 0.1495, 0.0333, 0.0853, 0.0075, 0.1193, 0.0337], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0185, 0.0162, 0.0146, 0.0158, 0.0101, 0.0162, 0.0151], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 18:18:13,619 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1024, 3.2202, 4.4998, 4.2249, 2.9381, 4.7426, 3.8756, 3.0865], + device='cuda:2'), covar=tensor([0.0248, 0.0726, 0.0068, 0.0152, 0.1051, 0.0065, 0.0277, 0.0741], + device='cuda:2'), in_proj_covar=tensor([0.0124, 0.0157, 0.0087, 0.0099, 0.0173, 0.0114, 0.0122, 0.0162], + device='cuda:2'), out_proj_covar=tensor([1.2099e-04, 1.5339e-04, 8.9839e-05, 9.5444e-05, 1.6573e-04, 1.0746e-04, + 1.2805e-04, 1.6252e-04], device='cuda:2') +2023-03-08 18:18:44,689 INFO [train.py:898] (2/4) Epoch 4, batch 3400, loss[loss=0.2511, simple_loss=0.3286, pruned_loss=0.08677, over 17817.00 frames. ], tot_loss[loss=0.2467, simple_loss=0.3182, pruned_loss=0.08762, over 3568018.80 frames. ], batch size: 70, lr: 2.50e-02, grad_scale: 8.0 +2023-03-08 18:19:08,456 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.677e+02 5.094e+02 5.876e+02 7.439e+02 2.634e+03, threshold=1.175e+03, percent-clipped=7.0 +2023-03-08 18:19:42,053 INFO [train.py:898] (2/4) Epoch 4, batch 3450, loss[loss=0.25, simple_loss=0.3217, pruned_loss=0.08918, over 18122.00 frames. ], tot_loss[loss=0.246, simple_loss=0.3177, pruned_loss=0.08714, over 3576876.32 frames. ], batch size: 62, lr: 2.50e-02, grad_scale: 4.0 +2023-03-08 18:20:29,259 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14393.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:20:40,689 INFO [train.py:898] (2/4) Epoch 4, batch 3500, loss[loss=0.2543, simple_loss=0.329, pruned_loss=0.08977, over 18361.00 frames. ], tot_loss[loss=0.247, simple_loss=0.3188, pruned_loss=0.08763, over 3573976.69 frames. 
], batch size: 55, lr: 2.49e-02, grad_scale: 4.0 +2023-03-08 18:20:52,854 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14413.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:21:05,560 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.205e+02 4.879e+02 5.762e+02 7.140e+02 1.904e+03, threshold=1.152e+03, percent-clipped=3.0 +2023-03-08 18:21:08,168 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5949, 3.5929, 1.6290, 4.2217, 2.8704, 4.7303, 1.8342, 3.8714], + device='cuda:2'), covar=tensor([0.0551, 0.0816, 0.1745, 0.0443, 0.1067, 0.0099, 0.1389, 0.0355], + device='cuda:2'), in_proj_covar=tensor([0.0149, 0.0185, 0.0162, 0.0147, 0.0160, 0.0103, 0.0165, 0.0152], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 18:21:09,144 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14428.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:21:22,861 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14441.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:21:35,660 INFO [train.py:898] (2/4) Epoch 4, batch 3550, loss[loss=0.2144, simple_loss=0.2866, pruned_loss=0.07107, over 18252.00 frames. ], tot_loss[loss=0.2464, simple_loss=0.3183, pruned_loss=0.08728, over 3576722.92 frames. ], batch size: 45, lr: 2.49e-02, grad_scale: 4.0 +2023-03-08 18:21:42,310 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14459.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:21:45,467 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8378, 3.6375, 3.8797, 3.7253, 3.6890, 3.6090, 3.9988, 3.9946], + device='cuda:2'), covar=tensor([0.0066, 0.0097, 0.0067, 0.0080, 0.0070, 0.0106, 0.0066, 0.0083], + device='cuda:2'), in_proj_covar=tensor([0.0062, 0.0048, 0.0046, 0.0060, 0.0051, 0.0068, 0.0056, 0.0056], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 18:21:58,341 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14474.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:22:00,251 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14476.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 18:22:13,145 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14487.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:22:30,848 INFO [train.py:898] (2/4) Epoch 4, batch 3600, loss[loss=0.2619, simple_loss=0.3175, pruned_loss=0.1031, over 17802.00 frames. ], tot_loss[loss=0.2482, simple_loss=0.3198, pruned_loss=0.08829, over 3550460.68 frames. ], batch size: 39, lr: 2.49e-02, grad_scale: 8.0 +2023-03-08 18:22:52,356 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-03-08 18:22:53,544 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.017e+02 5.164e+02 6.546e+02 8.016e+02 1.916e+03, threshold=1.309e+03, percent-clipped=5.0 +2023-03-08 18:23:35,185 INFO [train.py:898] (2/4) Epoch 5, batch 0, loss[loss=0.2734, simple_loss=0.3398, pruned_loss=0.1035, over 17856.00 frames. ], tot_loss[loss=0.2734, simple_loss=0.3398, pruned_loss=0.1035, over 17856.00 frames. 
], batch size: 70, lr: 2.31e-02, grad_scale: 8.0 +2023-03-08 18:23:35,186 INFO [train.py:923] (2/4) Computing validation loss +2023-03-08 18:23:46,756 INFO [train.py:932] (2/4) Epoch 5, validation: loss=0.1908, simple_loss=0.2926, pruned_loss=0.04454, over 944034.00 frames. +2023-03-08 18:23:46,757 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-08 18:23:59,485 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14548.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:24:44,283 INFO [train.py:898] (2/4) Epoch 5, batch 50, loss[loss=0.2662, simple_loss=0.334, pruned_loss=0.09926, over 16083.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.3169, pruned_loss=0.08527, over 812959.20 frames. ], batch size: 94, lr: 2.31e-02, grad_scale: 8.0 +2023-03-08 18:25:29,118 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.345e+02 4.923e+02 5.805e+02 7.432e+02 1.503e+03, threshold=1.161e+03, percent-clipped=2.0 +2023-03-08 18:25:43,381 INFO [train.py:898] (2/4) Epoch 5, batch 100, loss[loss=0.2426, simple_loss=0.3196, pruned_loss=0.08284, over 18318.00 frames. ], tot_loss[loss=0.2394, simple_loss=0.3135, pruned_loss=0.08263, over 1435693.23 frames. ], batch size: 54, lr: 2.31e-02, grad_scale: 8.0 +2023-03-08 18:25:50,487 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6268, 3.5211, 1.6734, 4.2557, 2.8788, 4.5705, 2.0273, 4.1236], + device='cuda:2'), covar=tensor([0.0440, 0.0821, 0.1674, 0.0327, 0.0901, 0.0100, 0.1176, 0.0277], + device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0182, 0.0160, 0.0145, 0.0153, 0.0104, 0.0159, 0.0149], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 18:26:01,033 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.21 vs. limit=5.0 +2023-03-08 18:26:13,680 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-03-08 18:26:41,860 INFO [train.py:898] (2/4) Epoch 5, batch 150, loss[loss=0.2666, simple_loss=0.3289, pruned_loss=0.1021, over 15668.00 frames. ], tot_loss[loss=0.2422, simple_loss=0.3158, pruned_loss=0.08429, over 1909683.85 frames. ], batch size: 94, lr: 2.30e-02, grad_scale: 8.0 +2023-03-08 18:26:53,019 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.68 vs. limit=2.0 +2023-03-08 18:27:24,692 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5524, 3.6603, 1.9864, 4.4991, 3.0988, 4.6940, 2.0152, 4.3683], + device='cuda:2'), covar=tensor([0.0500, 0.0833, 0.1498, 0.0278, 0.0873, 0.0115, 0.1370, 0.0255], + device='cuda:2'), in_proj_covar=tensor([0.0149, 0.0187, 0.0163, 0.0148, 0.0157, 0.0108, 0.0165, 0.0153], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 18:27:26,969 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.743e+02 4.388e+02 5.420e+02 7.125e+02 1.692e+03, threshold=1.084e+03, percent-clipped=3.0 +2023-03-08 18:27:30,703 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14728.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:27:40,875 INFO [train.py:898] (2/4) Epoch 5, batch 200, loss[loss=0.2014, simple_loss=0.2838, pruned_loss=0.05955, over 18415.00 frames. ], tot_loss[loss=0.2406, simple_loss=0.3144, pruned_loss=0.08338, over 2295168.73 frames. 
], batch size: 48, lr: 2.30e-02, grad_scale: 8.0 +2023-03-08 18:27:45,130 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14740.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:28:00,195 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9469, 1.8173, 3.4425, 2.6530, 3.5498, 5.0903, 4.2757, 4.2658], + device='cuda:2'), covar=tensor([0.0319, 0.0900, 0.0504, 0.0586, 0.0874, 0.0022, 0.0203, 0.0121], + device='cuda:2'), in_proj_covar=tensor([0.0137, 0.0180, 0.0129, 0.0173, 0.0258, 0.0097, 0.0150, 0.0127], + device='cuda:2'), out_proj_covar=tensor([1.0639e-04, 1.4047e-04, 1.0983e-04, 1.2284e-04, 1.9869e-04, 6.8844e-05, + 1.1622e-04, 9.6981e-05], device='cuda:2') +2023-03-08 18:28:06,704 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14759.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:28:17,923 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14769.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:28:26,647 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-03-08 18:28:26,990 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14776.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:28:27,154 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14776.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 18:28:28,866 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6285, 3.4000, 3.1102, 2.7271, 3.1562, 2.7263, 2.0814, 3.5741], + device='cuda:2'), covar=tensor([0.0024, 0.0073, 0.0071, 0.0138, 0.0075, 0.0154, 0.0280, 0.0044], + device='cuda:2'), in_proj_covar=tensor([0.0050, 0.0065, 0.0061, 0.0094, 0.0059, 0.0096, 0.0107, 0.0055], + device='cuda:2'), out_proj_covar=tensor([7.4654e-05, 1.0302e-04, 1.0055e-04, 1.5234e-04, 9.1792e-05, 1.5358e-04, + 1.7305e-04, 8.7964e-05], device='cuda:2') +2023-03-08 18:28:39,580 INFO [train.py:898] (2/4) Epoch 5, batch 250, loss[loss=0.2616, simple_loss=0.3359, pruned_loss=0.0937, over 18338.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.3146, pruned_loss=0.08325, over 2584521.07 frames. ], batch size: 56, lr: 2.30e-02, grad_scale: 8.0 +2023-03-08 18:28:56,754 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14801.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:29:03,301 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14807.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:29:16,262 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.11 vs. limit=5.0 +2023-03-08 18:29:22,485 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=14824.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 18:29:23,284 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.926e+02 4.780e+02 5.936e+02 7.141e+02 1.719e+03, threshold=1.187e+03, percent-clipped=5.0 +2023-03-08 18:29:37,977 INFO [train.py:898] (2/4) Epoch 5, batch 300, loss[loss=0.2584, simple_loss=0.3402, pruned_loss=0.08834, over 18365.00 frames. ], tot_loss[loss=0.2398, simple_loss=0.3135, pruned_loss=0.08306, over 2818106.81 frames. 
], batch size: 56, lr: 2.29e-02, grad_scale: 8.0 +2023-03-08 18:29:45,105 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14843.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:30:35,693 INFO [train.py:898] (2/4) Epoch 5, batch 350, loss[loss=0.191, simple_loss=0.2689, pruned_loss=0.05659, over 18408.00 frames. ], tot_loss[loss=0.2384, simple_loss=0.312, pruned_loss=0.08243, over 2994106.23 frames. ], batch size: 42, lr: 2.29e-02, grad_scale: 8.0 +2023-03-08 18:31:09,590 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. limit=2.0 +2023-03-08 18:31:15,055 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.39 vs. limit=5.0 +2023-03-08 18:31:20,045 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.949e+02 4.422e+02 5.424e+02 6.713e+02 1.260e+03, threshold=1.085e+03, percent-clipped=2.0 +2023-03-08 18:31:34,506 INFO [train.py:898] (2/4) Epoch 5, batch 400, loss[loss=0.2477, simple_loss=0.3193, pruned_loss=0.08808, over 18360.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.3112, pruned_loss=0.08176, over 3133507.33 frames. ], batch size: 56, lr: 2.29e-02, grad_scale: 8.0 +2023-03-08 18:32:31,729 INFO [train.py:898] (2/4) Epoch 5, batch 450, loss[loss=0.2976, simple_loss=0.3604, pruned_loss=0.1174, over 18287.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.3124, pruned_loss=0.08237, over 3244399.52 frames. ], batch size: 57, lr: 2.28e-02, grad_scale: 8.0 +2023-03-08 18:32:46,036 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5475, 3.9888, 5.2490, 3.5030, 4.5738, 3.1308, 3.3783, 2.2305], + device='cuda:2'), covar=tensor([0.0390, 0.0449, 0.0037, 0.0326, 0.0316, 0.1155, 0.1129, 0.1212], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0159, 0.0078, 0.0124, 0.0176, 0.0209, 0.0173, 0.0172], + device='cuda:2'), out_proj_covar=tensor([1.4952e-04, 1.6114e-04, 8.1616e-05, 1.2415e-04, 1.7875e-04, 2.0659e-04, + 1.8303e-04, 1.7266e-04], device='cuda:2') +2023-03-08 18:33:17,342 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.939e+02 4.662e+02 5.762e+02 7.845e+02 1.537e+03, threshold=1.152e+03, percent-clipped=9.0 +2023-03-08 18:33:30,769 INFO [train.py:898] (2/4) Epoch 5, batch 500, loss[loss=0.2463, simple_loss=0.3212, pruned_loss=0.08572, over 18114.00 frames. ], tot_loss[loss=0.238, simple_loss=0.3117, pruned_loss=0.08214, over 3320392.80 frames. ], batch size: 62, lr: 2.28e-02, grad_scale: 8.0 +2023-03-08 18:34:10,024 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15069.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:34:23,813 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.4550, 5.1060, 5.1493, 4.9923, 4.7496, 4.9313, 4.1725, 4.9356], + device='cuda:2'), covar=tensor([0.0266, 0.0253, 0.0200, 0.0222, 0.0416, 0.0268, 0.1588, 0.0293], + device='cuda:2'), in_proj_covar=tensor([0.0122, 0.0161, 0.0144, 0.0136, 0.0155, 0.0161, 0.0233, 0.0144], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006, 0.0004], + device='cuda:2') +2023-03-08 18:34:30,390 INFO [train.py:898] (2/4) Epoch 5, batch 550, loss[loss=0.2039, simple_loss=0.2761, pruned_loss=0.06582, over 18447.00 frames. ], tot_loss[loss=0.2379, simple_loss=0.3118, pruned_loss=0.08201, over 3378948.94 frames. 
], batch size: 43, lr: 2.28e-02, grad_scale: 8.0 +2023-03-08 18:34:34,081 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8375, 3.4788, 3.1442, 2.8903, 3.4473, 2.6978, 2.5068, 3.6630], + device='cuda:2'), covar=tensor([0.0022, 0.0078, 0.0100, 0.0142, 0.0061, 0.0164, 0.0200, 0.0058], + device='cuda:2'), in_proj_covar=tensor([0.0051, 0.0066, 0.0063, 0.0095, 0.0061, 0.0099, 0.0107, 0.0055], + device='cuda:2'), out_proj_covar=tensor([7.7363e-05, 1.0538e-04, 1.0367e-04, 1.5551e-04, 9.4784e-05, 1.5864e-04, + 1.7257e-04, 8.7699e-05], device='cuda:2') +2023-03-08 18:34:41,260 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15096.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:34:54,476 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.69 vs. limit=5.0 +2023-03-08 18:35:06,920 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15117.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:35:15,839 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.759e+02 4.602e+02 5.527e+02 7.375e+02 1.442e+03, threshold=1.105e+03, percent-clipped=1.0 +2023-03-08 18:35:29,480 INFO [train.py:898] (2/4) Epoch 5, batch 600, loss[loss=0.2678, simple_loss=0.3403, pruned_loss=0.09765, over 18378.00 frames. ], tot_loss[loss=0.2383, simple_loss=0.3123, pruned_loss=0.08218, over 3427634.18 frames. ], batch size: 52, lr: 2.27e-02, grad_scale: 8.0 +2023-03-08 18:35:36,517 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15143.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:36:28,270 INFO [train.py:898] (2/4) Epoch 5, batch 650, loss[loss=0.2309, simple_loss=0.3039, pruned_loss=0.07895, over 18382.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.3127, pruned_loss=0.08232, over 3467219.23 frames. ], batch size: 52, lr: 2.27e-02, grad_scale: 8.0 +2023-03-08 18:36:32,837 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15191.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:36:37,726 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15195.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:37:13,514 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.710e+02 4.789e+02 6.054e+02 7.320e+02 2.200e+03, threshold=1.211e+03, percent-clipped=6.0 +2023-03-08 18:37:14,047 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4272, 3.7961, 5.1074, 4.2981, 3.4620, 2.9824, 4.4514, 5.1734], + device='cuda:2'), covar=tensor([0.0938, 0.1121, 0.0056, 0.0295, 0.0688, 0.0933, 0.0243, 0.0046], + device='cuda:2'), in_proj_covar=tensor([0.0134, 0.0158, 0.0065, 0.0133, 0.0163, 0.0164, 0.0135, 0.0072], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003, 0.0002, 0.0001], + device='cuda:2') +2023-03-08 18:37:27,062 INFO [train.py:898] (2/4) Epoch 5, batch 700, loss[loss=0.2596, simple_loss=0.33, pruned_loss=0.09461, over 18321.00 frames. ], tot_loss[loss=0.239, simple_loss=0.3133, pruned_loss=0.08233, over 3504662.95 frames. 
], batch size: 54, lr: 2.27e-02, grad_scale: 8.0 +2023-03-08 18:37:43,084 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4336, 5.1868, 5.4823, 5.4201, 5.2675, 6.0020, 5.5435, 5.4305], + device='cuda:2'), covar=tensor([0.0697, 0.0566, 0.0512, 0.0465, 0.1144, 0.0652, 0.0497, 0.1215], + device='cuda:2'), in_proj_covar=tensor([0.0238, 0.0179, 0.0180, 0.0179, 0.0222, 0.0267, 0.0171, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-08 18:37:49,735 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15256.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 18:38:26,034 INFO [train.py:898] (2/4) Epoch 5, batch 750, loss[loss=0.2304, simple_loss=0.3062, pruned_loss=0.07727, over 18484.00 frames. ], tot_loss[loss=0.2396, simple_loss=0.3139, pruned_loss=0.08263, over 3522912.67 frames. ], batch size: 51, lr: 2.26e-02, grad_scale: 8.0 +2023-03-08 18:38:28,519 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3398, 5.0887, 5.2344, 5.3070, 5.1166, 5.8477, 5.5238, 5.3167], + device='cuda:2'), covar=tensor([0.0655, 0.0506, 0.0581, 0.0395, 0.1123, 0.0730, 0.0512, 0.1360], + device='cuda:2'), in_proj_covar=tensor([0.0237, 0.0179, 0.0181, 0.0180, 0.0221, 0.0268, 0.0170, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-08 18:39:10,698 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.929e+02 4.559e+02 5.323e+02 7.039e+02 1.434e+03, threshold=1.065e+03, percent-clipped=2.0 +2023-03-08 18:39:24,935 INFO [train.py:898] (2/4) Epoch 5, batch 800, loss[loss=0.2334, simple_loss=0.3013, pruned_loss=0.0828, over 18365.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.3131, pruned_loss=0.082, over 3542778.69 frames. ], batch size: 50, lr: 2.26e-02, grad_scale: 8.0 +2023-03-08 18:39:46,980 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.3207, 3.7677, 5.0838, 4.2507, 3.2178, 2.6805, 4.3027, 5.1411], + device='cuda:2'), covar=tensor([0.1091, 0.1085, 0.0051, 0.0279, 0.0827, 0.1121, 0.0319, 0.0066], + device='cuda:2'), in_proj_covar=tensor([0.0133, 0.0155, 0.0064, 0.0130, 0.0158, 0.0160, 0.0132, 0.0071], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0002, 0.0003, 0.0002, 0.0001], + device='cuda:2') +2023-03-08 18:40:05,194 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-08 18:40:24,766 INFO [train.py:898] (2/4) Epoch 5, batch 850, loss[loss=0.2057, simple_loss=0.2757, pruned_loss=0.06782, over 18414.00 frames. ], tot_loss[loss=0.2388, simple_loss=0.3128, pruned_loss=0.08239, over 3542280.48 frames. 
], batch size: 42, lr: 2.26e-02, grad_scale: 8.0 +2023-03-08 18:40:33,022 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5653, 4.6672, 3.5353, 4.3872, 4.6965, 4.2802, 4.4258, 4.0531], + device='cuda:2'), covar=tensor([0.0779, 0.0765, 0.3529, 0.1237, 0.0571, 0.0529, 0.0841, 0.1144], + device='cuda:2'), in_proj_covar=tensor([0.0290, 0.0314, 0.0466, 0.0257, 0.0239, 0.0297, 0.0310, 0.0397], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0004, 0.0003, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 18:40:35,182 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15396.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:40:55,753 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-08 18:41:09,844 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.891e+02 4.807e+02 5.667e+02 6.596e+02 1.929e+03, threshold=1.133e+03, percent-clipped=7.0 +2023-03-08 18:41:24,181 INFO [train.py:898] (2/4) Epoch 5, batch 900, loss[loss=0.2202, simple_loss=0.3031, pruned_loss=0.06863, over 18638.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.3127, pruned_loss=0.08222, over 3559463.31 frames. ], batch size: 52, lr: 2.25e-02, grad_scale: 8.0 +2023-03-08 18:41:24,921 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.71 vs. limit=2.0 +2023-03-08 18:41:32,535 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15444.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:42:05,264 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8272, 3.4913, 3.3412, 2.9393, 3.4422, 2.7407, 2.6718, 3.7201], + device='cuda:2'), covar=tensor([0.0028, 0.0095, 0.0086, 0.0145, 0.0090, 0.0190, 0.0185, 0.0062], + device='cuda:2'), in_proj_covar=tensor([0.0054, 0.0068, 0.0064, 0.0097, 0.0063, 0.0102, 0.0111, 0.0060], + device='cuda:2'), out_proj_covar=tensor([8.1791e-05, 1.0877e-04, 1.0464e-04, 1.5736e-04, 9.8710e-05, 1.6504e-04, + 1.7853e-04, 9.5852e-05], device='cuda:2') +2023-03-08 18:42:12,480 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.98 vs. limit=2.0 +2023-03-08 18:42:23,788 INFO [train.py:898] (2/4) Epoch 5, batch 950, loss[loss=0.2162, simple_loss=0.292, pruned_loss=0.07018, over 18530.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.312, pruned_loss=0.08159, over 3572109.43 frames. ], batch size: 49, lr: 2.25e-02, grad_scale: 8.0 +2023-03-08 18:43:09,482 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.487e+02 4.499e+02 5.437e+02 6.753e+02 3.373e+03, threshold=1.087e+03, percent-clipped=5.0 +2023-03-08 18:43:23,269 INFO [train.py:898] (2/4) Epoch 5, batch 1000, loss[loss=0.2313, simple_loss=0.3155, pruned_loss=0.07358, over 18575.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.3118, pruned_loss=0.08151, over 3574119.78 frames. ], batch size: 54, lr: 2.25e-02, grad_scale: 8.0 +2023-03-08 18:43:39,867 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15551.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 18:44:00,170 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.06 vs. limit=2.0 +2023-03-08 18:44:09,131 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-03-08 18:44:23,087 INFO [train.py:898] (2/4) Epoch 5, batch 1050, loss[loss=0.2263, simple_loss=0.3025, pruned_loss=0.075, over 18416.00 frames. 
], tot_loss[loss=0.2376, simple_loss=0.312, pruned_loss=0.08162, over 3577465.14 frames. ], batch size: 48, lr: 2.24e-02, grad_scale: 8.0 +2023-03-08 18:44:38,478 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-03-08 18:45:09,019 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.376e+02 4.268e+02 5.180e+02 6.022e+02 1.534e+03, threshold=1.036e+03, percent-clipped=2.0 +2023-03-08 18:45:17,901 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3034, 5.9173, 5.3549, 5.6454, 5.3681, 5.4662, 5.9154, 5.8296], + device='cuda:2'), covar=tensor([0.1038, 0.0502, 0.0370, 0.0594, 0.1184, 0.0538, 0.0443, 0.0548], + device='cuda:2'), in_proj_covar=tensor([0.0386, 0.0315, 0.0242, 0.0339, 0.0482, 0.0344, 0.0385, 0.0320], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:2') +2023-03-08 18:45:23,119 INFO [train.py:898] (2/4) Epoch 5, batch 1100, loss[loss=0.3356, simple_loss=0.381, pruned_loss=0.1451, over 12854.00 frames. ], tot_loss[loss=0.2381, simple_loss=0.3121, pruned_loss=0.08203, over 3560570.50 frames. ], batch size: 129, lr: 2.24e-02, grad_scale: 8.0 +2023-03-08 18:46:22,725 INFO [train.py:898] (2/4) Epoch 5, batch 1150, loss[loss=0.2269, simple_loss=0.2979, pruned_loss=0.07798, over 18548.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.3113, pruned_loss=0.08189, over 3568198.01 frames. ], batch size: 49, lr: 2.24e-02, grad_scale: 8.0 +2023-03-08 18:46:59,994 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3477, 6.0243, 5.3783, 5.7388, 5.4996, 5.5894, 6.0729, 5.9675], + device='cuda:2'), covar=tensor([0.1146, 0.0542, 0.0360, 0.0614, 0.1396, 0.0627, 0.0432, 0.0585], + device='cuda:2'), in_proj_covar=tensor([0.0385, 0.0314, 0.0241, 0.0342, 0.0482, 0.0344, 0.0390, 0.0321], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:2') +2023-03-08 18:47:06,232 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.304e+02 5.042e+02 5.995e+02 7.754e+02 1.495e+03, threshold=1.199e+03, percent-clipped=7.0 +2023-03-08 18:47:21,470 INFO [train.py:898] (2/4) Epoch 5, batch 1200, loss[loss=0.2872, simple_loss=0.3543, pruned_loss=0.11, over 18283.00 frames. ], tot_loss[loss=0.239, simple_loss=0.3128, pruned_loss=0.08257, over 3575200.98 frames. ], batch size: 57, lr: 2.23e-02, grad_scale: 8.0 +2023-03-08 18:47:45,082 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1054, 1.6890, 3.2599, 2.5902, 3.4149, 4.8245, 4.2172, 4.1237], + device='cuda:2'), covar=tensor([0.0321, 0.1016, 0.0622, 0.0701, 0.1027, 0.0031, 0.0215, 0.0157], + device='cuda:2'), in_proj_covar=tensor([0.0142, 0.0186, 0.0142, 0.0180, 0.0270, 0.0101, 0.0154, 0.0134], + device='cuda:2'), out_proj_covar=tensor([1.0843e-04, 1.4247e-04, 1.1862e-04, 1.2720e-04, 2.0363e-04, 7.1122e-05, + 1.1755e-04, 1.0244e-04], device='cuda:2') +2023-03-08 18:48:19,536 INFO [train.py:898] (2/4) Epoch 5, batch 1250, loss[loss=0.2102, simple_loss=0.2847, pruned_loss=0.06788, over 18253.00 frames. ], tot_loss[loss=0.2383, simple_loss=0.3125, pruned_loss=0.0821, over 3581284.43 frames. 
], batch size: 47, lr: 2.23e-02, grad_scale: 8.0 +2023-03-08 18:49:03,912 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.909e+02 4.343e+02 5.272e+02 6.723e+02 1.264e+03, threshold=1.054e+03, percent-clipped=1.0 +2023-03-08 18:49:18,487 INFO [train.py:898] (2/4) Epoch 5, batch 1300, loss[loss=0.2581, simple_loss=0.3366, pruned_loss=0.08985, over 18247.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.3127, pruned_loss=0.08189, over 3578652.41 frames. ], batch size: 60, lr: 2.23e-02, grad_scale: 4.0 +2023-03-08 18:49:35,365 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15851.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:50:12,228 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15883.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:50:16,769 INFO [train.py:898] (2/4) Epoch 5, batch 1350, loss[loss=0.2792, simple_loss=0.3439, pruned_loss=0.1073, over 17259.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3125, pruned_loss=0.08126, over 3586402.67 frames. ], batch size: 78, lr: 2.22e-02, grad_scale: 4.0 +2023-03-08 18:50:29,885 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. limit=5.0 +2023-03-08 18:50:31,443 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=15899.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:51:02,178 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.900e+02 4.519e+02 5.491e+02 6.869e+02 1.432e+03, threshold=1.098e+03, percent-clipped=4.0 +2023-03-08 18:51:15,440 INFO [train.py:898] (2/4) Epoch 5, batch 1400, loss[loss=0.2442, simple_loss=0.3201, pruned_loss=0.08413, over 18351.00 frames. ], tot_loss[loss=0.2375, simple_loss=0.3121, pruned_loss=0.08146, over 3581588.24 frames. 
], batch size: 56, lr: 2.22e-02, grad_scale: 4.0 +2023-03-08 18:51:24,936 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15944.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:51:37,547 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2250, 2.0616, 3.4220, 3.0089, 3.7619, 5.2013, 4.4066, 4.5343], + device='cuda:2'), covar=tensor([0.0322, 0.0882, 0.0620, 0.0525, 0.0860, 0.0024, 0.0210, 0.0129], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0190, 0.0148, 0.0184, 0.0272, 0.0104, 0.0156, 0.0136], + device='cuda:2'), out_proj_covar=tensor([1.0969e-04, 1.4536e-04, 1.2256e-04, 1.2952e-04, 2.0577e-04, 7.2840e-05, + 1.1918e-04, 1.0345e-04], device='cuda:2') +2023-03-08 18:52:02,905 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5904, 5.2633, 5.3037, 4.9829, 4.7223, 5.0022, 4.3821, 4.9819], + device='cuda:2'), covar=tensor([0.0263, 0.0268, 0.0191, 0.0284, 0.0451, 0.0236, 0.1198, 0.0282], + device='cuda:2'), in_proj_covar=tensor([0.0127, 0.0170, 0.0152, 0.0146, 0.0165, 0.0169, 0.0237, 0.0149], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005], + device='cuda:2') +2023-03-08 18:52:09,808 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1796, 2.9456, 4.2179, 4.1906, 2.4005, 4.5420, 4.0169, 2.8103], + device='cuda:2'), covar=tensor([0.0191, 0.0979, 0.0157, 0.0130, 0.1530, 0.0114, 0.0394, 0.0929], + device='cuda:2'), in_proj_covar=tensor([0.0136, 0.0171, 0.0095, 0.0101, 0.0180, 0.0126, 0.0135, 0.0169], + device='cuda:2'), out_proj_covar=tensor([1.3142e-04, 1.6543e-04, 1.0028e-04, 9.6598e-05, 1.7298e-04, 1.1894e-04, + 1.3943e-04, 1.6947e-04], device='cuda:2') +2023-03-08 18:52:13,986 INFO [train.py:898] (2/4) Epoch 5, batch 1450, loss[loss=0.2006, simple_loss=0.272, pruned_loss=0.06455, over 18480.00 frames. ], tot_loss[loss=0.2358, simple_loss=0.3104, pruned_loss=0.0806, over 3585349.18 frames. ], batch size: 44, lr: 2.22e-02, grad_scale: 4.0 +2023-03-08 18:52:28,306 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4377, 5.0228, 5.4930, 5.3389, 5.4435, 5.9629, 5.6919, 5.4424], + device='cuda:2'), covar=tensor([0.0725, 0.0533, 0.0504, 0.0511, 0.1023, 0.0613, 0.0416, 0.1144], + device='cuda:2'), in_proj_covar=tensor([0.0236, 0.0173, 0.0180, 0.0180, 0.0219, 0.0263, 0.0169, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-08 18:52:36,512 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16001.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:52:49,716 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.3668, 3.5707, 5.1443, 4.2050, 3.4404, 2.7655, 4.4379, 5.1667], + device='cuda:2'), covar=tensor([0.0879, 0.1400, 0.0039, 0.0234, 0.0700, 0.1030, 0.0249, 0.0059], + device='cuda:2'), in_proj_covar=tensor([0.0127, 0.0160, 0.0062, 0.0130, 0.0154, 0.0158, 0.0131, 0.0073], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-08 18:53:04,645 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.050e+02 4.332e+02 5.250e+02 6.313e+02 1.611e+03, threshold=1.050e+03, percent-clipped=2.0 +2023-03-08 18:53:16,911 INFO [train.py:898] (2/4) Epoch 5, batch 1500, loss[loss=0.2468, simple_loss=0.3148, pruned_loss=0.08938, over 18412.00 frames. 
], tot_loss[loss=0.236, simple_loss=0.3107, pruned_loss=0.08068, over 3587462.33 frames. ], batch size: 42, lr: 2.21e-02, grad_scale: 4.0 +2023-03-08 18:53:47,588 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16062.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:54:05,039 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7313, 4.4960, 4.6837, 3.5819, 3.6921, 3.4557, 2.8469, 2.5070], + device='cuda:2'), covar=tensor([0.0174, 0.0166, 0.0067, 0.0213, 0.0344, 0.0211, 0.0624, 0.0875], + device='cuda:2'), in_proj_covar=tensor([0.0038, 0.0036, 0.0029, 0.0044, 0.0059, 0.0036, 0.0060, 0.0066], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0003, 0.0004], + device='cuda:2') +2023-03-08 18:54:14,568 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16085.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:54:16,408 INFO [train.py:898] (2/4) Epoch 5, batch 1550, loss[loss=0.2453, simple_loss=0.3176, pruned_loss=0.08651, over 18392.00 frames. ], tot_loss[loss=0.2361, simple_loss=0.3107, pruned_loss=0.0807, over 3580544.29 frames. ], batch size: 52, lr: 2.21e-02, grad_scale: 4.0 +2023-03-08 18:55:02,224 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.998e+02 4.560e+02 5.884e+02 7.024e+02 2.005e+03, threshold=1.177e+03, percent-clipped=3.0 +2023-03-08 18:55:14,719 INFO [train.py:898] (2/4) Epoch 5, batch 1600, loss[loss=0.2415, simple_loss=0.3215, pruned_loss=0.0808, over 18310.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3114, pruned_loss=0.08108, over 3573511.29 frames. ], batch size: 57, lr: 2.21e-02, grad_scale: 8.0 +2023-03-08 18:55:26,135 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16146.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:55:26,477 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-08 18:55:29,589 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16149.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:56:12,426 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-08 18:56:14,014 INFO [train.py:898] (2/4) Epoch 5, batch 1650, loss[loss=0.2437, simple_loss=0.3177, pruned_loss=0.08483, over 18217.00 frames. ], tot_loss[loss=0.2365, simple_loss=0.3113, pruned_loss=0.08087, over 3578414.85 frames. ], batch size: 60, lr: 2.20e-02, grad_scale: 8.0 +2023-03-08 18:56:42,330 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16210.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:57:00,821 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.794e+02 4.512e+02 5.424e+02 6.724e+02 1.524e+03, threshold=1.085e+03, percent-clipped=4.0 +2023-03-08 18:57:13,388 INFO [train.py:898] (2/4) Epoch 5, batch 1700, loss[loss=0.2518, simple_loss=0.3276, pruned_loss=0.08801, over 18121.00 frames. ], tot_loss[loss=0.237, simple_loss=0.3116, pruned_loss=0.08123, over 3568552.34 frames. 
], batch size: 62, lr: 2.20e-02, grad_scale: 8.0 +2023-03-08 18:57:15,872 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16239.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:57:39,744 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6805, 3.2709, 3.2328, 2.7642, 3.2136, 2.6770, 2.3465, 3.6501], + device='cuda:2'), covar=tensor([0.0036, 0.0089, 0.0082, 0.0135, 0.0087, 0.0170, 0.0217, 0.0055], + device='cuda:2'), in_proj_covar=tensor([0.0053, 0.0069, 0.0063, 0.0097, 0.0062, 0.0100, 0.0111, 0.0060], + device='cuda:2'), out_proj_covar=tensor([7.7592e-05, 1.0996e-04, 1.0259e-04, 1.5883e-04, 9.6760e-05, 1.6002e-04, + 1.7748e-04, 9.3982e-05], device='cuda:2') +2023-03-08 18:57:40,838 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5269, 3.1193, 1.3215, 4.2747, 2.8823, 4.5348, 1.5932, 3.5803], + device='cuda:2'), covar=tensor([0.0414, 0.0744, 0.1591, 0.0255, 0.0801, 0.0088, 0.1465, 0.0408], + device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0187, 0.0162, 0.0159, 0.0160, 0.0114, 0.0167, 0.0157], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 18:57:41,963 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5589, 2.8750, 2.2780, 2.6996, 3.3758, 3.4460, 2.7284, 2.9517], + device='cuda:2'), covar=tensor([0.0228, 0.0322, 0.0879, 0.0372, 0.0285, 0.0208, 0.0469, 0.0408], + device='cuda:2'), in_proj_covar=tensor([0.0099, 0.0074, 0.0132, 0.0101, 0.0071, 0.0054, 0.0094, 0.0100], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 18:58:04,380 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2207, 4.3143, 2.1745, 4.2635, 5.1580, 2.1349, 3.7132, 3.5519], + device='cuda:2'), covar=tensor([0.0056, 0.0903, 0.1603, 0.0457, 0.0035, 0.1375, 0.0641, 0.0738], + device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0150, 0.0170, 0.0166, 0.0068, 0.0158, 0.0177, 0.0173], + device='cuda:2'), out_proj_covar=tensor([1.0740e-04, 2.1589e-04, 2.2263e-04, 2.2314e-04, 9.4955e-05, 2.1470e-04, + 2.3076e-04, 2.3246e-04], device='cuda:2') +2023-03-08 18:58:13,027 INFO [train.py:898] (2/4) Epoch 5, batch 1750, loss[loss=0.2287, simple_loss=0.3067, pruned_loss=0.07533, over 18382.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.3115, pruned_loss=0.0807, over 3575680.99 frames. ], batch size: 50, lr: 2.20e-02, grad_scale: 8.0 +2023-03-08 18:58:58,895 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.349e+02 4.407e+02 5.169e+02 6.479e+02 1.420e+03, threshold=1.034e+03, percent-clipped=4.0 +2023-03-08 18:59:11,657 INFO [train.py:898] (2/4) Epoch 5, batch 1800, loss[loss=0.2379, simple_loss=0.3127, pruned_loss=0.08159, over 18547.00 frames. ], tot_loss[loss=0.2356, simple_loss=0.3105, pruned_loss=0.08037, over 3581170.63 frames. ], batch size: 49, lr: 2.19e-02, grad_scale: 8.0 +2023-03-08 18:59:35,183 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16357.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 18:59:52,292 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16371.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:00:10,468 INFO [train.py:898] (2/4) Epoch 5, batch 1850, loss[loss=0.277, simple_loss=0.3381, pruned_loss=0.1079, over 15920.00 frames. ], tot_loss[loss=0.2372, simple_loss=0.312, pruned_loss=0.08122, over 3575112.38 frames. 
], batch size: 94, lr: 2.19e-02, grad_scale: 8.0 +2023-03-08 19:00:36,466 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1729, 5.3642, 2.9719, 5.1059, 5.0328, 5.4173, 5.1341, 2.6147], + device='cuda:2'), covar=tensor([0.0151, 0.0040, 0.0603, 0.0061, 0.0061, 0.0039, 0.0077, 0.0977], + device='cuda:2'), in_proj_covar=tensor([0.0064, 0.0050, 0.0081, 0.0063, 0.0060, 0.0048, 0.0063, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-08 19:00:55,915 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.146e+02 4.944e+02 5.980e+02 8.174e+02 1.619e+03, threshold=1.196e+03, percent-clipped=7.0 +2023-03-08 19:01:03,169 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16432.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:01:09,063 INFO [train.py:898] (2/4) Epoch 5, batch 1900, loss[loss=0.2261, simple_loss=0.3089, pruned_loss=0.07163, over 18259.00 frames. ], tot_loss[loss=0.2376, simple_loss=0.3126, pruned_loss=0.08135, over 3572971.65 frames. ], batch size: 60, lr: 2.19e-02, grad_scale: 8.0 +2023-03-08 19:01:13,690 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16441.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:02:07,352 INFO [train.py:898] (2/4) Epoch 5, batch 1950, loss[loss=0.2536, simple_loss=0.3275, pruned_loss=0.08984, over 18363.00 frames. ], tot_loss[loss=0.2377, simple_loss=0.3123, pruned_loss=0.0815, over 3566571.39 frames. ], batch size: 56, lr: 2.19e-02, grad_scale: 8.0 +2023-03-08 19:02:26,346 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.98 vs. limit=2.0 +2023-03-08 19:02:28,079 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16505.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:02:53,790 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.181e+02 4.721e+02 5.735e+02 6.721e+02 1.938e+03, threshold=1.147e+03, percent-clipped=5.0 +2023-03-08 19:03:06,041 INFO [train.py:898] (2/4) Epoch 5, batch 2000, loss[loss=0.2696, simple_loss=0.3423, pruned_loss=0.09845, over 17133.00 frames. ], tot_loss[loss=0.2384, simple_loss=0.313, pruned_loss=0.0819, over 3566479.36 frames. ], batch size: 78, lr: 2.18e-02, grad_scale: 8.0 +2023-03-08 19:03:09,106 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16539.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:03:26,187 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.3375, 5.0366, 5.0695, 4.8519, 4.6442, 4.7928, 4.2208, 4.7983], + device='cuda:2'), covar=tensor([0.0277, 0.0318, 0.0204, 0.0240, 0.0350, 0.0245, 0.1265, 0.0267], + device='cuda:2'), in_proj_covar=tensor([0.0124, 0.0169, 0.0152, 0.0145, 0.0161, 0.0169, 0.0236, 0.0148], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0004], + device='cuda:2') +2023-03-08 19:04:04,802 INFO [train.py:898] (2/4) Epoch 5, batch 2050, loss[loss=0.2765, simple_loss=0.3504, pruned_loss=0.1013, over 18285.00 frames. ], tot_loss[loss=0.2368, simple_loss=0.3116, pruned_loss=0.08103, over 3578753.11 frames. ], batch size: 57, lr: 2.18e-02, grad_scale: 8.0 +2023-03-08 19:04:05,083 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16587.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:04:28,138 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.97 vs. 
limit=2.0 +2023-03-08 19:04:51,606 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.823e+02 4.363e+02 5.469e+02 6.595e+02 1.217e+03, threshold=1.094e+03, percent-clipped=2.0 +2023-03-08 19:05:04,761 INFO [train.py:898] (2/4) Epoch 5, batch 2100, loss[loss=0.2023, simple_loss=0.2727, pruned_loss=0.06596, over 17678.00 frames. ], tot_loss[loss=0.2366, simple_loss=0.3113, pruned_loss=0.08098, over 3559676.79 frames. ], batch size: 39, lr: 2.18e-02, grad_scale: 8.0 +2023-03-08 19:05:28,421 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16657.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:05:29,512 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3301, 4.9897, 5.4512, 5.3372, 5.1818, 5.9765, 5.5553, 5.4323], + device='cuda:2'), covar=tensor([0.0752, 0.0536, 0.0534, 0.0469, 0.1110, 0.0573, 0.0476, 0.1069], + device='cuda:2'), in_proj_covar=tensor([0.0237, 0.0178, 0.0182, 0.0177, 0.0220, 0.0266, 0.0169, 0.0257], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-08 19:06:04,045 INFO [train.py:898] (2/4) Epoch 5, batch 2150, loss[loss=0.2376, simple_loss=0.3165, pruned_loss=0.07935, over 18128.00 frames. ], tot_loss[loss=0.2354, simple_loss=0.3104, pruned_loss=0.08026, over 3562653.52 frames. ], batch size: 62, lr: 2.17e-02, grad_scale: 8.0 +2023-03-08 19:06:24,764 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16705.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:06:49,438 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.208e+02 4.846e+02 5.504e+02 7.182e+02 1.365e+03, threshold=1.101e+03, percent-clipped=1.0 +2023-03-08 19:06:50,857 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16727.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:07:02,287 INFO [train.py:898] (2/4) Epoch 5, batch 2200, loss[loss=0.2254, simple_loss=0.2885, pruned_loss=0.08119, over 18440.00 frames. ], tot_loss[loss=0.2363, simple_loss=0.3108, pruned_loss=0.08093, over 3561475.99 frames. ], batch size: 42, lr: 2.17e-02, grad_scale: 8.0 +2023-03-08 19:07:03,751 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7353, 3.3772, 1.7187, 4.4871, 3.0111, 4.7202, 2.1970, 4.0438], + device='cuda:2'), covar=tensor([0.0424, 0.0872, 0.1503, 0.0308, 0.0879, 0.0119, 0.1114, 0.0320], + device='cuda:2'), in_proj_covar=tensor([0.0153, 0.0189, 0.0162, 0.0161, 0.0163, 0.0120, 0.0166, 0.0155], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 19:07:06,943 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16741.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:07:39,914 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4706, 3.3640, 3.1566, 2.8314, 3.3749, 2.7300, 2.6560, 3.6159], + device='cuda:2'), covar=tensor([0.0043, 0.0078, 0.0097, 0.0117, 0.0065, 0.0145, 0.0161, 0.0052], + device='cuda:2'), in_proj_covar=tensor([0.0054, 0.0068, 0.0063, 0.0100, 0.0063, 0.0104, 0.0109, 0.0059], + device='cuda:2'), out_proj_covar=tensor([7.9187e-05, 1.0684e-04, 1.0274e-04, 1.6219e-04, 9.8267e-05, 1.6583e-04, + 1.7306e-04, 9.2402e-05], device='cuda:2') +2023-03-08 19:08:01,109 INFO [train.py:898] (2/4) Epoch 5, batch 2250, loss[loss=0.2314, simple_loss=0.3174, pruned_loss=0.07266, over 18633.00 frames. 
], tot_loss[loss=0.236, simple_loss=0.3108, pruned_loss=0.08063, over 3571746.93 frames. ], batch size: 52, lr: 2.17e-02, grad_scale: 8.0 +2023-03-08 19:08:03,426 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16789.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:08:18,985 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5765, 5.2604, 5.2991, 5.1237, 4.8100, 5.1264, 4.4603, 5.0146], + device='cuda:2'), covar=tensor([0.0202, 0.0214, 0.0144, 0.0166, 0.0291, 0.0190, 0.0956, 0.0258], + device='cuda:2'), in_proj_covar=tensor([0.0125, 0.0168, 0.0150, 0.0144, 0.0162, 0.0168, 0.0238, 0.0151], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005, 0.0006, 0.0005], + device='cuda:2') +2023-03-08 19:08:22,309 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16805.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:08:24,056 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-08 19:08:46,499 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.297e+02 4.310e+02 5.501e+02 7.389e+02 1.420e+03, threshold=1.100e+03, percent-clipped=2.0 +2023-03-08 19:09:00,055 INFO [train.py:898] (2/4) Epoch 5, batch 2300, loss[loss=0.3043, simple_loss=0.362, pruned_loss=0.1233, over 18473.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.3103, pruned_loss=0.08056, over 3583135.32 frames. ], batch size: 59, lr: 2.16e-02, grad_scale: 8.0 +2023-03-08 19:09:17,950 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=16853.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:09:35,514 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16868.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 19:09:54,496 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9850, 1.8147, 3.4119, 2.9038, 3.5207, 5.1819, 4.2646, 4.3127], + device='cuda:2'), covar=tensor([0.0358, 0.0951, 0.0676, 0.0570, 0.0929, 0.0027, 0.0219, 0.0155], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0196, 0.0162, 0.0190, 0.0280, 0.0105, 0.0167, 0.0142], + device='cuda:2'), out_proj_covar=tensor([1.1372e-04, 1.4726e-04, 1.3103e-04, 1.3227e-04, 2.0889e-04, 7.2798e-05, + 1.2526e-04, 1.0677e-04], device='cuda:2') +2023-03-08 19:09:57,304 INFO [train.py:898] (2/4) Epoch 5, batch 2350, loss[loss=0.2175, simple_loss=0.2918, pruned_loss=0.07153, over 18535.00 frames. ], tot_loss[loss=0.2364, simple_loss=0.3112, pruned_loss=0.08078, over 3595467.17 frames. ], batch size: 44, lr: 2.16e-02, grad_scale: 8.0 +2023-03-08 19:10:16,714 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.89 vs. limit=5.0 +2023-03-08 19:10:19,809 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16906.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:10:42,829 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.402e+02 4.892e+02 6.073e+02 7.229e+02 1.276e+03, threshold=1.215e+03, percent-clipped=6.0 +2023-03-08 19:10:47,082 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16929.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 19:10:56,187 INFO [train.py:898] (2/4) Epoch 5, batch 2400, loss[loss=0.2921, simple_loss=0.3484, pruned_loss=0.1179, over 12462.00 frames. ], tot_loss[loss=0.2359, simple_loss=0.3108, pruned_loss=0.0805, over 3595260.80 frames. 
], batch size: 129, lr: 2.16e-02, grad_scale: 8.0 +2023-03-08 19:11:05,856 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9475, 2.6099, 3.8810, 4.0185, 2.3762, 4.2501, 3.7753, 2.6916], + device='cuda:2'), covar=tensor([0.0214, 0.1109, 0.0180, 0.0121, 0.1183, 0.0114, 0.0377, 0.0842], + device='cuda:2'), in_proj_covar=tensor([0.0140, 0.0181, 0.0098, 0.0104, 0.0178, 0.0130, 0.0143, 0.0172], + device='cuda:2'), out_proj_covar=tensor([1.3588e-04, 1.7555e-04, 1.0273e-04, 9.9450e-05, 1.6899e-04, 1.2328e-04, + 1.4613e-04, 1.7131e-04], device='cuda:2') +2023-03-08 19:11:31,428 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16967.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 19:11:54,535 INFO [train.py:898] (2/4) Epoch 5, batch 2450, loss[loss=0.2505, simple_loss=0.324, pruned_loss=0.08845, over 17044.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.3105, pruned_loss=0.0802, over 3596819.78 frames. ], batch size: 78, lr: 2.16e-02, grad_scale: 8.0 +2023-03-08 19:11:55,214 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-08 19:12:39,638 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.725e+02 4.440e+02 5.461e+02 7.873e+02 1.788e+03, threshold=1.092e+03, percent-clipped=5.0 +2023-03-08 19:12:41,248 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17027.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:12:53,114 INFO [train.py:898] (2/4) Epoch 5, batch 2500, loss[loss=0.2395, simple_loss=0.3242, pruned_loss=0.07742, over 17876.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.3094, pruned_loss=0.07974, over 3588017.78 frames. ], batch size: 70, lr: 2.15e-02, grad_scale: 8.0 +2023-03-08 19:12:53,609 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.1922, 3.4171, 5.0113, 4.0613, 3.2580, 3.0751, 4.3218, 5.0778], + device='cuda:2'), covar=tensor([0.1059, 0.1483, 0.0057, 0.0348, 0.0795, 0.0978, 0.0297, 0.0062], + device='cuda:2'), in_proj_covar=tensor([0.0131, 0.0170, 0.0067, 0.0136, 0.0160, 0.0162, 0.0138, 0.0083], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0002, 0.0003, 0.0002, 0.0001], + device='cuda:2') +2023-03-08 19:13:18,967 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-08 19:13:37,617 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17075.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:13:51,010 INFO [train.py:898] (2/4) Epoch 5, batch 2550, loss[loss=0.2332, simple_loss=0.3171, pruned_loss=0.07462, over 18500.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.309, pruned_loss=0.0792, over 3602027.24 frames. ], batch size: 51, lr: 2.15e-02, grad_scale: 8.0 +2023-03-08 19:14:36,784 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.128e+02 4.504e+02 5.454e+02 6.580e+02 1.151e+03, threshold=1.091e+03, percent-clipped=2.0 +2023-03-08 19:14:49,106 INFO [train.py:898] (2/4) Epoch 5, batch 2600, loss[loss=0.2579, simple_loss=0.34, pruned_loss=0.08793, over 17980.00 frames. ], tot_loss[loss=0.2337, simple_loss=0.3088, pruned_loss=0.07932, over 3597822.09 frames. ], batch size: 65, lr: 2.15e-02, grad_scale: 8.0 +2023-03-08 19:15:47,095 INFO [train.py:898] (2/4) Epoch 5, batch 2650, loss[loss=0.2561, simple_loss=0.3272, pruned_loss=0.09249, over 17992.00 frames. ], tot_loss[loss=0.2351, simple_loss=0.3099, pruned_loss=0.08014, over 3594055.06 frames. 
], batch size: 65, lr: 2.14e-02, grad_scale: 8.0 +2023-03-08 19:15:49,564 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17189.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:16:10,459 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6559, 2.4029, 4.1722, 3.9760, 2.1029, 4.5224, 3.4985, 2.8211], + device='cuda:2'), covar=tensor([0.0357, 0.1362, 0.0117, 0.0191, 0.1658, 0.0112, 0.0432, 0.0941], + device='cuda:2'), in_proj_covar=tensor([0.0145, 0.0185, 0.0101, 0.0107, 0.0185, 0.0136, 0.0150, 0.0174], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 19:16:31,339 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17224.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 19:16:33,376 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.843e+02 4.713e+02 5.523e+02 7.197e+02 1.239e+03, threshold=1.105e+03, percent-clipped=3.0 +2023-03-08 19:16:35,255 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-08 19:16:45,586 INFO [train.py:898] (2/4) Epoch 5, batch 2700, loss[loss=0.213, simple_loss=0.2869, pruned_loss=0.06955, over 18271.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.3098, pruned_loss=0.07996, over 3594276.58 frames. ], batch size: 49, lr: 2.14e-02, grad_scale: 8.0 +2023-03-08 19:17:01,453 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17250.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:17:15,277 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17262.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 19:17:43,514 INFO [train.py:898] (2/4) Epoch 5, batch 2750, loss[loss=0.2308, simple_loss=0.3103, pruned_loss=0.07566, over 18566.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.3098, pruned_loss=0.07964, over 3591447.23 frames. ], batch size: 54, lr: 2.14e-02, grad_scale: 8.0 +2023-03-08 19:17:50,881 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5200, 3.3255, 1.6221, 4.1509, 2.6104, 4.5651, 2.3307, 3.8313], + device='cuda:2'), covar=tensor([0.0453, 0.0627, 0.1401, 0.0322, 0.0853, 0.0142, 0.0988, 0.0314], + device='cuda:2'), in_proj_covar=tensor([0.0155, 0.0189, 0.0166, 0.0166, 0.0165, 0.0122, 0.0171, 0.0161], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 19:18:29,772 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.562e+02 4.313e+02 5.357e+02 6.708e+02 1.255e+03, threshold=1.071e+03, percent-clipped=3.0 +2023-03-08 19:18:42,600 INFO [train.py:898] (2/4) Epoch 5, batch 2800, loss[loss=0.2544, simple_loss=0.3304, pruned_loss=0.08917, over 18355.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.3096, pruned_loss=0.07965, over 3584302.72 frames. 
], batch size: 56, lr: 2.14e-02, grad_scale: 8.0 +2023-03-08 19:18:53,132 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7635, 2.8973, 2.5154, 2.8565, 3.5713, 3.7229, 2.8809, 3.3234], + device='cuda:2'), covar=tensor([0.0281, 0.0260, 0.0858, 0.0424, 0.0258, 0.0203, 0.0447, 0.0349], + device='cuda:2'), in_proj_covar=tensor([0.0103, 0.0075, 0.0133, 0.0104, 0.0075, 0.0057, 0.0097, 0.0099], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 19:19:10,342 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-08 19:19:12,285 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17362.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:19:19,558 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7182, 1.7181, 3.2586, 2.5616, 3.5005, 4.8098, 4.1224, 4.3147], + device='cuda:2'), covar=tensor([0.0394, 0.0942, 0.0667, 0.0631, 0.0896, 0.0027, 0.0226, 0.0126], + device='cuda:2'), in_proj_covar=tensor([0.0154, 0.0200, 0.0168, 0.0191, 0.0286, 0.0109, 0.0170, 0.0144], + device='cuda:2'), out_proj_covar=tensor([1.1435e-04, 1.4958e-04, 1.3502e-04, 1.3099e-04, 2.1150e-04, 7.4422e-05, + 1.2511e-04, 1.0705e-04], device='cuda:2') +2023-03-08 19:19:36,925 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17383.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 19:19:41,212 INFO [train.py:898] (2/4) Epoch 5, batch 2850, loss[loss=0.2085, simple_loss=0.28, pruned_loss=0.0685, over 18488.00 frames. ], tot_loss[loss=0.2342, simple_loss=0.3092, pruned_loss=0.07964, over 3587357.82 frames. ], batch size: 44, lr: 2.13e-02, grad_scale: 8.0 +2023-03-08 19:19:54,117 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.84 vs. limit=5.0 +2023-03-08 19:20:13,563 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7336, 4.5714, 4.6591, 4.4457, 4.4309, 4.5808, 5.0043, 4.9700], + device='cuda:2'), covar=tensor([0.0054, 0.0099, 0.0089, 0.0086, 0.0082, 0.0095, 0.0059, 0.0096], + device='cuda:2'), in_proj_covar=tensor([0.0062, 0.0049, 0.0048, 0.0061, 0.0053, 0.0070, 0.0058, 0.0059], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 19:20:24,503 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17423.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:20:27,503 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.643e+02 4.399e+02 5.133e+02 6.375e+02 2.228e+03, threshold=1.027e+03, percent-clipped=3.0 +2023-03-08 19:20:38,420 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8219, 4.9265, 4.8478, 4.6091, 4.5440, 4.6267, 5.1896, 5.1444], + device='cuda:2'), covar=tensor([0.0055, 0.0077, 0.0063, 0.0082, 0.0084, 0.0110, 0.0058, 0.0081], + device='cuda:2'), in_proj_covar=tensor([0.0061, 0.0049, 0.0048, 0.0061, 0.0052, 0.0069, 0.0056, 0.0058], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 19:20:40,825 INFO [train.py:898] (2/4) Epoch 5, batch 2900, loss[loss=0.2155, simple_loss=0.2861, pruned_loss=0.07248, over 18141.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.3082, pruned_loss=0.07912, over 3590229.27 frames. 
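
The [scaling.py:679] "Whitening" entries track how far a module's activations are from a white (identity-like) covariance, per channel group: the metric is 1.0 for perfectly decorrelated, equal-variance channels and grows as the covariance degenerates, and it is compared against a limit (e.g. metric=1.93 vs. limit=2.0 above), beyond which a corrective gradient is presumably applied. A plausible reconstruction of such a metric, not the exact icefall formula:

    import torch

    def whitening_metric(x, num_groups):
        # x: (num_frames, num_channels) activations.
        num_frames, num_channels = x.shape
        g = num_channels // num_groups
        x = x.reshape(num_frames, num_groups, g).transpose(0, 1)  # (G, T, g)
        cov = torch.matmul(x.transpose(1, 2), x) / num_frames     # (G, g, g)
        # g * sum(cov**2) / trace(cov)**2 equals 1.0 when cov is a
        # multiple of the identity, and is > 1.0 otherwise.
        num = (cov ** 2).sum(dim=(1, 2)) * g
        den = torch.diagonal(cov, dim1=1, dim2=2).sum(dim=1) ** 2
        return (num / den).mean()
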
], batch size: 44, lr: 2.13e-02, grad_scale: 8.0 +2023-03-08 19:20:43,556 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2265, 5.5458, 3.0006, 5.2624, 5.1841, 5.5796, 5.3531, 2.7112], + device='cuda:2'), covar=tensor([0.0139, 0.0053, 0.0677, 0.0058, 0.0072, 0.0061, 0.0116, 0.0972], + device='cuda:2'), in_proj_covar=tensor([0.0062, 0.0049, 0.0080, 0.0064, 0.0060, 0.0050, 0.0063, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-08 19:20:49,147 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17444.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 19:21:38,384 INFO [train.py:898] (2/4) Epoch 5, batch 2950, loss[loss=0.2326, simple_loss=0.3166, pruned_loss=0.07431, over 18284.00 frames. ], tot_loss[loss=0.2325, simple_loss=0.3075, pruned_loss=0.07873, over 3597599.38 frames. ], batch size: 57, lr: 2.13e-02, grad_scale: 8.0 +2023-03-08 19:22:22,678 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17524.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 19:22:24,447 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.921e+02 4.474e+02 5.609e+02 7.240e+02 1.382e+03, threshold=1.122e+03, percent-clipped=5.0 +2023-03-08 19:22:36,711 INFO [train.py:898] (2/4) Epoch 5, batch 3000, loss[loss=0.2641, simple_loss=0.3361, pruned_loss=0.09603, over 18352.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.3074, pruned_loss=0.07856, over 3604298.69 frames. ], batch size: 55, lr: 2.12e-02, grad_scale: 8.0 +2023-03-08 19:22:36,712 INFO [train.py:923] (2/4) Computing validation loss +2023-03-08 19:22:48,699 INFO [train.py:932] (2/4) Epoch 5, validation: loss=0.1806, simple_loss=0.2829, pruned_loss=0.03918, over 944034.00 frames. +2023-03-08 19:22:48,700 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-08 19:22:58,793 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17545.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:23:19,431 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17562.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 19:23:30,587 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17572.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 19:23:47,563 INFO [train.py:898] (2/4) Epoch 5, batch 3050, loss[loss=0.2424, simple_loss=0.3227, pruned_loss=0.08101, over 18336.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.307, pruned_loss=0.07839, over 3590832.66 frames. ], batch size: 55, lr: 2.12e-02, grad_scale: 8.0 +2023-03-08 19:24:15,395 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17610.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:24:33,682 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.113e+02 4.467e+02 5.746e+02 6.739e+02 1.721e+03, threshold=1.149e+03, percent-clipped=3.0 +2023-03-08 19:24:46,752 INFO [train.py:898] (2/4) Epoch 5, batch 3100, loss[loss=0.1987, simple_loss=0.2753, pruned_loss=0.06107, over 17647.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.3056, pruned_loss=0.07769, over 3595363.92 frames. ], batch size: 39, lr: 2.12e-02, grad_scale: 8.0 +2023-03-08 19:25:29,286 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. 
limit=2.0 +2023-03-08 19:25:45,576 INFO [train.py:898] (2/4) Epoch 5, batch 3150, loss[loss=0.2223, simple_loss=0.2912, pruned_loss=0.07669, over 18349.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3051, pruned_loss=0.07749, over 3598014.32 frames. ], batch size: 46, lr: 2.12e-02, grad_scale: 8.0 +2023-03-08 19:26:00,879 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17700.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:26:22,154 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17718.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:26:31,145 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.328e+02 4.460e+02 5.383e+02 6.414e+02 1.196e+03, threshold=1.077e+03, percent-clipped=2.0 +2023-03-08 19:26:35,970 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17730.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:26:44,039 INFO [train.py:898] (2/4) Epoch 5, batch 3200, loss[loss=0.2663, simple_loss=0.3466, pruned_loss=0.09306, over 18292.00 frames. ], tot_loss[loss=0.23, simple_loss=0.3049, pruned_loss=0.0775, over 3600322.72 frames. ], batch size: 57, lr: 2.11e-02, grad_scale: 8.0 +2023-03-08 19:26:46,689 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17739.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 19:27:06,852 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-08 19:27:12,105 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17761.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:27:13,665 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9335, 2.7392, 4.2445, 4.2946, 2.5511, 4.6107, 4.0235, 2.5139], + device='cuda:2'), covar=tensor([0.0241, 0.1049, 0.0201, 0.0121, 0.1348, 0.0107, 0.0295, 0.1067], + device='cuda:2'), in_proj_covar=tensor([0.0149, 0.0185, 0.0102, 0.0110, 0.0187, 0.0140, 0.0150, 0.0171], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 19:27:42,483 INFO [train.py:898] (2/4) Epoch 5, batch 3250, loss[loss=0.2012, simple_loss=0.282, pruned_loss=0.06017, over 18549.00 frames. ], tot_loss[loss=0.2303, simple_loss=0.3054, pruned_loss=0.07759, over 3588866.01 frames. ], batch size: 49, lr: 2.11e-02, grad_scale: 8.0 +2023-03-08 19:27:47,401 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17791.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:27:52,848 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9028, 4.8531, 4.3214, 4.8030, 4.7915, 4.3161, 4.7978, 4.4768], + device='cuda:2'), covar=tensor([0.0407, 0.0499, 0.1698, 0.0684, 0.0487, 0.0430, 0.0348, 0.0949], + device='cuda:2'), in_proj_covar=tensor([0.0308, 0.0345, 0.0504, 0.0271, 0.0256, 0.0323, 0.0332, 0.0438], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-08 19:28:28,459 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.849e+02 4.466e+02 5.558e+02 6.730e+02 1.712e+03, threshold=1.112e+03, percent-clipped=5.0 +2023-03-08 19:28:40,516 INFO [train.py:898] (2/4) Epoch 5, batch 3300, loss[loss=0.2572, simple_loss=0.3305, pruned_loss=0.09193, over 15989.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.3065, pruned_loss=0.07845, over 3579138.85 frames. 
], batch size: 94, lr: 2.11e-02, grad_scale: 16.0 +2023-03-08 19:28:41,363 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-08 19:28:50,447 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17845.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:29:00,756 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9528, 3.0962, 4.1491, 4.2896, 2.4527, 4.6217, 4.0968, 2.7069], + device='cuda:2'), covar=tensor([0.0317, 0.0941, 0.0158, 0.0125, 0.1399, 0.0111, 0.0288, 0.0888], + device='cuda:2'), in_proj_covar=tensor([0.0148, 0.0185, 0.0102, 0.0109, 0.0184, 0.0140, 0.0149, 0.0170], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 19:29:39,234 INFO [train.py:898] (2/4) Epoch 5, batch 3350, loss[loss=0.2295, simple_loss=0.3156, pruned_loss=0.07172, over 18492.00 frames. ], tot_loss[loss=0.2328, simple_loss=0.3079, pruned_loss=0.07882, over 3581234.11 frames. ], batch size: 53, lr: 2.11e-02, grad_scale: 16.0 +2023-03-08 19:29:46,132 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=17893.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:30:06,850 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.76 vs. limit=2.0 +2023-03-08 19:30:25,124 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.611e+02 4.192e+02 5.439e+02 6.827e+02 1.413e+03, threshold=1.088e+03, percent-clipped=2.0 +2023-03-08 19:30:38,046 INFO [train.py:898] (2/4) Epoch 5, batch 3400, loss[loss=0.2771, simple_loss=0.347, pruned_loss=0.1036, over 16201.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.3085, pruned_loss=0.0792, over 3582259.82 frames. ], batch size: 94, lr: 2.10e-02, grad_scale: 16.0 +2023-03-08 19:30:59,425 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4666, 4.3437, 5.3294, 3.6928, 4.4821, 3.1151, 3.3678, 2.5090], + device='cuda:2'), covar=tensor([0.0522, 0.0449, 0.0038, 0.0335, 0.0451, 0.1458, 0.1482, 0.1266], + device='cuda:2'), in_proj_covar=tensor([0.0165, 0.0175, 0.0084, 0.0137, 0.0190, 0.0223, 0.0201, 0.0183], + device='cuda:2'), out_proj_covar=tensor([1.5827e-04, 1.7337e-04, 8.7128e-05, 1.3496e-04, 1.8774e-04, 2.1592e-04, + 2.0394e-04, 1.8129e-04], device='cuda:2') +2023-03-08 19:31:04,929 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17960.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:31:36,928 INFO [train.py:898] (2/4) Epoch 5, batch 3450, loss[loss=0.25, simple_loss=0.3292, pruned_loss=0.08541, over 18613.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3078, pruned_loss=0.07878, over 3584792.41 frames. 
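
The per-batch loss in these train.py entries is consistent with a fixed weighted sum of the two pruned-RNNT terms, loss = 0.5 * simple_loss + 1.0 * pruned_loss: the "Epoch 5, batch 3350" entry above gives 0.5 * 0.3156 + 0.07172 = 0.2295, matching the logged value. The scales are inferred from the logged numbers, not read from the code; tot_loss is a running aggregate over recently seen frames, which is why it moves slowly. A one-line sketch:

    def combined_loss(simple_loss, pruned_loss,
                      simple_loss_scale=0.5, pruned_loss_scale=1.0):
        # Scales inferred from the logged losses above (assumption).
        return simple_loss_scale * simple_loss + pruned_loss_scale * pruned_loss
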
], batch size: 52, lr: 2.10e-02, grad_scale: 16.0 +2023-03-08 19:31:44,731 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2590, 4.3470, 2.4246, 4.5933, 5.3727, 2.5112, 3.7026, 4.0562], + device='cuda:2'), covar=tensor([0.0068, 0.0983, 0.1680, 0.0474, 0.0042, 0.1519, 0.0708, 0.0673], + device='cuda:2'), in_proj_covar=tensor([0.0083, 0.0165, 0.0175, 0.0169, 0.0072, 0.0160, 0.0178, 0.0180], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 19:32:17,443 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18018.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:32:20,939 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18021.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:32:26,801 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.873e+02 4.562e+02 5.412e+02 6.904e+02 1.746e+03, threshold=1.082e+03, percent-clipped=5.0 +2023-03-08 19:32:39,599 INFO [train.py:898] (2/4) Epoch 5, batch 3500, loss[loss=0.2173, simple_loss=0.306, pruned_loss=0.06432, over 18626.00 frames. ], tot_loss[loss=0.2327, simple_loss=0.3078, pruned_loss=0.0788, over 3588691.61 frames. ], batch size: 52, lr: 2.10e-02, grad_scale: 16.0 +2023-03-08 19:32:42,139 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18039.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 19:33:01,672 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18056.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:33:12,384 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18066.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:33:33,591 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18086.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:33:34,498 INFO [train.py:898] (2/4) Epoch 5, batch 3550, loss[loss=0.2367, simple_loss=0.3035, pruned_loss=0.08497, over 18299.00 frames. ], tot_loss[loss=0.2336, simple_loss=0.3084, pruned_loss=0.07934, over 3579827.61 frames. ], batch size: 49, lr: 2.09e-02, grad_scale: 16.0 +2023-03-08 19:33:34,635 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18087.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 19:34:10,861 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-08 19:34:17,501 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.924e+02 4.145e+02 5.139e+02 6.642e+02 1.282e+03, threshold=1.028e+03, percent-clipped=2.0 +2023-03-08 19:34:29,571 INFO [train.py:898] (2/4) Epoch 5, batch 3600, loss[loss=0.1824, simple_loss=0.2592, pruned_loss=0.05284, over 18493.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.308, pruned_loss=0.07908, over 3569457.28 frames. ], batch size: 44, lr: 2.09e-02, grad_scale: 16.0 +2023-03-08 19:35:34,904 INFO [train.py:898] (2/4) Epoch 6, batch 0, loss[loss=0.2083, simple_loss=0.2885, pruned_loss=0.06409, over 18371.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2885, pruned_loss=0.06409, over 18371.00 frames. 
], batch size: 46, lr: 1.95e-02, grad_scale: 16.0 +2023-03-08 19:35:34,904 INFO [train.py:923] (2/4) Computing validation loss +2023-03-08 19:35:45,293 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5404, 4.2801, 4.4033, 4.1747, 4.2790, 4.2768, 4.6162, 4.5902], + device='cuda:2'), covar=tensor([0.0056, 0.0088, 0.0078, 0.0096, 0.0083, 0.0107, 0.0082, 0.0104], + device='cuda:2'), in_proj_covar=tensor([0.0061, 0.0046, 0.0047, 0.0058, 0.0052, 0.0068, 0.0057, 0.0056], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 19:35:46,590 INFO [train.py:932] (2/4) Epoch 6, validation: loss=0.1816, simple_loss=0.2843, pruned_loss=0.0395, over 944034.00 frames. +2023-03-08 19:35:46,591 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-08 19:35:50,352 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18174.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:35:51,518 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18175.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:36:04,214 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-03-08 19:36:44,911 INFO [train.py:898] (2/4) Epoch 6, batch 50, loss[loss=0.2747, simple_loss=0.3458, pruned_loss=0.1018, over 18258.00 frames. ], tot_loss[loss=0.2334, simple_loss=0.3093, pruned_loss=0.07876, over 802251.20 frames. ], batch size: 60, lr: 1.95e-02, grad_scale: 8.0 +2023-03-08 19:36:52,253 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.724e+02 4.768e+02 5.691e+02 6.790e+02 1.877e+03, threshold=1.138e+03, percent-clipped=9.0 +2023-03-08 19:37:01,549 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18235.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:37:02,678 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18236.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:37:12,687 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3868, 5.1942, 5.4460, 5.4003, 5.2354, 6.0654, 5.6710, 5.4433], + device='cuda:2'), covar=tensor([0.0776, 0.0505, 0.0664, 0.0443, 0.1464, 0.0655, 0.0478, 0.1362], + device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0189, 0.0193, 0.0187, 0.0235, 0.0278, 0.0180, 0.0271], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-08 19:37:43,188 INFO [train.py:898] (2/4) Epoch 6, batch 100, loss[loss=0.2295, simple_loss=0.3052, pruned_loss=0.07683, over 18289.00 frames. ], tot_loss[loss=0.2282, simple_loss=0.3054, pruned_loss=0.07551, over 1428899.56 frames. ], batch size: 49, lr: 1.95e-02, grad_scale: 8.0 +2023-03-08 19:37:53,246 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8332, 1.8281, 2.9352, 2.6255, 3.4767, 4.6297, 4.1284, 4.0848], + device='cuda:2'), covar=tensor([0.0404, 0.1000, 0.0810, 0.0670, 0.0905, 0.0043, 0.0208, 0.0145], + device='cuda:2'), in_proj_covar=tensor([0.0159, 0.0208, 0.0182, 0.0197, 0.0289, 0.0113, 0.0176, 0.0147], + device='cuda:2'), out_proj_covar=tensor([1.1524e-04, 1.5241e-04, 1.4261e-04, 1.3372e-04, 2.1131e-04, 7.6157e-05, + 1.2798e-04, 1.0669e-04], device='cuda:2') +2023-03-08 19:37:53,480 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. 
limit=2.0 +2023-03-08 19:38:08,635 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6189, 3.0443, 4.2574, 4.1336, 2.7943, 4.6766, 4.1540, 2.5917], + device='cuda:2'), covar=tensor([0.0387, 0.1239, 0.0141, 0.0171, 0.1527, 0.0101, 0.0353, 0.1191], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0191, 0.0102, 0.0114, 0.0187, 0.0143, 0.0153, 0.0179], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 19:38:36,389 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18316.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:38:41,804 INFO [train.py:898] (2/4) Epoch 6, batch 150, loss[loss=0.2392, simple_loss=0.318, pruned_loss=0.08018, over 18582.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3025, pruned_loss=0.07427, over 1921358.55 frames. ], batch size: 54, lr: 1.94e-02, grad_scale: 8.0 +2023-03-08 19:38:48,544 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.997e+02 4.126e+02 4.935e+02 6.131e+02 1.362e+03, threshold=9.869e+02, percent-clipped=2.0 +2023-03-08 19:39:22,904 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18356.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:39:40,014 INFO [train.py:898] (2/4) Epoch 6, batch 200, loss[loss=0.2211, simple_loss=0.2984, pruned_loss=0.07188, over 18283.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.3034, pruned_loss=0.07493, over 2302909.27 frames. ], batch size: 49, lr: 1.94e-02, grad_scale: 8.0 +2023-03-08 19:39:44,681 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3181, 4.9449, 5.3834, 5.4077, 5.0214, 5.9059, 5.6367, 5.2503], + device='cuda:2'), covar=tensor([0.0864, 0.0737, 0.0648, 0.0555, 0.1703, 0.0740, 0.0469, 0.1836], + device='cuda:2'), in_proj_covar=tensor([0.0256, 0.0194, 0.0200, 0.0193, 0.0240, 0.0288, 0.0186, 0.0279], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-08 19:39:57,719 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18386.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:40:19,054 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18404.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:40:39,255 INFO [train.py:898] (2/4) Epoch 6, batch 250, loss[loss=0.2553, simple_loss=0.3241, pruned_loss=0.09326, over 12527.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.3043, pruned_loss=0.07571, over 2577790.10 frames. 
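
grad_scale in the train.py entries is fp16 dynamic loss scaling: it sits at 8.0 through most of epoch 5, doubles to 16.0 around batch 3300, is still 16.0 at the start of epoch 6, and is back at 8.0 by epoch 6 batch 50, which matches the grow-after-N-good-steps / halve-on-overflow behavior of torch.cuda.amp.GradScaler. A minimal sketch of that loop; compute_loss is a hypothetical stand-in, and the training script's actual step logic may differ:

    import torch

    scaler = torch.cuda.amp.GradScaler()

    def fp16_step(model, optimizer, batch):
        optimizer.zero_grad()
        with torch.cuda.amp.autocast():
            loss = compute_loss(model, batch)   # hypothetical helper
        scaler.scale(loss).backward()  # backprop the scaled loss
        scaler.step(optimizer)         # unscales grads; skips step on inf/nan
        scaler.update()                # grows or halves the scale -> grad_scale
        return loss.detach()
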
], batch size: 130, lr: 1.94e-02, grad_scale: 8.0 +2023-03-08 19:40:46,043 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.187e+02 4.523e+02 5.806e+02 6.957e+02 1.437e+03, threshold=1.161e+03, percent-clipped=4.0 +2023-03-08 19:40:54,351 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18434.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:40:58,579 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7385, 1.8574, 3.1430, 2.5751, 3.6532, 5.0832, 4.2820, 4.3806], + device='cuda:2'), covar=tensor([0.0489, 0.1086, 0.1146, 0.0778, 0.0976, 0.0034, 0.0257, 0.0168], + device='cuda:2'), in_proj_covar=tensor([0.0162, 0.0211, 0.0190, 0.0198, 0.0291, 0.0115, 0.0181, 0.0149], + device='cuda:2'), out_proj_covar=tensor([1.1793e-04, 1.5410e-04, 1.4665e-04, 1.3419e-04, 2.1324e-04, 7.7309e-05, + 1.3111e-04, 1.0859e-04], device='cuda:2') +2023-03-08 19:41:04,204 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2154, 1.9913, 3.3154, 2.9949, 3.9589, 5.3394, 4.5362, 4.4634], + device='cuda:2'), covar=tensor([0.0403, 0.1040, 0.0990, 0.0621, 0.0902, 0.0024, 0.0246, 0.0169], + device='cuda:2'), in_proj_covar=tensor([0.0162, 0.0210, 0.0189, 0.0198, 0.0290, 0.0115, 0.0181, 0.0148], + device='cuda:2'), out_proj_covar=tensor([1.1754e-04, 1.5355e-04, 1.4613e-04, 1.3366e-04, 2.1231e-04, 7.6994e-05, + 1.3070e-04, 1.0817e-04], device='cuda:2') +2023-03-08 19:41:07,820 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.90 vs. limit=5.0 +2023-03-08 19:41:38,868 INFO [train.py:898] (2/4) Epoch 6, batch 300, loss[loss=0.2069, simple_loss=0.2835, pruned_loss=0.06516, over 18499.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.3034, pruned_loss=0.07451, over 2815384.50 frames. ], batch size: 44, lr: 1.94e-02, grad_scale: 8.0 +2023-03-08 19:41:40,329 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4995, 3.2240, 1.7423, 4.2809, 2.9534, 4.5109, 2.0992, 3.9431], + device='cuda:2'), covar=tensor([0.0522, 0.0944, 0.1585, 0.0379, 0.0970, 0.0150, 0.1281, 0.0321], + device='cuda:2'), in_proj_covar=tensor([0.0155, 0.0191, 0.0163, 0.0171, 0.0167, 0.0129, 0.0172, 0.0160], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 19:42:01,360 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18490.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:42:08,393 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.21 vs. limit=5.0 +2023-03-08 19:42:21,837 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18508.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 19:42:36,903 INFO [train.py:898] (2/4) Epoch 6, batch 350, loss[loss=0.2293, simple_loss=0.3166, pruned_loss=0.07101, over 18632.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3021, pruned_loss=0.07386, over 2997180.48 frames. 
], batch size: 52, lr: 1.93e-02, grad_scale: 8.0 +2023-03-08 19:42:44,091 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.993e+02 4.095e+02 4.916e+02 6.743e+02 1.094e+03, threshold=9.831e+02, percent-clipped=0.0 +2023-03-08 19:42:47,459 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18530.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:42:48,605 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18531.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:43:11,083 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18551.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:43:31,833 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18569.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 19:43:33,755 INFO [train.py:898] (2/4) Epoch 6, batch 400, loss[loss=0.2556, simple_loss=0.3311, pruned_loss=0.09005, over 18483.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.3033, pruned_loss=0.07492, over 3122860.88 frames. ], batch size: 59, lr: 1.93e-02, grad_scale: 8.0 +2023-03-08 19:43:35,689 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2189, 5.8324, 5.2281, 5.5964, 5.2271, 5.3933, 5.8582, 5.8016], + device='cuda:2'), covar=tensor([0.1143, 0.0632, 0.0492, 0.0660, 0.1513, 0.0566, 0.0495, 0.0605], + device='cuda:2'), in_proj_covar=tensor([0.0411, 0.0339, 0.0264, 0.0372, 0.0515, 0.0371, 0.0425, 0.0341], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003], + device='cuda:2') +2023-03-08 19:44:07,862 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4634, 3.0333, 1.3899, 4.2291, 2.7042, 4.4456, 1.9696, 3.7546], + device='cuda:2'), covar=tensor([0.0481, 0.0991, 0.1728, 0.0289, 0.0955, 0.0151, 0.1261, 0.0332], + device='cuda:2'), in_proj_covar=tensor([0.0153, 0.0190, 0.0161, 0.0170, 0.0166, 0.0132, 0.0172, 0.0158], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 19:44:26,322 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18616.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:44:31,605 INFO [train.py:898] (2/4) Epoch 6, batch 450, loss[loss=0.2124, simple_loss=0.2851, pruned_loss=0.0698, over 18362.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.3029, pruned_loss=0.07486, over 3229592.58 frames. ], batch size: 46, lr: 1.93e-02, grad_scale: 8.0 +2023-03-08 19:44:38,640 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.446e+02 4.323e+02 5.172e+02 6.972e+02 1.405e+03, threshold=1.034e+03, percent-clipped=7.0 +2023-03-08 19:45:20,698 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18664.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:45:29,548 INFO [train.py:898] (2/4) Epoch 6, batch 500, loss[loss=0.211, simple_loss=0.3014, pruned_loss=0.06029, over 18344.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.3021, pruned_loss=0.07448, over 3315268.21 frames. 
], batch size: 55, lr: 1.93e-02, grad_scale: 8.0 +2023-03-08 19:45:33,131 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18674.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:46:03,546 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5347, 4.5335, 5.2861, 3.3629, 4.4654, 3.0472, 3.2641, 2.5473], + device='cuda:2'), covar=tensor([0.0476, 0.0332, 0.0047, 0.0381, 0.0457, 0.1392, 0.1556, 0.1158], + device='cuda:2'), in_proj_covar=tensor([0.0165, 0.0175, 0.0083, 0.0137, 0.0193, 0.0219, 0.0203, 0.0181], + device='cuda:2'), out_proj_covar=tensor([1.5739e-04, 1.7263e-04, 8.3851e-05, 1.3450e-04, 1.8863e-04, 2.1234e-04, + 2.0468e-04, 1.7850e-04], device='cuda:2') +2023-03-08 19:46:28,532 INFO [train.py:898] (2/4) Epoch 6, batch 550, loss[loss=0.234, simple_loss=0.315, pruned_loss=0.07657, over 18387.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.3035, pruned_loss=0.07489, over 3381373.49 frames. ], batch size: 52, lr: 1.92e-02, grad_scale: 8.0 +2023-03-08 19:46:35,264 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.775e+02 4.304e+02 5.425e+02 6.689e+02 1.717e+03, threshold=1.085e+03, percent-clipped=5.0 +2023-03-08 19:46:38,009 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9866, 5.2982, 2.8395, 5.1024, 4.9525, 5.3854, 5.0897, 2.3842], + device='cuda:2'), covar=tensor([0.0169, 0.0051, 0.0757, 0.0066, 0.0066, 0.0049, 0.0106, 0.1091], + device='cuda:2'), in_proj_covar=tensor([0.0064, 0.0051, 0.0083, 0.0066, 0.0061, 0.0051, 0.0066, 0.0087], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-08 19:46:45,310 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18735.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:47:26,400 INFO [train.py:898] (2/4) Epoch 6, batch 600, loss[loss=0.268, simple_loss=0.3386, pruned_loss=0.09875, over 18072.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3028, pruned_loss=0.07455, over 3438347.01 frames. ], batch size: 65, lr: 1.92e-02, grad_scale: 8.0 +2023-03-08 19:47:49,823 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.2586, 3.8115, 5.3123, 4.4365, 3.1649, 3.1691, 4.6141, 5.3653], + device='cuda:2'), covar=tensor([0.0921, 0.1260, 0.0045, 0.0225, 0.0785, 0.0896, 0.0236, 0.0083], + device='cuda:2'), in_proj_covar=tensor([0.0129, 0.0180, 0.0069, 0.0138, 0.0159, 0.0160, 0.0139, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0002, 0.0003, 0.0002, 0.0001], + device='cuda:2') +2023-03-08 19:48:25,645 INFO [train.py:898] (2/4) Epoch 6, batch 650, loss[loss=0.2031, simple_loss=0.281, pruned_loss=0.06262, over 18418.00 frames. ], tot_loss[loss=0.2262, simple_loss=0.303, pruned_loss=0.07472, over 3459355.68 frames. 
], batch size: 48, lr: 1.92e-02, grad_scale: 8.0 +2023-03-08 19:48:33,820 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.489e+02 4.300e+02 5.176e+02 5.986e+02 1.905e+03, threshold=1.035e+03, percent-clipped=4.0 +2023-03-08 19:48:37,591 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18830.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:48:37,647 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8453, 4.3185, 4.4412, 3.3866, 3.5271, 3.6174, 2.2914, 1.9574], + device='cuda:2'), covar=tensor([0.0156, 0.0188, 0.0059, 0.0201, 0.0258, 0.0117, 0.0704, 0.0874], + device='cuda:2'), in_proj_covar=tensor([0.0041, 0.0038, 0.0033, 0.0047, 0.0063, 0.0040, 0.0061, 0.0067], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004], + device='cuda:2') +2023-03-08 19:48:38,670 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18831.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:48:56,021 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18846.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:49:17,269 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18864.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 19:49:25,165 INFO [train.py:898] (2/4) Epoch 6, batch 700, loss[loss=0.2167, simple_loss=0.2975, pruned_loss=0.06799, over 18497.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3029, pruned_loss=0.07434, over 3497780.94 frames. ], batch size: 51, lr: 1.92e-02, grad_scale: 8.0 +2023-03-08 19:49:33,679 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18878.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:49:34,691 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=18879.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:50:23,147 INFO [train.py:898] (2/4) Epoch 6, batch 750, loss[loss=0.1996, simple_loss=0.2725, pruned_loss=0.06336, over 18465.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.3036, pruned_loss=0.07486, over 3517310.71 frames. ], batch size: 44, lr: 1.91e-02, grad_scale: 8.0 +2023-03-08 19:50:29,878 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.737e+02 4.561e+02 5.480e+02 6.846e+02 1.883e+03, threshold=1.096e+03, percent-clipped=6.0 +2023-03-08 19:50:33,959 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18930.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:50:56,363 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.6972, 5.2750, 5.6454, 5.4533, 5.3298, 6.2322, 5.7641, 5.6233], + device='cuda:2'), covar=tensor([0.0624, 0.0459, 0.0523, 0.0467, 0.1147, 0.0604, 0.0449, 0.1269], + device='cuda:2'), in_proj_covar=tensor([0.0261, 0.0194, 0.0202, 0.0194, 0.0245, 0.0288, 0.0187, 0.0281], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-08 19:51:21,265 INFO [train.py:898] (2/4) Epoch 6, batch 800, loss[loss=0.2379, simple_loss=0.316, pruned_loss=0.07987, over 17848.00 frames. ], tot_loss[loss=0.2271, simple_loss=0.3039, pruned_loss=0.07516, over 3537647.32 frames. 
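
The learning rate drifts smoothly downward through these entries (2.20e-02 early in epoch 5 to 1.91e-02 here, with a visible step down at the epoch 6 boundary), which is consistent with icefall's Eden schedule, where the LR decays in both the batch and epoch dimensions. A sketch with illustrative constants; base_lr, lr_batches and lr_epochs below are assumptions:

    def eden_lr(base_lr, batch, epoch, lr_batches=5000.0, lr_epochs=3.5):
        batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
        epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
        return base_lr * batch_factor * epoch_factor

    # e.g. eden_lr(0.05, batch=19000, epoch=5.3) ~= 1.87e-02, in the
    # neighbourhood of the lr values logged around these batches.
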
], batch size: 70, lr: 1.91e-02, grad_scale: 8.0 +2023-03-08 19:51:22,160 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0241, 4.4780, 4.7235, 3.5695, 3.5980, 3.6161, 2.3237, 2.0931], + device='cuda:2'), covar=tensor([0.0183, 0.0201, 0.0050, 0.0239, 0.0295, 0.0149, 0.0811, 0.0929], + device='cuda:2'), in_proj_covar=tensor([0.0044, 0.0041, 0.0034, 0.0050, 0.0068, 0.0043, 0.0065, 0.0071], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004], + device='cuda:2') +2023-03-08 19:51:46,551 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18991.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:52:20,915 INFO [train.py:898] (2/4) Epoch 6, batch 850, loss[loss=0.2035, simple_loss=0.2748, pruned_loss=0.06613, over 18256.00 frames. ], tot_loss[loss=0.2273, simple_loss=0.3039, pruned_loss=0.07533, over 3550136.33 frames. ], batch size: 45, lr: 1.91e-02, grad_scale: 8.0 +2023-03-08 19:52:28,231 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.682e+02 3.950e+02 4.754e+02 5.949e+02 2.076e+03, threshold=9.508e+02, percent-clipped=3.0 +2023-03-08 19:52:31,927 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19030.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:53:09,554 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-08 19:53:19,108 INFO [train.py:898] (2/4) Epoch 6, batch 900, loss[loss=0.2046, simple_loss=0.2892, pruned_loss=0.06004, over 18496.00 frames. ], tot_loss[loss=0.2259, simple_loss=0.3026, pruned_loss=0.07462, over 3568089.03 frames. ], batch size: 51, lr: 1.91e-02, grad_scale: 8.0 +2023-03-08 19:53:30,435 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=2.01 vs. limit=2.0 +2023-03-08 19:54:12,124 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2151, 5.0736, 5.2600, 5.1865, 5.1296, 5.8791, 5.4930, 5.3048], + device='cuda:2'), covar=tensor([0.0835, 0.0537, 0.0625, 0.0580, 0.1252, 0.0687, 0.0573, 0.1433], + device='cuda:2'), in_proj_covar=tensor([0.0260, 0.0193, 0.0202, 0.0195, 0.0243, 0.0281, 0.0188, 0.0282], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-08 19:54:17,503 INFO [train.py:898] (2/4) Epoch 6, batch 950, loss[loss=0.2233, simple_loss=0.3037, pruned_loss=0.07148, over 18468.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.302, pruned_loss=0.07392, over 3579730.78 frames. 
], batch size: 59, lr: 1.90e-02, grad_scale: 8.0 +2023-03-08 19:54:21,260 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1979, 5.3449, 2.7432, 5.0718, 4.9927, 5.3392, 5.0739, 2.6542], + device='cuda:2'), covar=tensor([0.0123, 0.0051, 0.0635, 0.0065, 0.0069, 0.0061, 0.0099, 0.0885], + device='cuda:2'), in_proj_covar=tensor([0.0063, 0.0053, 0.0082, 0.0065, 0.0062, 0.0052, 0.0067, 0.0087], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0004, 0.0004], + device='cuda:2') +2023-03-08 19:54:24,265 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.702e+02 4.325e+02 5.195e+02 6.173e+02 1.123e+03, threshold=1.039e+03, percent-clipped=4.0 +2023-03-08 19:54:26,840 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19129.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 19:54:47,486 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19146.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:55:08,232 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19164.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 19:55:15,695 INFO [train.py:898] (2/4) Epoch 6, batch 1000, loss[loss=0.2225, simple_loss=0.2954, pruned_loss=0.07479, over 18505.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3019, pruned_loss=0.07389, over 3579566.88 frames. ], batch size: 47, lr: 1.90e-02, grad_scale: 8.0 +2023-03-08 19:55:38,217 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19190.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 19:55:42,587 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19194.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:56:00,567 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.22 vs. limit=5.0 +2023-03-08 19:56:05,355 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19212.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 19:56:15,296 INFO [train.py:898] (2/4) Epoch 6, batch 1050, loss[loss=0.2067, simple_loss=0.2867, pruned_loss=0.06333, over 18488.00 frames. ], tot_loss[loss=0.224, simple_loss=0.3013, pruned_loss=0.07335, over 3592199.39 frames. ], batch size: 47, lr: 1.90e-02, grad_scale: 8.0 +2023-03-08 19:56:21,992 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.826e+02 4.167e+02 5.198e+02 6.760e+02 1.282e+03, threshold=1.040e+03, percent-clipped=3.0 +2023-03-08 19:56:38,387 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.82 vs. limit=5.0 +2023-03-08 19:57:11,319 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3177, 4.3961, 2.3620, 4.3868, 5.2266, 2.5515, 4.0959, 3.9356], + device='cuda:2'), covar=tensor([0.0055, 0.0943, 0.1599, 0.0424, 0.0040, 0.1330, 0.0565, 0.0711], + device='cuda:2'), in_proj_covar=tensor([0.0089, 0.0174, 0.0177, 0.0175, 0.0075, 0.0166, 0.0186, 0.0186], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0003], + device='cuda:2') +2023-03-08 19:57:14,181 INFO [train.py:898] (2/4) Epoch 6, batch 1100, loss[loss=0.2573, simple_loss=0.3339, pruned_loss=0.09034, over 18469.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3015, pruned_loss=0.07374, over 3589690.34 frames. 
], batch size: 59, lr: 1.90e-02, grad_scale: 8.0 +2023-03-08 19:57:31,217 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19286.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:57:32,575 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0518, 4.0476, 5.0701, 3.1956, 4.4144, 2.7996, 2.9540, 2.3732], + device='cuda:2'), covar=tensor([0.0574, 0.0500, 0.0042, 0.0411, 0.0418, 0.1761, 0.1768, 0.1312], + device='cuda:2'), in_proj_covar=tensor([0.0170, 0.0181, 0.0087, 0.0142, 0.0196, 0.0227, 0.0214, 0.0186], + device='cuda:2'), out_proj_covar=tensor([1.6163e-04, 1.7736e-04, 8.7371e-05, 1.3904e-04, 1.9123e-04, 2.1919e-04, + 2.1420e-04, 1.8326e-04], device='cuda:2') +2023-03-08 19:58:13,169 INFO [train.py:898] (2/4) Epoch 6, batch 1150, loss[loss=0.2067, simple_loss=0.2829, pruned_loss=0.06523, over 18409.00 frames. ], tot_loss[loss=0.2254, simple_loss=0.3024, pruned_loss=0.07425, over 3588594.89 frames. ], batch size: 48, lr: 1.90e-02, grad_scale: 8.0 +2023-03-08 19:58:20,057 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.317e+02 3.874e+02 4.905e+02 6.066e+02 2.100e+03, threshold=9.811e+02, percent-clipped=2.0 +2023-03-08 19:58:23,748 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19330.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:59:11,796 INFO [train.py:898] (2/4) Epoch 6, batch 1200, loss[loss=0.2491, simple_loss=0.326, pruned_loss=0.08605, over 17978.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3028, pruned_loss=0.07445, over 3590413.98 frames. ], batch size: 65, lr: 1.89e-02, grad_scale: 8.0 +2023-03-08 19:59:19,998 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19378.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:59:36,068 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.87 vs. limit=2.0 +2023-03-08 19:59:37,946 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19394.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 19:59:55,538 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.96 vs. limit=2.0 +2023-03-08 20:00:10,018 INFO [train.py:898] (2/4) Epoch 6, batch 1250, loss[loss=0.1872, simple_loss=0.2621, pruned_loss=0.0562, over 18255.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3021, pruned_loss=0.07397, over 3593477.90 frames. ], batch size: 45, lr: 1.89e-02, grad_scale: 8.0 +2023-03-08 20:00:16,821 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.768e+02 4.335e+02 5.425e+02 6.798e+02 1.467e+03, threshold=1.085e+03, percent-clipped=6.0 +2023-03-08 20:00:49,616 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19455.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:01:08,437 INFO [train.py:898] (2/4) Epoch 6, batch 1300, loss[loss=0.1939, simple_loss=0.2768, pruned_loss=0.05544, over 18397.00 frames. ], tot_loss[loss=0.2243, simple_loss=0.3014, pruned_loss=0.07365, over 3589176.56 frames. ], batch size: 48, lr: 1.89e-02, grad_scale: 8.0 +2023-03-08 20:01:25,239 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19485.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 20:02:07,617 INFO [train.py:898] (2/4) Epoch 6, batch 1350, loss[loss=0.1783, simple_loss=0.2604, pruned_loss=0.04808, over 18375.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.301, pruned_loss=0.07366, over 3581257.07 frames. 
], batch size: 46, lr: 1.89e-02, grad_scale: 8.0 +2023-03-08 20:02:10,103 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2404, 5.9302, 5.4513, 5.5972, 5.3405, 5.4005, 5.9121, 5.9074], + device='cuda:2'), covar=tensor([0.1029, 0.0458, 0.0396, 0.0652, 0.1276, 0.0648, 0.0452, 0.0454], + device='cuda:2'), in_proj_covar=tensor([0.0426, 0.0342, 0.0268, 0.0376, 0.0524, 0.0377, 0.0440, 0.0348], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-08 20:02:15,076 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.530e+02 3.979e+02 4.823e+02 6.493e+02 1.049e+03, threshold=9.645e+02, percent-clipped=0.0 +2023-03-08 20:03:05,921 INFO [train.py:898] (2/4) Epoch 6, batch 1400, loss[loss=0.213, simple_loss=0.2981, pruned_loss=0.06395, over 18401.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.3018, pruned_loss=0.07392, over 3574302.45 frames. ], batch size: 52, lr: 1.88e-02, grad_scale: 8.0 +2023-03-08 20:03:23,795 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19586.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:04:01,670 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19618.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:04:04,685 INFO [train.py:898] (2/4) Epoch 6, batch 1450, loss[loss=0.1901, simple_loss=0.2629, pruned_loss=0.0587, over 18374.00 frames. ], tot_loss[loss=0.2241, simple_loss=0.3011, pruned_loss=0.07358, over 3579185.23 frames. ], batch size: 42, lr: 1.88e-02, grad_scale: 8.0 +2023-03-08 20:04:11,634 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.837e+02 4.168e+02 4.993e+02 6.128e+02 1.216e+03, threshold=9.987e+02, percent-clipped=6.0 +2023-03-08 20:04:20,346 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19634.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:05:03,125 INFO [train.py:898] (2/4) Epoch 6, batch 1500, loss[loss=0.2081, simple_loss=0.2844, pruned_loss=0.06592, over 18352.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.3016, pruned_loss=0.07365, over 3582059.30 frames. ], batch size: 46, lr: 1.88e-02, grad_scale: 8.0 +2023-03-08 20:05:05,829 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6364, 1.7229, 2.8069, 2.7682, 3.3832, 4.9861, 4.2169, 3.8327], + device='cuda:2'), covar=tensor([0.0685, 0.1550, 0.1480, 0.0903, 0.1507, 0.0044, 0.0366, 0.0344], + device='cuda:2'), in_proj_covar=tensor([0.0167, 0.0219, 0.0203, 0.0205, 0.0301, 0.0123, 0.0188, 0.0154], + device='cuda:2'), out_proj_covar=tensor([1.1857e-04, 1.5703e-04, 1.5428e-04, 1.3665e-04, 2.1624e-04, 8.2431e-05, + 1.3259e-04, 1.0968e-04], device='cuda:2') +2023-03-08 20:05:12,437 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19679.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:06:01,662 INFO [train.py:898] (2/4) Epoch 6, batch 1550, loss[loss=0.2373, simple_loss=0.3131, pruned_loss=0.0808, over 15826.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3019, pruned_loss=0.07361, over 3584163.80 frames. ], batch size: 94, lr: 1.88e-02, grad_scale: 8.0 +2023-03-08 20:06:09,095 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.820e+02 4.112e+02 5.168e+02 6.335e+02 1.578e+03, threshold=1.034e+03, percent-clipped=4.0 +2023-03-08 20:06:11,173 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. 
limit=2.0 +2023-03-08 20:06:36,312 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19750.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:07:01,071 INFO [train.py:898] (2/4) Epoch 6, batch 1600, loss[loss=0.2465, simple_loss=0.3273, pruned_loss=0.0829, over 18229.00 frames. ], tot_loss[loss=0.225, simple_loss=0.3019, pruned_loss=0.074, over 3577592.99 frames. ], batch size: 60, lr: 1.87e-02, grad_scale: 8.0 +2023-03-08 20:07:13,076 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5719, 6.0514, 5.4333, 5.8573, 5.6341, 5.6141, 6.1476, 6.0541], + device='cuda:2'), covar=tensor([0.1063, 0.0588, 0.0394, 0.0624, 0.1377, 0.0595, 0.0463, 0.0573], + device='cuda:2'), in_proj_covar=tensor([0.0430, 0.0351, 0.0276, 0.0377, 0.0529, 0.0379, 0.0447, 0.0358], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-08 20:07:17,658 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19785.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 20:07:42,732 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4609, 6.0314, 5.3358, 5.7931, 5.6092, 5.4909, 6.1319, 6.0552], + device='cuda:2'), covar=tensor([0.0968, 0.0535, 0.0370, 0.0631, 0.1200, 0.0655, 0.0387, 0.0516], + device='cuda:2'), in_proj_covar=tensor([0.0429, 0.0347, 0.0274, 0.0376, 0.0527, 0.0378, 0.0447, 0.0355], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-08 20:07:58,574 INFO [train.py:898] (2/4) Epoch 6, batch 1650, loss[loss=0.2236, simple_loss=0.3099, pruned_loss=0.06865, over 18189.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3017, pruned_loss=0.07386, over 3584865.83 frames. ], batch size: 60, lr: 1.87e-02, grad_scale: 8.0 +2023-03-08 20:08:01,011 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.92 vs. limit=5.0 +2023-03-08 20:08:06,542 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.131e+02 4.738e+02 5.632e+02 7.460e+02 1.782e+03, threshold=1.126e+03, percent-clipped=8.0 +2023-03-08 20:08:13,471 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=19833.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 20:08:56,816 INFO [train.py:898] (2/4) Epoch 6, batch 1700, loss[loss=0.2071, simple_loss=0.2835, pruned_loss=0.06533, over 18389.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.3006, pruned_loss=0.07328, over 3586057.53 frames. ], batch size: 42, lr: 1.87e-02, grad_scale: 8.0 +2023-03-08 20:09:55,327 INFO [train.py:898] (2/4) Epoch 6, batch 1750, loss[loss=0.2216, simple_loss=0.3105, pruned_loss=0.06634, over 17820.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.2997, pruned_loss=0.07262, over 3587781.81 frames. ], batch size: 70, lr: 1.87e-02, grad_scale: 8.0 +2023-03-08 20:10:02,907 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.466e+02 3.770e+02 4.789e+02 6.100e+02 1.481e+03, threshold=9.579e+02, percent-clipped=1.0 +2023-03-08 20:10:53,733 INFO [train.py:898] (2/4) Epoch 6, batch 1800, loss[loss=0.2199, simple_loss=0.2976, pruned_loss=0.07113, over 17998.00 frames. ], tot_loss[loss=0.223, simple_loss=0.3002, pruned_loss=0.07288, over 3585118.40 frames. 
], batch size: 65, lr: 1.87e-02, grad_scale: 8.0 +2023-03-08 20:10:57,376 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19974.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:11:13,411 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3974, 5.9994, 5.3395, 5.7972, 5.5298, 5.5666, 6.0529, 6.0081], + device='cuda:2'), covar=tensor([0.0906, 0.0513, 0.0403, 0.0540, 0.1106, 0.0575, 0.0401, 0.0487], + device='cuda:2'), in_proj_covar=tensor([0.0424, 0.0345, 0.0269, 0.0372, 0.0523, 0.0379, 0.0449, 0.0352], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-08 20:11:55,728 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20020.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:11:56,567 INFO [train.py:898] (2/4) Epoch 6, batch 1850, loss[loss=0.2362, simple_loss=0.3117, pruned_loss=0.08034, over 16130.00 frames. ], tot_loss[loss=0.223, simple_loss=0.3, pruned_loss=0.07293, over 3595345.25 frames. ], batch size: 94, lr: 1.86e-02, grad_scale: 8.0 +2023-03-08 20:12:03,178 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.737e+02 4.293e+02 5.407e+02 6.655e+02 1.124e+03, threshold=1.081e+03, percent-clipped=3.0 +2023-03-08 20:12:31,057 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20050.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:12:55,175 INFO [train.py:898] (2/4) Epoch 6, batch 1900, loss[loss=0.2191, simple_loss=0.3024, pruned_loss=0.06792, over 18387.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.2996, pruned_loss=0.07274, over 3593196.19 frames. ], batch size: 52, lr: 1.86e-02, grad_scale: 8.0 +2023-03-08 20:13:07,091 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20081.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:13:17,217 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20089.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:13:25,525 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5694, 4.5830, 4.6438, 4.4536, 4.4880, 4.5316, 4.9086, 4.8870], + device='cuda:2'), covar=tensor([0.0066, 0.0068, 0.0059, 0.0086, 0.0062, 0.0096, 0.0055, 0.0076], + device='cuda:2'), in_proj_covar=tensor([0.0066, 0.0050, 0.0049, 0.0064, 0.0054, 0.0073, 0.0062, 0.0060], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 20:13:27,666 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20098.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:13:44,380 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20112.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:13:46,657 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3983, 3.0781, 1.6717, 4.2342, 2.8026, 4.2974, 1.6622, 3.5464], + device='cuda:2'), covar=tensor([0.0497, 0.0825, 0.1374, 0.0251, 0.0784, 0.0180, 0.1356, 0.0414], + device='cuda:2'), in_proj_covar=tensor([0.0162, 0.0194, 0.0167, 0.0177, 0.0167, 0.0145, 0.0173, 0.0166], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 20:13:54,342 INFO [train.py:898] (2/4) Epoch 6, batch 1950, loss[loss=0.23, simple_loss=0.3097, pruned_loss=0.07513, over 18551.00 frames. 
], tot_loss[loss=0.2232, simple_loss=0.3006, pruned_loss=0.0729, over 3599867.37 frames. ], batch size: 54, lr: 1.86e-02, grad_scale: 8.0 +2023-03-08 20:14:01,197 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.918e+02 4.095e+02 5.447e+02 6.778e+02 1.959e+03, threshold=1.089e+03, percent-clipped=6.0 +2023-03-08 20:14:28,689 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20150.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:14:52,816 INFO [train.py:898] (2/4) Epoch 6, batch 2000, loss[loss=0.2319, simple_loss=0.3217, pruned_loss=0.07107, over 18018.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.3011, pruned_loss=0.07267, over 3605378.20 frames. ], batch size: 65, lr: 1.86e-02, grad_scale: 8.0 +2023-03-08 20:14:55,584 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20173.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:15:32,062 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20204.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:15:51,544 INFO [train.py:898] (2/4) Epoch 6, batch 2050, loss[loss=0.2197, simple_loss=0.2938, pruned_loss=0.07277, over 18385.00 frames. ], tot_loss[loss=0.2236, simple_loss=0.3013, pruned_loss=0.07292, over 3591947.86 frames. ], batch size: 50, lr: 1.86e-02, grad_scale: 4.0 +2023-03-08 20:15:59,262 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.572e+02 4.223e+02 5.010e+02 6.268e+02 1.616e+03, threshold=1.002e+03, percent-clipped=2.0 +2023-03-08 20:16:43,490 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20265.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:16:49,846 INFO [train.py:898] (2/4) Epoch 6, batch 2100, loss[loss=0.2495, simple_loss=0.3281, pruned_loss=0.08542, over 18626.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.302, pruned_loss=0.0734, over 3584794.92 frames. ], batch size: 52, lr: 1.85e-02, grad_scale: 4.0 +2023-03-08 20:16:53,623 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20274.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:17:49,004 INFO [train.py:898] (2/4) Epoch 6, batch 2150, loss[loss=0.265, simple_loss=0.3287, pruned_loss=0.1007, over 12665.00 frames. ], tot_loss[loss=0.2249, simple_loss=0.3024, pruned_loss=0.07369, over 3568726.48 frames. ], batch size: 131, lr: 1.85e-02, grad_scale: 4.0 +2023-03-08 20:17:50,330 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20322.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:17:56,868 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.681e+02 4.510e+02 5.215e+02 6.724e+02 1.670e+03, threshold=1.043e+03, percent-clipped=8.0 +2023-03-08 20:18:07,617 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0498, 4.1796, 2.1723, 4.2359, 4.9311, 2.3323, 3.5419, 3.6394], + device='cuda:2'), covar=tensor([0.0057, 0.0855, 0.1595, 0.0443, 0.0062, 0.1354, 0.0691, 0.0729], + device='cuda:2'), in_proj_covar=tensor([0.0091, 0.0180, 0.0183, 0.0178, 0.0076, 0.0168, 0.0190, 0.0186], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 20:18:47,269 INFO [train.py:898] (2/4) Epoch 6, batch 2200, loss[loss=0.2475, simple_loss=0.3221, pruned_loss=0.0864, over 18160.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.3034, pruned_loss=0.07417, over 3562045.57 frames. 
], batch size: 62, lr: 1.85e-02, grad_scale: 4.0 +2023-03-08 20:18:53,246 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20376.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:19:07,117 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20388.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:19:43,468 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20418.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:19:46,526 INFO [train.py:898] (2/4) Epoch 6, batch 2250, loss[loss=0.2222, simple_loss=0.305, pruned_loss=0.06972, over 18301.00 frames. ], tot_loss[loss=0.2247, simple_loss=0.3023, pruned_loss=0.07359, over 3568403.39 frames. ], batch size: 54, lr: 1.85e-02, grad_scale: 4.0 +2023-03-08 20:19:54,690 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.728e+02 4.214e+02 4.964e+02 6.088e+02 1.302e+03, threshold=9.929e+02, percent-clipped=3.0 +2023-03-08 20:20:13,879 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20445.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:20:18,815 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20449.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:20:41,608 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20468.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:20:44,864 INFO [train.py:898] (2/4) Epoch 6, batch 2300, loss[loss=0.2382, simple_loss=0.3193, pruned_loss=0.07856, over 17711.00 frames. ], tot_loss[loss=0.2246, simple_loss=0.3021, pruned_loss=0.07356, over 3578202.70 frames. ], batch size: 70, lr: 1.84e-02, grad_scale: 4.0 +2023-03-08 20:20:54,724 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20479.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:21:43,789 INFO [train.py:898] (2/4) Epoch 6, batch 2350, loss[loss=0.1964, simple_loss=0.2708, pruned_loss=0.06101, over 18490.00 frames. ], tot_loss[loss=0.223, simple_loss=0.3006, pruned_loss=0.07269, over 3592843.41 frames. ], batch size: 44, lr: 1.84e-02, grad_scale: 4.0 +2023-03-08 20:21:52,079 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.562e+02 3.728e+02 4.764e+02 5.894e+02 1.500e+03, threshold=9.528e+02, percent-clipped=4.0 +2023-03-08 20:22:29,281 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20560.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:22:42,383 INFO [train.py:898] (2/4) Epoch 6, batch 2400, loss[loss=0.2285, simple_loss=0.3085, pruned_loss=0.07426, over 18351.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.3004, pruned_loss=0.07239, over 3596009.45 frames. 
], batch size: 56, lr: 1.84e-02, grad_scale: 8.0 +2023-03-08 20:23:02,864 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20588.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:23:16,413 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.3475, 4.9305, 4.8769, 4.9123, 4.5513, 4.8324, 4.2246, 4.6813], + device='cuda:2'), covar=tensor([0.0233, 0.0267, 0.0224, 0.0217, 0.0357, 0.0202, 0.1088, 0.0280], + device='cuda:2'), in_proj_covar=tensor([0.0137, 0.0181, 0.0166, 0.0165, 0.0170, 0.0178, 0.0248, 0.0163], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005], + device='cuda:2') +2023-03-08 20:23:41,480 INFO [train.py:898] (2/4) Epoch 6, batch 2450, loss[loss=0.2017, simple_loss=0.2775, pruned_loss=0.06296, over 18533.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.3004, pruned_loss=0.07214, over 3593046.41 frames. ], batch size: 49, lr: 1.84e-02, grad_scale: 8.0 +2023-03-08 20:23:49,392 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.965e+02 4.013e+02 4.826e+02 5.800e+02 1.376e+03, threshold=9.653e+02, percent-clipped=2.0 +2023-03-08 20:24:13,885 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20649.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:24:38,626 INFO [train.py:898] (2/4) Epoch 6, batch 2500, loss[loss=0.2016, simple_loss=0.2865, pruned_loss=0.05833, over 18545.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.301, pruned_loss=0.07263, over 3600073.53 frames. ], batch size: 49, lr: 1.84e-02, grad_scale: 8.0 +2023-03-08 20:24:43,496 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0941, 2.9714, 4.2533, 4.4062, 3.0147, 4.8590, 4.0432, 3.1614], + device='cuda:2'), covar=tensor([0.0290, 0.1245, 0.0214, 0.0169, 0.1263, 0.0093, 0.0359, 0.0935], + device='cuda:2'), in_proj_covar=tensor([0.0155, 0.0195, 0.0109, 0.0117, 0.0192, 0.0148, 0.0161, 0.0179], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 20:24:44,626 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20676.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:25:35,729 INFO [train.py:898] (2/4) Epoch 6, batch 2550, loss[loss=0.2093, simple_loss=0.2863, pruned_loss=0.06609, over 18552.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.3003, pruned_loss=0.07228, over 3601700.55 frames. ], batch size: 49, lr: 1.83e-02, grad_scale: 4.0 +2023-03-08 20:25:40,323 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20724.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:25:43,432 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. 
limit=2.0 +2023-03-08 20:25:45,642 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.233e+02 4.696e+02 5.623e+02 7.673e+02 1.890e+03, threshold=1.125e+03, percent-clipped=13.0 +2023-03-08 20:26:03,313 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20744.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:26:04,499 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20745.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:26:30,310 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20768.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:26:34,036 INFO [train.py:898] (2/4) Epoch 6, batch 2600, loss[loss=0.2136, simple_loss=0.2978, pruned_loss=0.06471, over 18505.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2997, pruned_loss=0.07234, over 3589125.70 frames. ], batch size: 51, lr: 1.83e-02, grad_scale: 4.0 +2023-03-08 20:26:38,090 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20774.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:26:48,877 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3277, 4.4346, 2.7940, 4.4970, 5.2788, 2.5071, 4.0349, 4.0948], + device='cuda:2'), covar=tensor([0.0046, 0.0826, 0.1262, 0.0425, 0.0040, 0.1338, 0.0574, 0.0612], + device='cuda:2'), in_proj_covar=tensor([0.0089, 0.0179, 0.0180, 0.0175, 0.0075, 0.0168, 0.0188, 0.0182], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-08 20:27:00,484 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20793.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:27:27,128 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20816.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:27:32,624 INFO [train.py:898] (2/4) Epoch 6, batch 2650, loss[loss=0.2035, simple_loss=0.2759, pruned_loss=0.06554, over 18369.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.3, pruned_loss=0.07211, over 3589376.75 frames. ], batch size: 46, lr: 1.83e-02, grad_scale: 4.0 +2023-03-08 20:27:43,391 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.822e+02 3.946e+02 4.764e+02 5.553e+02 1.236e+03, threshold=9.528e+02, percent-clipped=1.0 +2023-03-08 20:27:46,299 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-08 20:28:19,135 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20860.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:28:31,325 INFO [train.py:898] (2/4) Epoch 6, batch 2700, loss[loss=0.2149, simple_loss=0.2812, pruned_loss=0.07425, over 18460.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.3003, pruned_loss=0.07233, over 3595469.64 frames. 
], batch size: 44, lr: 1.83e-02, grad_scale: 4.0 +2023-03-08 20:28:35,475 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1991, 4.6026, 4.5873, 4.5994, 4.2858, 4.5239, 3.9676, 4.5344], + device='cuda:2'), covar=tensor([0.0215, 0.0287, 0.0243, 0.0256, 0.0317, 0.0219, 0.1039, 0.0227], + device='cuda:2'), in_proj_covar=tensor([0.0139, 0.0181, 0.0166, 0.0167, 0.0170, 0.0180, 0.0246, 0.0160], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005], + device='cuda:2') +2023-03-08 20:29:14,537 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=20908.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:29:16,937 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2920, 5.2042, 2.3881, 5.0458, 4.9519, 5.1895, 4.9019, 2.2565], + device='cuda:2'), covar=tensor([0.0138, 0.0107, 0.1038, 0.0093, 0.0092, 0.0140, 0.0179, 0.1569], + device='cuda:2'), in_proj_covar=tensor([0.0066, 0.0056, 0.0084, 0.0069, 0.0065, 0.0055, 0.0069, 0.0090], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 20:29:28,203 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2858, 5.3947, 2.9261, 5.1392, 5.0731, 5.4288, 5.1848, 2.8865], + device='cuda:2'), covar=tensor([0.0124, 0.0041, 0.0624, 0.0070, 0.0052, 0.0043, 0.0072, 0.0804], + device='cuda:2'), in_proj_covar=tensor([0.0066, 0.0056, 0.0084, 0.0069, 0.0065, 0.0055, 0.0069, 0.0090], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 20:29:28,995 INFO [train.py:898] (2/4) Epoch 6, batch 2750, loss[loss=0.2116, simple_loss=0.2918, pruned_loss=0.06569, over 18305.00 frames. ], tot_loss[loss=0.2235, simple_loss=0.301, pruned_loss=0.07299, over 3593989.66 frames. ], batch size: 54, lr: 1.83e-02, grad_scale: 4.0 +2023-03-08 20:29:38,697 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.441e+02 4.086e+02 5.349e+02 6.220e+02 9.692e+02, threshold=1.070e+03, percent-clipped=2.0 +2023-03-08 20:29:51,666 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4545, 1.7607, 2.9413, 2.7571, 3.6586, 5.2379, 4.5205, 4.3586], + device='cuda:2'), covar=tensor([0.0644, 0.1324, 0.1272, 0.0840, 0.1141, 0.0034, 0.0253, 0.0206], + device='cuda:2'), in_proj_covar=tensor([0.0177, 0.0227, 0.0212, 0.0209, 0.0308, 0.0129, 0.0197, 0.0159], + device='cuda:2'), out_proj_covar=tensor([1.2347e-04, 1.5992e-04, 1.5742e-04, 1.3601e-04, 2.1715e-04, 8.6138e-05, + 1.3526e-04, 1.1125e-04], device='cuda:2') +2023-03-08 20:29:57,131 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20944.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:30:27,641 INFO [train.py:898] (2/4) Epoch 6, batch 2800, loss[loss=0.2014, simple_loss=0.2742, pruned_loss=0.06432, over 18182.00 frames. ], tot_loss[loss=0.2225, simple_loss=0.3002, pruned_loss=0.07237, over 3601626.96 frames. 
], batch size: 44, lr: 1.82e-02, grad_scale: 8.0 +2023-03-08 20:30:48,423 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2677, 5.2251, 4.6979, 5.2157, 5.1882, 4.6685, 5.1975, 4.8419], + device='cuda:2'), covar=tensor([0.0377, 0.0410, 0.1365, 0.0662, 0.0426, 0.0396, 0.0298, 0.0754], + device='cuda:2'), in_proj_covar=tensor([0.0330, 0.0377, 0.0521, 0.0300, 0.0272, 0.0343, 0.0366, 0.0465], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-08 20:30:51,408 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6445, 3.4071, 3.2429, 2.7417, 3.2313, 2.7401, 2.6887, 3.5372], + device='cuda:2'), covar=tensor([0.0027, 0.0066, 0.0082, 0.0106, 0.0083, 0.0148, 0.0139, 0.0061], + device='cuda:2'), in_proj_covar=tensor([0.0060, 0.0080, 0.0073, 0.0115, 0.0073, 0.0117, 0.0122, 0.0064], + device='cuda:2'), out_proj_covar=tensor([8.4953e-05, 1.2463e-04, 1.1188e-04, 1.8394e-04, 1.1147e-04, 1.8328e-04, + 1.9040e-04, 9.4736e-05], device='cuda:2') +2023-03-08 20:30:55,162 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.63 vs. limit=2.0 +2023-03-08 20:31:15,104 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0143, 5.5946, 5.1307, 5.3569, 5.0530, 5.2250, 5.6290, 5.5980], + device='cuda:2'), covar=tensor([0.0950, 0.0663, 0.0529, 0.0731, 0.1357, 0.0542, 0.0465, 0.0621], + device='cuda:2'), in_proj_covar=tensor([0.0440, 0.0352, 0.0280, 0.0391, 0.0540, 0.0389, 0.0461, 0.0364], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-08 20:31:26,871 INFO [train.py:898] (2/4) Epoch 6, batch 2850, loss[loss=0.2099, simple_loss=0.2956, pruned_loss=0.06208, over 18497.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.3004, pruned_loss=0.07225, over 3599799.09 frames. ], batch size: 51, lr: 1.82e-02, grad_scale: 4.0 +2023-03-08 20:31:37,650 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.172e+02 4.192e+02 4.961e+02 6.331e+02 1.118e+03, threshold=9.922e+02, percent-clipped=2.0 +2023-03-08 20:31:54,452 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21044.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:31:54,816 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-08 20:32:03,934 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21052.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:32:11,795 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4923, 6.0027, 5.3648, 5.8007, 5.4751, 5.5751, 6.0262, 6.0215], + device='cuda:2'), covar=tensor([0.1017, 0.0562, 0.0463, 0.0587, 0.1462, 0.0512, 0.0442, 0.0531], + device='cuda:2'), in_proj_covar=tensor([0.0431, 0.0348, 0.0277, 0.0383, 0.0530, 0.0383, 0.0457, 0.0354], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0003], + device='cuda:2') +2023-03-08 20:32:24,739 INFO [train.py:898] (2/4) Epoch 6, batch 2900, loss[loss=0.237, simple_loss=0.3185, pruned_loss=0.07776, over 18357.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.3007, pruned_loss=0.07251, over 3608353.46 frames. 
], batch size: 56, lr: 1.82e-02, grad_scale: 4.0 +2023-03-08 20:32:28,179 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21074.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:32:49,271 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21092.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:33:14,700 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3656, 5.3257, 4.7708, 5.3095, 5.2880, 4.5698, 5.1968, 4.8914], + device='cuda:2'), covar=tensor([0.0417, 0.0412, 0.1734, 0.0748, 0.0409, 0.0474, 0.0428, 0.0778], + device='cuda:2'), in_proj_covar=tensor([0.0331, 0.0385, 0.0530, 0.0302, 0.0274, 0.0345, 0.0376, 0.0469], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-08 20:33:14,837 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21113.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:33:20,489 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21118.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:33:20,762 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.06 vs. limit=5.0 +2023-03-08 20:33:23,467 INFO [train.py:898] (2/4) Epoch 6, batch 2950, loss[loss=0.2225, simple_loss=0.3031, pruned_loss=0.07095, over 18317.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.3012, pruned_loss=0.07273, over 3605695.25 frames. ], batch size: 54, lr: 1.82e-02, grad_scale: 4.0 +2023-03-08 20:33:24,831 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21122.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:33:33,504 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.673e+02 4.312e+02 5.615e+02 7.522e+02 2.010e+03, threshold=1.123e+03, percent-clipped=9.0 +2023-03-08 20:33:34,309 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.03 vs. limit=2.0 +2023-03-08 20:34:09,378 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8432, 3.9188, 2.3864, 3.9983, 4.7709, 2.3674, 3.4015, 3.4100], + device='cuda:2'), covar=tensor([0.0057, 0.0924, 0.1422, 0.0434, 0.0051, 0.1246, 0.0679, 0.0704], + device='cuda:2'), in_proj_covar=tensor([0.0088, 0.0179, 0.0179, 0.0173, 0.0076, 0.0165, 0.0185, 0.0180], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 20:34:19,975 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.85 vs. limit=2.0 +2023-03-08 20:34:22,620 INFO [train.py:898] (2/4) Epoch 6, batch 3000, loss[loss=0.2413, simple_loss=0.3191, pruned_loss=0.08173, over 18096.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.3009, pruned_loss=0.07239, over 3587856.52 frames. ], batch size: 62, lr: 1.82e-02, grad_scale: 4.0 +2023-03-08 20:34:22,620 INFO [train.py:923] (2/4) Computing validation loss +2023-03-08 20:34:34,663 INFO [train.py:932] (2/4) Epoch 6, validation: loss=0.1727, simple_loss=0.276, pruned_loss=0.03476, over 944034.00 frames. 
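The train.py records above report each batch's loss, simple_loss and pruned_loss next to a frame-weighted running aggregate (the "tot_loss[... over N frames]" fields), and the validation entry just printed is the same kind of aggregate taken over the whole dev set (944034.00 frames). The sketch below is a minimal, hypothetical reconstruction of that bookkeeping, not necessarily the actual icefall code; the MetricsTracker name, its methods, and the sample numbers are assumptions made purely for illustration.

```python
from collections import defaultdict


class MetricsTracker(defaultdict):
    """Accumulates frame-weighted sums of losses so that an average
    'over N frames' can be reported, as in the log records above.
    (Illustrative sketch; name and interface are assumptions.)"""

    def __init__(self):
        super().__init__(float)

    def __add__(self, other: "MetricsTracker") -> "MetricsTracker":
        # Merge two trackers by summing their frame-weighted sums.
        ans = MetricsTracker()
        for k, v in self.items():
            ans[k] += v
        for k, v in other.items():
            ans[k] += v
        return ans

    def __str__(self) -> str:
        # Divide each accumulated sum by the total frame count to
        # recover per-frame averages in the log's own format.
        frames = self["frames"]
        body = ", ".join(
            f"{k}={v / frames:.4g}" for k, v in self.items() if k != "frames"
        )
        return f"[{body}, over {frames:.2f} frames. ]"


def batch_info(loss: float, simple: float, pruned: float,
               frames: float) -> MetricsTracker:
    # Store frame-weighted sums; __str__ divides by the frame count
    # again, so long batches move the running average more than short ones.
    info = MetricsTracker()
    info["frames"] = frames
    info["loss"] = loss * frames
    info["simple_loss"] = simple * frames
    info["pruned_loss"] = pruned * frames
    return info


# Two made-up batches roughly matching the magnitudes in this log.
tot_loss = batch_info(0.2225, 0.3031, 0.07095, 18317.0)
tot_loss = tot_loss + batch_info(0.2413, 0.3191, 0.08173, 18096.0)
# Prints a record in the same shape as the tot_loss fields above,
# e.g. tot_loss[loss=0.2318, ..., over 36413.00 frames. ]
print(f"tot_loss{tot_loss}")
```

Weighting by frame count rather than averaging per-batch values means a batch of 131 long cuts counts for more than a batch of 42 short ones, which would explain why tot_loss in these records moves smoothly even as the per-batch loss values jump around.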
+2023-03-08 20:34:34,664 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-08 20:34:45,171 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21179.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:35:13,956 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.4563, 5.0215, 5.0331, 4.9711, 4.5846, 4.8605, 4.2050, 4.8551], + device='cuda:2'), covar=tensor([0.0246, 0.0303, 0.0211, 0.0253, 0.0374, 0.0238, 0.1290, 0.0266], + device='cuda:2'), in_proj_covar=tensor([0.0140, 0.0184, 0.0166, 0.0171, 0.0174, 0.0183, 0.0250, 0.0163], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005], + device='cuda:2') +2023-03-08 20:35:33,740 INFO [train.py:898] (2/4) Epoch 6, batch 3050, loss[loss=0.2023, simple_loss=0.2832, pruned_loss=0.06068, over 18244.00 frames. ], tot_loss[loss=0.223, simple_loss=0.3009, pruned_loss=0.0726, over 3573573.58 frames. ], batch size: 45, lr: 1.81e-02, grad_scale: 4.0 +2023-03-08 20:35:45,056 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.742e+02 3.885e+02 4.643e+02 5.808e+02 1.137e+03, threshold=9.287e+02, percent-clipped=1.0 +2023-03-08 20:35:53,467 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0232, 5.0895, 5.0563, 4.8741, 4.6890, 4.8747, 5.2215, 5.2081], + device='cuda:2'), covar=tensor([0.0045, 0.0054, 0.0048, 0.0073, 0.0065, 0.0084, 0.0054, 0.0084], + device='cuda:2'), in_proj_covar=tensor([0.0066, 0.0049, 0.0049, 0.0063, 0.0053, 0.0072, 0.0060, 0.0061], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 20:36:02,187 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21244.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:36:32,475 INFO [train.py:898] (2/4) Epoch 6, batch 3100, loss[loss=0.2414, simple_loss=0.3206, pruned_loss=0.08109, over 18341.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.3007, pruned_loss=0.07256, over 3579089.20 frames. ], batch size: 55, lr: 1.81e-02, grad_scale: 4.0 +2023-03-08 20:36:58,418 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21292.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:37:09,296 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2305, 2.5314, 2.2760, 2.7804, 3.1978, 3.4030, 2.8205, 2.9749], + device='cuda:2'), covar=tensor([0.0252, 0.0242, 0.0734, 0.0333, 0.0144, 0.0105, 0.0326, 0.0254], + device='cuda:2'), in_proj_covar=tensor([0.0105, 0.0080, 0.0136, 0.0114, 0.0081, 0.0064, 0.0103, 0.0103], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 20:37:21,480 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21312.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:37:31,459 INFO [train.py:898] (2/4) Epoch 6, batch 3150, loss[loss=0.1875, simple_loss=0.2743, pruned_loss=0.05036, over 18387.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.2998, pruned_loss=0.07199, over 3590412.13 frames. 
], batch size: 50, lr: 1.81e-02, grad_scale: 4.0 +2023-03-08 20:37:39,613 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21328.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:37:41,466 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.840e+02 4.055e+02 4.775e+02 6.175e+02 1.308e+03, threshold=9.551e+02, percent-clipped=5.0 +2023-03-08 20:38:06,753 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1923, 4.9816, 5.3763, 5.1888, 5.0665, 5.8117, 5.4553, 5.2365], + device='cuda:2'), covar=tensor([0.0840, 0.0584, 0.0578, 0.0527, 0.1269, 0.0594, 0.0527, 0.1319], + device='cuda:2'), in_proj_covar=tensor([0.0257, 0.0194, 0.0205, 0.0197, 0.0242, 0.0290, 0.0190, 0.0281], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-08 20:38:29,900 INFO [train.py:898] (2/4) Epoch 6, batch 3200, loss[loss=0.2794, simple_loss=0.3305, pruned_loss=0.1142, over 12767.00 frames. ], tot_loss[loss=0.2222, simple_loss=0.2997, pruned_loss=0.07237, over 3577753.59 frames. ], batch size: 129, lr: 1.81e-02, grad_scale: 8.0 +2023-03-08 20:38:32,541 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21373.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:38:52,078 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21389.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:39:14,473 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21408.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:39:22,493 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7171, 3.4634, 1.8836, 4.3733, 2.9150, 4.5398, 2.5412, 4.0508], + device='cuda:2'), covar=tensor([0.0400, 0.0634, 0.1276, 0.0315, 0.0825, 0.0178, 0.0966, 0.0279], + device='cuda:2'), in_proj_covar=tensor([0.0158, 0.0190, 0.0163, 0.0180, 0.0165, 0.0158, 0.0172, 0.0162], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 20:39:28,808 INFO [train.py:898] (2/4) Epoch 6, batch 3250, loss[loss=0.2091, simple_loss=0.2927, pruned_loss=0.06274, over 18494.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2995, pruned_loss=0.0723, over 3574197.01 frames. ], batch size: 51, lr: 1.81e-02, grad_scale: 8.0 +2023-03-08 20:39:39,005 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.706e+02 4.124e+02 5.141e+02 6.520e+02 1.245e+03, threshold=1.028e+03, percent-clipped=2.0 +2023-03-08 20:40:19,369 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3532, 2.5603, 2.3123, 2.6227, 3.2019, 3.0499, 2.8385, 2.8382], + device='cuda:2'), covar=tensor([0.0171, 0.0286, 0.0671, 0.0548, 0.0247, 0.0235, 0.0396, 0.0277], + device='cuda:2'), in_proj_covar=tensor([0.0106, 0.0082, 0.0136, 0.0117, 0.0082, 0.0066, 0.0104, 0.0104], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 20:40:28,087 INFO [train.py:898] (2/4) Epoch 6, batch 3300, loss[loss=0.2265, simple_loss=0.3136, pruned_loss=0.06974, over 17201.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2991, pruned_loss=0.07206, over 3579009.83 frames. 
], batch size: 78, lr: 1.80e-02, grad_scale: 8.0 +2023-03-08 20:40:31,903 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21474.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:40:35,598 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6352, 4.5245, 4.7103, 3.4737, 3.4476, 3.6586, 2.3420, 1.9813], + device='cuda:2'), covar=tensor([0.0222, 0.0122, 0.0044, 0.0235, 0.0387, 0.0185, 0.0778, 0.1050], + device='cuda:2'), in_proj_covar=tensor([0.0045, 0.0041, 0.0038, 0.0050, 0.0072, 0.0047, 0.0069, 0.0074], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0005, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 20:40:41,114 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.4407, 5.5999, 3.0539, 5.3107, 5.3105, 5.6437, 5.4155, 2.8057], + device='cuda:2'), covar=tensor([0.0114, 0.0057, 0.0629, 0.0052, 0.0056, 0.0049, 0.0087, 0.0874], + device='cuda:2'), in_proj_covar=tensor([0.0065, 0.0054, 0.0083, 0.0068, 0.0064, 0.0054, 0.0069, 0.0087], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 20:41:04,204 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7588, 1.8118, 2.8208, 2.7945, 3.5703, 5.1602, 4.5077, 4.0835], + device='cuda:2'), covar=tensor([0.0632, 0.1410, 0.1395, 0.0913, 0.1229, 0.0039, 0.0278, 0.0284], + device='cuda:2'), in_proj_covar=tensor([0.0179, 0.0228, 0.0219, 0.0213, 0.0312, 0.0132, 0.0203, 0.0160], + device='cuda:2'), out_proj_covar=tensor([1.2401e-04, 1.5974e-04, 1.6088e-04, 1.3720e-04, 2.1798e-04, 8.8377e-05, + 1.3785e-04, 1.1166e-04], device='cuda:2') +2023-03-08 20:41:27,332 INFO [train.py:898] (2/4) Epoch 6, batch 3350, loss[loss=0.22, simple_loss=0.2959, pruned_loss=0.072, over 18369.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.299, pruned_loss=0.072, over 3571759.65 frames. ], batch size: 50, lr: 1.80e-02, grad_scale: 8.0 +2023-03-08 20:41:37,410 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.459e+02 4.317e+02 5.194e+02 7.020e+02 1.247e+03, threshold=1.039e+03, percent-clipped=7.0 +2023-03-08 20:41:59,511 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.96 vs. limit=2.0 +2023-03-08 20:42:25,863 INFO [train.py:898] (2/4) Epoch 6, batch 3400, loss[loss=0.2335, simple_loss=0.3138, pruned_loss=0.07664, over 18303.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2988, pruned_loss=0.07186, over 3567665.27 frames. ], batch size: 57, lr: 1.80e-02, grad_scale: 4.0 +2023-03-08 20:42:49,850 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5734, 5.2199, 5.2154, 5.1700, 4.7280, 5.0476, 4.3826, 5.0823], + device='cuda:2'), covar=tensor([0.0236, 0.0252, 0.0190, 0.0236, 0.0385, 0.0219, 0.1167, 0.0243], + device='cuda:2'), in_proj_covar=tensor([0.0139, 0.0184, 0.0167, 0.0173, 0.0172, 0.0180, 0.0249, 0.0164], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005], + device='cuda:2') +2023-03-08 20:42:53,960 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21595.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:43:15,654 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21613.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:43:24,540 INFO [train.py:898] (2/4) Epoch 6, batch 3450, loss[loss=0.2116, simple_loss=0.2872, pruned_loss=0.06802, over 18244.00 frames. 
], tot_loss[loss=0.2211, simple_loss=0.2992, pruned_loss=0.07151, over 3581948.53 frames. ], batch size: 47, lr: 1.80e-02, grad_scale: 4.0 +2023-03-08 20:43:35,799 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.934e+02 4.048e+02 5.156e+02 6.271e+02 2.369e+03, threshold=1.031e+03, percent-clipped=5.0 +2023-03-08 20:44:05,817 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21656.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:44:20,620 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21668.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:44:23,903 INFO [train.py:898] (2/4) Epoch 6, batch 3500, loss[loss=0.2082, simple_loss=0.2962, pruned_loss=0.06006, over 18556.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2981, pruned_loss=0.07074, over 3595516.64 frames. ], batch size: 54, lr: 1.80e-02, grad_scale: 4.0 +2023-03-08 20:44:27,545 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21674.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:44:38,484 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21684.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:44:55,214 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6606, 3.6499, 5.3236, 4.3785, 3.3287, 3.0771, 4.2637, 5.3847], + device='cuda:2'), covar=tensor([0.0743, 0.1321, 0.0043, 0.0267, 0.0804, 0.1023, 0.0321, 0.0063], + device='cuda:2'), in_proj_covar=tensor([0.0129, 0.0194, 0.0073, 0.0141, 0.0161, 0.0164, 0.0147, 0.0099], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 20:45:04,901 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21708.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:45:18,389 INFO [train.py:898] (2/4) Epoch 6, batch 3550, loss[loss=0.1903, simple_loss=0.2649, pruned_loss=0.0578, over 18396.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2982, pruned_loss=0.07102, over 3588426.49 frames. ], batch size: 42, lr: 1.79e-02, grad_scale: 4.0 +2023-03-08 20:45:28,810 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.680e+02 4.052e+02 4.672e+02 6.032e+02 1.745e+03, threshold=9.344e+02, percent-clipped=2.0 +2023-03-08 20:45:56,747 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21756.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:46:04,468 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21763.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:46:12,829 INFO [train.py:898] (2/4) Epoch 6, batch 3600, loss[loss=0.1962, simple_loss=0.2784, pruned_loss=0.05701, over 18304.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2984, pruned_loss=0.07123, over 3591183.03 frames. ], batch size: 49, lr: 1.79e-02, grad_scale: 8.0 +2023-03-08 20:46:16,438 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21774.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:47:18,370 INFO [train.py:898] (2/4) Epoch 7, batch 0, loss[loss=0.1821, simple_loss=0.2594, pruned_loss=0.05243, over 18490.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2594, pruned_loss=0.05243, over 18490.00 frames. 
], batch size: 44, lr: 1.68e-02, grad_scale: 8.0 +2023-03-08 20:47:18,370 INFO [train.py:923] (2/4) Computing validation loss +2023-03-08 20:47:30,264 INFO [train.py:932] (2/4) Epoch 7, validation: loss=0.175, simple_loss=0.2779, pruned_loss=0.0361, over 944034.00 frames. +2023-03-08 20:47:30,265 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-08 20:47:50,713 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=21822.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:47:53,857 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21824.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:48:01,605 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.548e+02 4.151e+02 4.797e+02 6.024e+02 1.150e+03, threshold=9.595e+02, percent-clipped=4.0 +2023-03-08 20:48:08,233 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.92 vs. limit=5.0 +2023-03-08 20:48:16,258 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.75 vs. limit=2.0 +2023-03-08 20:48:23,606 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21850.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:48:28,870 INFO [train.py:898] (2/4) Epoch 7, batch 50, loss[loss=0.2382, simple_loss=0.3164, pruned_loss=0.08002, over 18350.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2988, pruned_loss=0.07068, over 812840.77 frames. ], batch size: 55, lr: 1.68e-02, grad_scale: 8.0 +2023-03-08 20:49:04,343 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0001, 4.4930, 4.6489, 3.4007, 3.6122, 3.4268, 2.2977, 2.0898], + device='cuda:2'), covar=tensor([0.0163, 0.0172, 0.0039, 0.0243, 0.0345, 0.0246, 0.0852, 0.0989], + device='cuda:2'), in_proj_covar=tensor([0.0045, 0.0042, 0.0037, 0.0051, 0.0072, 0.0047, 0.0068, 0.0073], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0005, 0.0003, 0.0004, 0.0004], + device='cuda:2') +2023-03-08 20:49:12,836 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.50 vs. limit=5.0 +2023-03-08 20:49:27,607 INFO [train.py:898] (2/4) Epoch 7, batch 100, loss[loss=0.2113, simple_loss=0.297, pruned_loss=0.06274, over 18355.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2971, pruned_loss=0.07073, over 1417614.26 frames. 
], batch size: 55, lr: 1.67e-02, grad_scale: 8.0 +2023-03-08 20:49:32,555 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6674, 4.5738, 4.7678, 4.4817, 4.3850, 4.5508, 4.9231, 4.8574], + device='cuda:2'), covar=tensor([0.0068, 0.0083, 0.0067, 0.0098, 0.0069, 0.0105, 0.0090, 0.0104], + device='cuda:2'), in_proj_covar=tensor([0.0065, 0.0049, 0.0049, 0.0063, 0.0052, 0.0073, 0.0061, 0.0061], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 20:49:35,089 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21911.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:49:58,913 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.330e+02 3.869e+02 4.841e+02 5.741e+02 1.343e+03, threshold=9.682e+02, percent-clipped=1.0 +2023-03-08 20:50:22,297 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21951.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:50:26,554 INFO [train.py:898] (2/4) Epoch 7, batch 150, loss[loss=0.2385, simple_loss=0.3152, pruned_loss=0.08094, over 18297.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2982, pruned_loss=0.07066, over 1900149.70 frames. ], batch size: 57, lr: 1.67e-02, grad_scale: 8.0 +2023-03-08 20:50:41,078 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21968.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:50:42,142 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21969.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:50:43,495 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21970.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:50:43,629 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6386, 3.7254, 5.3612, 4.5264, 3.4060, 3.2407, 4.6151, 5.5198], + device='cuda:2'), covar=tensor([0.0771, 0.1539, 0.0043, 0.0245, 0.0715, 0.0906, 0.0243, 0.0071], + device='cuda:2'), in_proj_covar=tensor([0.0132, 0.0202, 0.0075, 0.0144, 0.0164, 0.0167, 0.0150, 0.0102], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 20:51:01,863 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21984.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:51:30,384 INFO [train.py:898] (2/4) Epoch 7, batch 200, loss[loss=0.207, simple_loss=0.2906, pruned_loss=0.06168, over 18346.00 frames. ], tot_loss[loss=0.2181, simple_loss=0.297, pruned_loss=0.06956, over 2280674.11 frames. ], batch size: 55, lr: 1.67e-02, grad_scale: 8.0 +2023-03-08 20:51:43,121 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22016.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:51:59,830 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.533e+02 3.799e+02 4.724e+02 5.611e+02 1.174e+03, threshold=9.448e+02, percent-clipped=1.0 +2023-03-08 20:52:00,878 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22031.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:52:01,875 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22032.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:52:28,977 INFO [train.py:898] (2/4) Epoch 7, batch 250, loss[loss=0.24, simple_loss=0.3186, pruned_loss=0.08065, over 18313.00 frames. 
], tot_loss[loss=0.2193, simple_loss=0.2983, pruned_loss=0.07012, over 2568984.86 frames. ], batch size: 54, lr: 1.67e-02, grad_scale: 8.0 +2023-03-08 20:53:05,686 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4217, 6.0638, 5.4671, 5.8394, 5.4725, 5.4930, 6.0749, 5.9881], + device='cuda:2'), covar=tensor([0.1113, 0.0608, 0.0441, 0.0642, 0.1686, 0.0716, 0.0520, 0.0625], + device='cuda:2'), in_proj_covar=tensor([0.0454, 0.0369, 0.0280, 0.0401, 0.0555, 0.0404, 0.0489, 0.0373], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-08 20:53:28,162 INFO [train.py:898] (2/4) Epoch 7, batch 300, loss[loss=0.1964, simple_loss=0.281, pruned_loss=0.05585, over 18621.00 frames. ], tot_loss[loss=0.2175, simple_loss=0.2966, pruned_loss=0.06926, over 2792238.16 frames. ], batch size: 52, lr: 1.67e-02, grad_scale: 8.0 +2023-03-08 20:53:37,755 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-08 20:53:44,075 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22119.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:53:56,208 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22130.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:53:56,970 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.650e+02 3.954e+02 4.582e+02 6.003e+02 1.434e+03, threshold=9.164e+02, percent-clipped=5.0 +2023-03-08 20:54:26,864 INFO [train.py:898] (2/4) Epoch 7, batch 350, loss[loss=0.2238, simple_loss=0.306, pruned_loss=0.07076, over 18126.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2963, pruned_loss=0.06915, over 2962246.97 frames. ], batch size: 62, lr: 1.67e-02, grad_scale: 8.0 +2023-03-08 20:54:50,735 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22176.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:55:03,925 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-08 20:55:08,582 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22191.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:55:11,432 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9256, 4.1179, 2.0939, 4.1399, 5.0707, 2.3360, 3.6271, 3.7620], + device='cuda:2'), covar=tensor([0.0076, 0.1074, 0.1743, 0.0580, 0.0045, 0.1521, 0.0722, 0.0754], + device='cuda:2'), in_proj_covar=tensor([0.0087, 0.0185, 0.0176, 0.0176, 0.0075, 0.0163, 0.0186, 0.0182], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 20:55:25,932 INFO [train.py:898] (2/4) Epoch 7, batch 400, loss[loss=0.2839, simple_loss=0.3402, pruned_loss=0.1138, over 12695.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2959, pruned_loss=0.06907, over 3100407.39 frames. 
], batch size: 129, lr: 1.66e-02, grad_scale: 8.0 +2023-03-08 20:55:27,385 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22206.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:55:37,535 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22215.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:55:55,642 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.341e+02 3.724e+02 4.646e+02 6.354e+02 1.977e+03, threshold=9.292e+02, percent-clipped=7.0 +2023-03-08 20:56:02,947 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22237.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:56:02,965 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22237.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:56:05,369 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6563, 3.6605, 3.4575, 2.9499, 3.4072, 2.8587, 2.7226, 3.7795], + device='cuda:2'), covar=tensor([0.0035, 0.0058, 0.0062, 0.0112, 0.0079, 0.0128, 0.0168, 0.0038], + device='cuda:2'), in_proj_covar=tensor([0.0066, 0.0084, 0.0076, 0.0120, 0.0080, 0.0120, 0.0127, 0.0068], + device='cuda:2'), out_proj_covar=tensor([9.3146e-05, 1.2931e-04, 1.1590e-04, 1.9186e-04, 1.2060e-04, 1.8688e-04, + 1.9720e-04, 9.9473e-05], device='cuda:2') +2023-03-08 20:56:20,987 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22251.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:56:23,135 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3941, 6.0490, 5.3970, 5.7287, 5.5661, 5.4459, 6.0749, 5.9885], + device='cuda:2'), covar=tensor([0.1046, 0.0604, 0.0384, 0.0670, 0.1280, 0.0616, 0.0461, 0.0583], + device='cuda:2'), in_proj_covar=tensor([0.0450, 0.0370, 0.0282, 0.0397, 0.0546, 0.0400, 0.0487, 0.0371], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-08 20:56:25,003 INFO [train.py:898] (2/4) Epoch 7, batch 450, loss[loss=0.2498, simple_loss=0.3198, pruned_loss=0.08985, over 18502.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2963, pruned_loss=0.06897, over 3214448.72 frames. 
], batch size: 53, lr: 1.66e-02, grad_scale: 8.0 +2023-03-08 20:56:41,404 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22269.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:56:49,364 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22276.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:56:50,549 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9742, 4.9756, 5.0558, 4.8292, 4.7471, 4.8633, 5.1429, 5.1723], + device='cuda:2'), covar=tensor([0.0050, 0.0059, 0.0065, 0.0067, 0.0054, 0.0083, 0.0071, 0.0081], + device='cuda:2'), in_proj_covar=tensor([0.0066, 0.0048, 0.0049, 0.0064, 0.0054, 0.0073, 0.0062, 0.0061], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 20:57:15,028 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22298.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:57:16,446 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22299.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:57:23,196 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5991, 1.9380, 2.7368, 2.8308, 3.5746, 5.4042, 4.8173, 4.4509], + device='cuda:2'), covar=tensor([0.0686, 0.1491, 0.1521, 0.0907, 0.1294, 0.0037, 0.0233, 0.0220], + device='cuda:2'), in_proj_covar=tensor([0.0183, 0.0236, 0.0229, 0.0217, 0.0320, 0.0135, 0.0207, 0.0163], + device='cuda:2'), out_proj_covar=tensor([1.2604e-04, 1.6321e-04, 1.6597e-04, 1.3840e-04, 2.2138e-04, 9.0068e-05, + 1.3939e-04, 1.1234e-04], device='cuda:2') +2023-03-08 20:57:23,807 INFO [train.py:898] (2/4) Epoch 7, batch 500, loss[loss=0.1724, simple_loss=0.2517, pruned_loss=0.04654, over 18420.00 frames. ], tot_loss[loss=0.218, simple_loss=0.2972, pruned_loss=0.06937, over 3294543.48 frames. ], batch size: 43, lr: 1.66e-02, grad_scale: 8.0 +2023-03-08 20:57:37,449 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22317.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:57:47,843 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22326.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:57:53,234 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.810e+02 3.764e+02 4.516e+02 5.475e+02 8.626e+02, threshold=9.031e+02, percent-clipped=0.0 +2023-03-08 20:57:58,460 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.82 vs. limit=5.0 +2023-03-08 20:58:23,008 INFO [train.py:898] (2/4) Epoch 7, batch 550, loss[loss=0.2139, simple_loss=0.2986, pruned_loss=0.06463, over 18378.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2962, pruned_loss=0.06877, over 3371218.90 frames. ], batch size: 50, lr: 1.66e-02, grad_scale: 8.0 +2023-03-08 20:58:57,592 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22385.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:59:21,882 INFO [train.py:898] (2/4) Epoch 7, batch 600, loss[loss=0.2176, simple_loss=0.3016, pruned_loss=0.06683, over 18353.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2947, pruned_loss=0.06805, over 3427278.56 frames. 
], batch size: 56, lr: 1.66e-02, grad_scale: 8.0 +2023-03-08 20:59:22,261 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5847, 4.0521, 4.1367, 3.1906, 3.4319, 3.3157, 2.3247, 1.8035], + device='cuda:2'), covar=tensor([0.0164, 0.0162, 0.0078, 0.0248, 0.0364, 0.0204, 0.0809, 0.0975], + device='cuda:2'), in_proj_covar=tensor([0.0045, 0.0042, 0.0038, 0.0052, 0.0072, 0.0047, 0.0069, 0.0074], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 20:59:38,689 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22419.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 20:59:47,818 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2543, 2.4110, 2.3299, 2.6077, 3.3003, 3.0665, 2.7860, 2.8347], + device='cuda:2'), covar=tensor([0.0241, 0.0303, 0.0717, 0.0398, 0.0186, 0.0181, 0.0347, 0.0287], + device='cuda:2'), in_proj_covar=tensor([0.0104, 0.0080, 0.0134, 0.0113, 0.0077, 0.0063, 0.0103, 0.0104], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 20:59:52,060 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.568e+02 3.797e+02 4.706e+02 5.910e+02 1.335e+03, threshold=9.411e+02, percent-clipped=3.0 +2023-03-08 21:00:09,526 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22446.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:00:15,172 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8873, 4.9957, 4.9564, 4.7444, 4.6972, 4.7573, 5.1763, 5.0699], + device='cuda:2'), covar=tensor([0.0060, 0.0060, 0.0075, 0.0084, 0.0066, 0.0102, 0.0064, 0.0093], + device='cuda:2'), in_proj_covar=tensor([0.0067, 0.0050, 0.0051, 0.0065, 0.0055, 0.0074, 0.0064, 0.0063], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 21:00:19,239 INFO [train.py:898] (2/4) Epoch 7, batch 650, loss[loss=0.1949, simple_loss=0.2832, pruned_loss=0.05326, over 18486.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2945, pruned_loss=0.06785, over 3471646.55 frames. 
], batch size: 51, lr: 1.65e-02, grad_scale: 8.0 +2023-03-08 21:00:35,199 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22467.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:00:48,060 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0790, 4.2095, 2.2545, 4.2594, 5.0740, 2.2424, 3.5426, 3.7723], + device='cuda:2'), covar=tensor([0.0082, 0.0908, 0.1596, 0.0453, 0.0047, 0.1554, 0.0765, 0.0729], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0190, 0.0180, 0.0178, 0.0077, 0.0168, 0.0189, 0.0184], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-08 21:00:56,992 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22486.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:00:59,450 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9047, 3.8942, 5.3534, 3.6736, 4.4670, 2.7922, 3.1196, 2.3085], + device='cuda:2'), covar=tensor([0.0692, 0.0593, 0.0041, 0.0360, 0.0436, 0.1698, 0.1985, 0.1290], + device='cuda:2'), in_proj_covar=tensor([0.0173, 0.0189, 0.0088, 0.0146, 0.0201, 0.0228, 0.0228, 0.0189], + device='cuda:2'), out_proj_covar=tensor([1.6020e-04, 1.8085e-04, 8.5673e-05, 1.3870e-04, 1.9087e-04, 2.1783e-04, + 2.2043e-04, 1.8192e-04], device='cuda:2') +2023-03-08 21:01:18,247 INFO [train.py:898] (2/4) Epoch 7, batch 700, loss[loss=0.1888, simple_loss=0.268, pruned_loss=0.05473, over 18265.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2943, pruned_loss=0.06743, over 3505149.37 frames. ], batch size: 47, lr: 1.65e-02, grad_scale: 8.0 +2023-03-08 21:01:19,488 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22506.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:01:20,883 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-08 21:01:49,200 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.755e+02 3.999e+02 4.789e+02 5.664e+02 1.125e+03, threshold=9.578e+02, percent-clipped=2.0 +2023-03-08 21:01:50,640 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22532.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:02:08,978 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5471, 3.3259, 3.1865, 2.7186, 3.2136, 2.4581, 2.5047, 3.4762], + device='cuda:2'), covar=tensor([0.0026, 0.0058, 0.0059, 0.0111, 0.0060, 0.0162, 0.0162, 0.0049], + device='cuda:2'), in_proj_covar=tensor([0.0064, 0.0084, 0.0076, 0.0120, 0.0078, 0.0119, 0.0125, 0.0068], + device='cuda:2'), out_proj_covar=tensor([9.1072e-05, 1.2927e-04, 1.1531e-04, 1.9031e-04, 1.1715e-04, 1.8526e-04, + 1.9426e-04, 9.9018e-05], device='cuda:2') +2023-03-08 21:02:15,568 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22554.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:02:16,498 INFO [train.py:898] (2/4) Epoch 7, batch 750, loss[loss=0.2136, simple_loss=0.2987, pruned_loss=0.06424, over 18492.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2953, pruned_loss=0.06825, over 3516154.36 frames. 
], batch size: 53, lr: 1.65e-02, grad_scale: 8.0 +2023-03-08 21:02:37,337 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22571.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:03:01,762 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22593.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:03:15,338 INFO [train.py:898] (2/4) Epoch 7, batch 800, loss[loss=0.2131, simple_loss=0.2937, pruned_loss=0.0663, over 18631.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2957, pruned_loss=0.06824, over 3541017.58 frames. ], batch size: 52, lr: 1.65e-02, grad_scale: 8.0 +2023-03-08 21:03:41,300 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22626.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:03:46,428 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.314e+02 4.023e+02 4.944e+02 6.162e+02 1.524e+03, threshold=9.887e+02, percent-clipped=2.0 +2023-03-08 21:03:55,386 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22639.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:03:56,381 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7919, 4.9326, 5.0013, 4.7500, 4.5444, 4.8084, 5.0974, 5.0747], + device='cuda:2'), covar=tensor([0.0063, 0.0060, 0.0055, 0.0073, 0.0071, 0.0089, 0.0059, 0.0073], + device='cuda:2'), in_proj_covar=tensor([0.0068, 0.0050, 0.0051, 0.0065, 0.0055, 0.0075, 0.0064, 0.0063], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 21:04:10,505 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-08 21:04:13,323 INFO [train.py:898] (2/4) Epoch 7, batch 850, loss[loss=0.2087, simple_loss=0.2913, pruned_loss=0.06305, over 18537.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2957, pruned_loss=0.06839, over 3552611.70 frames. ], batch size: 49, lr: 1.65e-02, grad_scale: 8.0 +2023-03-08 21:04:36,640 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22674.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:05:07,204 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22700.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 21:05:12,417 INFO [train.py:898] (2/4) Epoch 7, batch 900, loss[loss=0.1857, simple_loss=0.2652, pruned_loss=0.0531, over 18508.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2946, pruned_loss=0.06801, over 3565836.99 frames. 
], batch size: 47, lr: 1.65e-02, grad_scale: 8.0 +2023-03-08 21:05:36,585 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0842, 4.1831, 5.2949, 3.2879, 4.3982, 2.7910, 3.0390, 2.2365], + device='cuda:2'), covar=tensor([0.0693, 0.0508, 0.0039, 0.0475, 0.0499, 0.1846, 0.1838, 0.1456], + device='cuda:2'), in_proj_covar=tensor([0.0176, 0.0190, 0.0089, 0.0146, 0.0203, 0.0230, 0.0231, 0.0191], + device='cuda:2'), out_proj_covar=tensor([1.6273e-04, 1.8145e-04, 8.6576e-05, 1.3922e-04, 1.9275e-04, 2.1909e-04, + 2.2260e-04, 1.8338e-04], device='cuda:2') +2023-03-08 21:05:44,110 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.332e+02 3.928e+02 4.679e+02 5.681e+02 1.388e+03, threshold=9.358e+02, percent-clipped=3.0 +2023-03-08 21:05:55,314 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22741.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:06:10,774 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-03-08 21:06:11,227 INFO [train.py:898] (2/4) Epoch 7, batch 950, loss[loss=0.226, simple_loss=0.304, pruned_loss=0.074, over 16925.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2943, pruned_loss=0.06775, over 3571370.75 frames. ], batch size: 78, lr: 1.64e-02, grad_scale: 8.0 +2023-03-08 21:06:49,040 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22786.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:06:58,194 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22794.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:07:10,657 INFO [train.py:898] (2/4) Epoch 7, batch 1000, loss[loss=0.1842, simple_loss=0.262, pruned_loss=0.0532, over 18167.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2939, pruned_loss=0.06758, over 3566668.35 frames. ], batch size: 44, lr: 1.64e-02, grad_scale: 8.0 +2023-03-08 21:07:33,508 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22825.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:07:41,078 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.326e+02 3.793e+02 5.006e+02 6.046e+02 1.509e+03, threshold=1.001e+03, percent-clipped=4.0 +2023-03-08 21:07:42,901 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22832.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:07:45,172 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22834.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:08:09,923 INFO [train.py:898] (2/4) Epoch 7, batch 1050, loss[loss=0.177, simple_loss=0.2541, pruned_loss=0.04991, over 18438.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2938, pruned_loss=0.06771, over 3576485.97 frames. 
], batch size: 43, lr: 1.64e-02, grad_scale: 8.0 +2023-03-08 21:08:10,354 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22855.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:08:28,517 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22871.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:08:38,426 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22880.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:08:47,291 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22886.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:08:55,693 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22893.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:09:09,172 INFO [train.py:898] (2/4) Epoch 7, batch 1100, loss[loss=0.2116, simple_loss=0.2953, pruned_loss=0.06395, over 18487.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2943, pruned_loss=0.06801, over 3576709.11 frames. ], batch size: 53, lr: 1.64e-02, grad_scale: 8.0 +2023-03-08 21:09:25,080 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22919.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:09:38,077 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.302e+02 3.634e+02 4.460e+02 5.357e+02 1.645e+03, threshold=8.921e+02, percent-clipped=3.0 +2023-03-08 21:09:51,131 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=22941.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:10:07,845 INFO [train.py:898] (2/4) Epoch 7, batch 1150, loss[loss=0.2219, simple_loss=0.3017, pruned_loss=0.07102, over 18224.00 frames. ], tot_loss[loss=0.216, simple_loss=0.295, pruned_loss=0.06852, over 3571476.36 frames. ], batch size: 60, lr: 1.64e-02, grad_scale: 8.0 +2023-03-08 21:10:55,034 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22995.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 21:11:06,251 INFO [train.py:898] (2/4) Epoch 7, batch 1200, loss[loss=0.2009, simple_loss=0.2883, pruned_loss=0.05673, over 18391.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2955, pruned_loss=0.06915, over 3564536.08 frames. ], batch size: 52, lr: 1.64e-02, grad_scale: 8.0 +2023-03-08 21:11:34,434 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.86 vs. limit=2.0 +2023-03-08 21:11:35,903 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.453e+02 4.180e+02 4.856e+02 6.104e+02 1.411e+03, threshold=9.713e+02, percent-clipped=4.0 +2023-03-08 21:11:49,482 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23041.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:12:05,439 INFO [train.py:898] (2/4) Epoch 7, batch 1250, loss[loss=0.2026, simple_loss=0.2716, pruned_loss=0.06685, over 18436.00 frames. ], tot_loss[loss=0.2174, simple_loss=0.296, pruned_loss=0.06942, over 3550763.01 frames. ], batch size: 43, lr: 1.63e-02, grad_scale: 8.0 +2023-03-08 21:12:44,838 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23089.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:13:04,092 INFO [train.py:898] (2/4) Epoch 7, batch 1300, loss[loss=0.2085, simple_loss=0.277, pruned_loss=0.06996, over 18131.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2952, pruned_loss=0.06925, over 3547839.08 frames. 
], batch size: 40, lr: 1.63e-02, grad_scale: 8.0 +2023-03-08 21:13:07,936 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23108.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:13:33,528 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.650e+02 3.767e+02 4.521e+02 6.073e+02 1.537e+03, threshold=9.042e+02, percent-clipped=6.0 +2023-03-08 21:13:57,457 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23150.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:14:03,019 INFO [train.py:898] (2/4) Epoch 7, batch 1350, loss[loss=0.2217, simple_loss=0.3033, pruned_loss=0.07002, over 18233.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.296, pruned_loss=0.06913, over 3554642.19 frames. ], batch size: 60, lr: 1.63e-02, grad_scale: 8.0 +2023-03-08 21:14:19,175 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23169.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:14:32,498 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23181.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:15:02,205 INFO [train.py:898] (2/4) Epoch 7, batch 1400, loss[loss=0.1818, simple_loss=0.2591, pruned_loss=0.05219, over 17673.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2949, pruned_loss=0.06834, over 3576385.91 frames. ], batch size: 39, lr: 1.63e-02, grad_scale: 8.0 +2023-03-08 21:15:16,064 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23217.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:15:29,674 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23229.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:15:31,615 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.272e+02 3.620e+02 4.462e+02 5.791e+02 9.661e+02, threshold=8.924e+02, percent-clipped=4.0 +2023-03-08 21:15:49,592 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.64 vs. limit=5.0 +2023-03-08 21:16:00,108 INFO [train.py:898] (2/4) Epoch 7, batch 1450, loss[loss=0.2384, simple_loss=0.3006, pruned_loss=0.08809, over 18262.00 frames. ], tot_loss[loss=0.2157, simple_loss=0.2948, pruned_loss=0.06828, over 3583059.58 frames. ], batch size: 47, lr: 1.63e-02, grad_scale: 8.0 +2023-03-08 21:16:26,944 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23278.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:16:40,573 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23290.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:16:46,368 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23295.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:16:55,730 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5893, 2.8701, 2.6557, 2.7882, 3.4724, 3.5067, 2.9099, 2.7985], + device='cuda:2'), covar=tensor([0.0194, 0.0226, 0.0655, 0.0370, 0.0176, 0.0164, 0.0354, 0.0352], + device='cuda:2'), in_proj_covar=tensor([0.0106, 0.0083, 0.0137, 0.0115, 0.0081, 0.0067, 0.0107, 0.0107], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 21:16:57,594 INFO [train.py:898] (2/4) Epoch 7, batch 1500, loss[loss=0.195, simple_loss=0.2666, pruned_loss=0.0617, over 18409.00 frames. ], tot_loss[loss=0.2148, simple_loss=0.2941, pruned_loss=0.06774, over 3595047.26 frames. 
], batch size: 42, lr: 1.63e-02, grad_scale: 8.0 +2023-03-08 21:17:27,942 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.706e+02 4.019e+02 4.944e+02 6.041e+02 1.007e+03, threshold=9.887e+02, percent-clipped=2.0 +2023-03-08 21:17:41,547 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23343.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:17:55,130 INFO [train.py:898] (2/4) Epoch 7, batch 1550, loss[loss=0.2458, simple_loss=0.3324, pruned_loss=0.0796, over 18275.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2938, pruned_loss=0.06764, over 3595644.77 frames. ], batch size: 60, lr: 1.62e-02, grad_scale: 8.0 +2023-03-08 21:18:08,866 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.87 vs. limit=2.0 +2023-03-08 21:18:53,702 INFO [train.py:898] (2/4) Epoch 7, batch 1600, loss[loss=0.1929, simple_loss=0.2826, pruned_loss=0.05156, over 18311.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2923, pruned_loss=0.06696, over 3587302.08 frames. ], batch size: 54, lr: 1.62e-02, grad_scale: 8.0 +2023-03-08 21:18:55,372 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2242, 4.2565, 2.4651, 4.3431, 5.2649, 2.2820, 3.5803, 3.8461], + device='cuda:2'), covar=tensor([0.0065, 0.0910, 0.1436, 0.0485, 0.0051, 0.1406, 0.0753, 0.0739], + device='cuda:2'), in_proj_covar=tensor([0.0088, 0.0188, 0.0175, 0.0175, 0.0075, 0.0161, 0.0187, 0.0182], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 21:19:20,102 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5738, 6.1041, 5.4889, 5.9019, 5.6366, 5.6020, 6.2128, 6.0933], + device='cuda:2'), covar=tensor([0.1113, 0.0587, 0.0356, 0.0542, 0.1353, 0.0598, 0.0415, 0.0555], + device='cuda:2'), in_proj_covar=tensor([0.0450, 0.0363, 0.0279, 0.0401, 0.0541, 0.0404, 0.0484, 0.0378], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-08 21:19:25,468 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 3.006e+02 3.819e+02 4.598e+02 5.727e+02 1.079e+03, threshold=9.196e+02, percent-clipped=2.0 +2023-03-08 21:19:47,055 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23450.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:19:52,543 INFO [train.py:898] (2/4) Epoch 7, batch 1650, loss[loss=0.1921, simple_loss=0.2775, pruned_loss=0.05337, over 18272.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2929, pruned_loss=0.06703, over 3592765.16 frames. ], batch size: 49, lr: 1.62e-02, grad_scale: 8.0 +2023-03-08 21:20:04,715 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23464.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:20:14,076 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. 
limit=2.0 +2023-03-08 21:20:24,344 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23481.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:20:27,840 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0062, 4.8553, 4.9493, 4.7164, 4.7539, 4.8741, 5.1629, 5.1266], + device='cuda:2'), covar=tensor([0.0054, 0.0071, 0.0071, 0.0086, 0.0069, 0.0089, 0.0064, 0.0085], + device='cuda:2'), in_proj_covar=tensor([0.0069, 0.0050, 0.0050, 0.0066, 0.0056, 0.0075, 0.0064, 0.0064], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 21:20:32,484 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23488.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:20:43,642 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23498.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:20:51,423 INFO [train.py:898] (2/4) Epoch 7, batch 1700, loss[loss=0.2257, simple_loss=0.31, pruned_loss=0.07066, over 18498.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.2917, pruned_loss=0.06642, over 3594742.92 frames. ], batch size: 51, lr: 1.62e-02, grad_scale: 8.0 +2023-03-08 21:21:21,210 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23529.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:21:23,330 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.409e+02 3.809e+02 4.583e+02 5.657e+02 1.396e+03, threshold=9.165e+02, percent-clipped=6.0 +2023-03-08 21:21:44,346 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23549.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 21:21:50,737 INFO [train.py:898] (2/4) Epoch 7, batch 1750, loss[loss=0.2326, simple_loss=0.3147, pruned_loss=0.07527, over 18128.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2929, pruned_loss=0.06661, over 3588880.19 frames. ], batch size: 62, lr: 1.62e-02, grad_scale: 16.0 +2023-03-08 21:22:13,852 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23573.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:22:27,625 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23585.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:22:33,836 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-08 21:22:50,506 INFO [train.py:898] (2/4) Epoch 7, batch 1800, loss[loss=0.2193, simple_loss=0.304, pruned_loss=0.06734, over 18411.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2942, pruned_loss=0.06716, over 3582360.60 frames. ], batch size: 52, lr: 1.62e-02, grad_scale: 16.0 +2023-03-08 21:23:21,006 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.500e+02 3.624e+02 4.625e+02 5.651e+02 1.017e+03, threshold=9.251e+02, percent-clipped=3.0 +2023-03-08 21:23:47,601 INFO [train.py:898] (2/4) Epoch 7, batch 1850, loss[loss=0.2113, simple_loss=0.2978, pruned_loss=0.06244, over 18486.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2944, pruned_loss=0.06792, over 3589848.98 frames. ], batch size: 53, lr: 1.61e-02, grad_scale: 16.0 +2023-03-08 21:24:35,317 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.86 vs. limit=2.0 +2023-03-08 21:24:45,517 INFO [train.py:898] (2/4) Epoch 7, batch 1900, loss[loss=0.2301, simple_loss=0.3144, pruned_loss=0.07293, over 18492.00 frames. 
], tot_loss[loss=0.2148, simple_loss=0.2939, pruned_loss=0.06785, over 3584767.37 frames. ], batch size: 59, lr: 1.61e-02, grad_scale: 16.0 +2023-03-08 21:24:50,276 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.3835, 3.4997, 5.2009, 4.2881, 3.2320, 2.8600, 4.4621, 5.2102], + device='cuda:2'), covar=tensor([0.0867, 0.1513, 0.0053, 0.0283, 0.0809, 0.1098, 0.0293, 0.0131], + device='cuda:2'), in_proj_covar=tensor([0.0131, 0.0205, 0.0077, 0.0147, 0.0163, 0.0166, 0.0152, 0.0107], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 21:25:17,160 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.556e+02 4.080e+02 4.948e+02 6.191e+02 1.850e+03, threshold=9.895e+02, percent-clipped=8.0 +2023-03-08 21:25:43,794 INFO [train.py:898] (2/4) Epoch 7, batch 1950, loss[loss=0.1983, simple_loss=0.2807, pruned_loss=0.05797, over 18400.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2931, pruned_loss=0.06709, over 3590547.98 frames. ], batch size: 48, lr: 1.61e-02, grad_scale: 16.0 +2023-03-08 21:25:54,211 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23764.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:26:01,029 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6552, 3.5610, 3.3044, 2.9014, 3.3513, 2.7356, 2.5988, 3.6240], + device='cuda:2'), covar=tensor([0.0026, 0.0062, 0.0060, 0.0120, 0.0063, 0.0137, 0.0144, 0.0049], + device='cuda:2'), in_proj_covar=tensor([0.0064, 0.0087, 0.0078, 0.0121, 0.0079, 0.0123, 0.0129, 0.0069], + device='cuda:2'), out_proj_covar=tensor([8.9268e-05, 1.3216e-04, 1.1734e-04, 1.9102e-04, 1.1800e-04, 1.9061e-04, + 1.9971e-04, 1.0091e-04], device='cuda:2') +2023-03-08 21:26:42,012 INFO [train.py:898] (2/4) Epoch 7, batch 2000, loss[loss=0.2288, simple_loss=0.3073, pruned_loss=0.07513, over 18377.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2938, pruned_loss=0.06739, over 3588175.85 frames. ], batch size: 50, lr: 1.61e-02, grad_scale: 8.0 +2023-03-08 21:26:50,349 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23812.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:26:51,857 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2762, 4.4531, 2.3620, 4.4121, 5.2449, 2.4706, 3.8532, 3.7928], + device='cuda:2'), covar=tensor([0.0075, 0.0932, 0.1645, 0.0498, 0.0055, 0.1337, 0.0671, 0.0757], + device='cuda:2'), in_proj_covar=tensor([0.0092, 0.0193, 0.0178, 0.0179, 0.0078, 0.0166, 0.0192, 0.0187], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 21:27:13,567 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.566e+02 3.909e+02 4.831e+02 5.895e+02 1.179e+03, threshold=9.662e+02, percent-clipped=2.0 +2023-03-08 21:27:29,079 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23844.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 21:27:40,874 INFO [train.py:898] (2/4) Epoch 7, batch 2050, loss[loss=0.2007, simple_loss=0.2803, pruned_loss=0.06056, over 18535.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2941, pruned_loss=0.06743, over 3588008.57 frames. 
], batch size: 49, lr: 1.61e-02, grad_scale: 8.0 +2023-03-08 21:28:01,668 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23873.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:28:16,304 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23885.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:28:38,825 INFO [train.py:898] (2/4) Epoch 7, batch 2100, loss[loss=0.2276, simple_loss=0.3076, pruned_loss=0.07378, over 18107.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2937, pruned_loss=0.06742, over 3595434.59 frames. ], batch size: 62, lr: 1.61e-02, grad_scale: 8.0 +2023-03-08 21:28:57,399 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23921.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:29:04,420 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6545, 3.5303, 3.3777, 3.0331, 3.3431, 2.7742, 2.9033, 3.7112], + device='cuda:2'), covar=tensor([0.0028, 0.0053, 0.0056, 0.0104, 0.0067, 0.0143, 0.0126, 0.0048], + device='cuda:2'), in_proj_covar=tensor([0.0065, 0.0088, 0.0079, 0.0123, 0.0080, 0.0123, 0.0130, 0.0070], + device='cuda:2'), out_proj_covar=tensor([8.9503e-05, 1.3452e-04, 1.1830e-04, 1.9302e-04, 1.1837e-04, 1.9121e-04, + 1.9991e-04, 1.0212e-04], device='cuda:2') +2023-03-08 21:29:09,537 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.380e+02 3.638e+02 4.339e+02 5.858e+02 1.130e+03, threshold=8.677e+02, percent-clipped=1.0 +2023-03-08 21:29:10,876 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=23933.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:29:37,937 INFO [train.py:898] (2/4) Epoch 7, batch 2150, loss[loss=0.2658, simple_loss=0.3296, pruned_loss=0.101, over 12395.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2931, pruned_loss=0.06709, over 3595654.12 frames. ], batch size: 132, lr: 1.60e-02, grad_scale: 8.0 +2023-03-08 21:30:11,978 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5976, 2.8243, 4.1694, 3.8485, 2.4143, 4.4216, 3.8427, 2.7870], + device='cuda:2'), covar=tensor([0.0365, 0.1094, 0.0154, 0.0221, 0.1390, 0.0149, 0.0366, 0.0917], + device='cuda:2'), in_proj_covar=tensor([0.0162, 0.0203, 0.0118, 0.0122, 0.0196, 0.0162, 0.0172, 0.0184], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 21:30:16,057 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1415, 5.4871, 2.8576, 5.2245, 5.1151, 5.5006, 5.3337, 2.8347], + device='cuda:2'), covar=tensor([0.0141, 0.0043, 0.0735, 0.0065, 0.0065, 0.0069, 0.0079, 0.0881], + device='cuda:2'), in_proj_covar=tensor([0.0067, 0.0056, 0.0085, 0.0072, 0.0067, 0.0057, 0.0071, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 21:30:38,662 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.35 vs. limit=5.0 +2023-03-08 21:30:41,408 INFO [train.py:898] (2/4) Epoch 7, batch 2200, loss[loss=0.2252, simple_loss=0.307, pruned_loss=0.07173, over 18307.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2923, pruned_loss=0.06658, over 3601712.05 frames. 
], batch size: 54, lr: 1.60e-02, grad_scale: 8.0 +2023-03-08 21:30:45,182 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5499, 3.4248, 4.1239, 2.8838, 3.5868, 2.6364, 2.6166, 2.3729], + device='cuda:2'), covar=tensor([0.0592, 0.0488, 0.0075, 0.0353, 0.0495, 0.1546, 0.1632, 0.1091], + device='cuda:2'), in_proj_covar=tensor([0.0178, 0.0194, 0.0091, 0.0147, 0.0202, 0.0231, 0.0237, 0.0194], + device='cuda:2'), out_proj_covar=tensor([1.6393e-04, 1.8341e-04, 8.7546e-05, 1.3845e-04, 1.9188e-04, 2.1838e-04, + 2.2598e-04, 1.8514e-04], device='cuda:2') +2023-03-08 21:31:11,876 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.180e+02 3.947e+02 4.705e+02 5.545e+02 1.194e+03, threshold=9.409e+02, percent-clipped=3.0 +2023-03-08 21:31:17,860 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24037.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:31:40,404 INFO [train.py:898] (2/4) Epoch 7, batch 2250, loss[loss=0.1954, simple_loss=0.2801, pruned_loss=0.0553, over 18545.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2927, pruned_loss=0.06679, over 3594747.49 frames. ], batch size: 49, lr: 1.60e-02, grad_scale: 8.0 +2023-03-08 21:32:30,468 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24098.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:32:38,522 INFO [train.py:898] (2/4) Epoch 7, batch 2300, loss[loss=0.1791, simple_loss=0.2522, pruned_loss=0.05299, over 18456.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2935, pruned_loss=0.06751, over 3586566.49 frames. ], batch size: 43, lr: 1.60e-02, grad_scale: 8.0 +2023-03-08 21:33:08,946 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.576e+02 4.201e+02 5.158e+02 6.206e+02 1.861e+03, threshold=1.032e+03, percent-clipped=10.0 +2023-03-08 21:33:23,079 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=24144.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 21:33:36,520 INFO [train.py:898] (2/4) Epoch 7, batch 2350, loss[loss=0.2205, simple_loss=0.3011, pruned_loss=0.0699, over 16151.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2942, pruned_loss=0.06784, over 3582990.93 frames. ], batch size: 94, lr: 1.60e-02, grad_scale: 8.0 +2023-03-08 21:34:19,527 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24192.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:34:36,080 INFO [train.py:898] (2/4) Epoch 7, batch 2400, loss[loss=0.2317, simple_loss=0.3046, pruned_loss=0.07943, over 18121.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2939, pruned_loss=0.06779, over 3578724.99 frames. 
], batch size: 62, lr: 1.60e-02, grad_scale: 8.0 +2023-03-08 21:35:01,331 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4012, 3.2658, 1.8026, 4.2070, 2.8133, 4.3724, 1.8835, 3.6700], + device='cuda:2'), covar=tensor([0.0560, 0.0815, 0.1417, 0.0384, 0.0995, 0.0185, 0.1347, 0.0392], + device='cuda:2'), in_proj_covar=tensor([0.0170, 0.0198, 0.0168, 0.0197, 0.0169, 0.0179, 0.0179, 0.0166], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 21:35:09,122 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.669e+02 3.723e+02 4.227e+02 5.514e+02 9.877e+02, threshold=8.454e+02, percent-clipped=0.0 +2023-03-08 21:35:17,475 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5881, 5.5452, 5.0509, 5.4551, 5.4337, 4.9334, 5.3979, 5.1068], + device='cuda:2'), covar=tensor([0.0296, 0.0264, 0.1299, 0.0645, 0.0473, 0.0313, 0.0320, 0.0704], + device='cuda:2'), in_proj_covar=tensor([0.0348, 0.0382, 0.0547, 0.0309, 0.0287, 0.0364, 0.0387, 0.0490], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-08 21:35:30,711 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5242, 1.8191, 2.8427, 2.7762, 3.5874, 5.1353, 4.3844, 4.5015], + device='cuda:2'), covar=tensor([0.0800, 0.1569, 0.1600, 0.0953, 0.1276, 0.0050, 0.0354, 0.0205], + device='cuda:2'), in_proj_covar=tensor([0.0193, 0.0246, 0.0243, 0.0222, 0.0326, 0.0142, 0.0219, 0.0171], + device='cuda:2'), out_proj_covar=tensor([1.2960e-04, 1.6615e-04, 1.7141e-04, 1.3827e-04, 2.2144e-04, 9.2552e-05, + 1.4245e-04, 1.1489e-04], device='cuda:2') +2023-03-08 21:35:35,277 INFO [train.py:898] (2/4) Epoch 7, batch 2450, loss[loss=0.193, simple_loss=0.2758, pruned_loss=0.05507, over 18365.00 frames. ], tot_loss[loss=0.2128, simple_loss=0.2925, pruned_loss=0.06657, over 3588144.14 frames. ], batch size: 46, lr: 1.59e-02, grad_scale: 8.0 +2023-03-08 21:36:20,063 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0172, 5.0557, 4.9711, 4.8187, 4.7587, 4.8939, 5.2403, 5.0965], + device='cuda:2'), covar=tensor([0.0067, 0.0056, 0.0068, 0.0090, 0.0067, 0.0093, 0.0090, 0.0124], + device='cuda:2'), in_proj_covar=tensor([0.0071, 0.0051, 0.0053, 0.0067, 0.0056, 0.0077, 0.0067, 0.0066], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0002, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 21:36:33,632 INFO [train.py:898] (2/4) Epoch 7, batch 2500, loss[loss=0.2439, simple_loss=0.3125, pruned_loss=0.0876, over 17054.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2922, pruned_loss=0.06652, over 3588094.96 frames. ], batch size: 78, lr: 1.59e-02, grad_scale: 8.0 +2023-03-08 21:37:06,510 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.422e+02 3.679e+02 4.544e+02 5.517e+02 9.659e+02, threshold=9.088e+02, percent-clipped=5.0 +2023-03-08 21:37:31,459 INFO [train.py:898] (2/4) Epoch 7, batch 2550, loss[loss=0.2299, simple_loss=0.3075, pruned_loss=0.07616, over 18498.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2924, pruned_loss=0.06655, over 3590956.33 frames. ], batch size: 51, lr: 1.59e-02, grad_scale: 8.0 +2023-03-08 21:38:08,159 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. 
limit=2.0 +2023-03-08 21:38:16,259 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24393.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:38:18,728 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4537, 2.8599, 2.2997, 2.7474, 3.5581, 3.4093, 2.9438, 3.0211], + device='cuda:2'), covar=tensor([0.0216, 0.0182, 0.0691, 0.0404, 0.0130, 0.0094, 0.0356, 0.0267], + device='cuda:2'), in_proj_covar=tensor([0.0110, 0.0086, 0.0143, 0.0120, 0.0085, 0.0070, 0.0111, 0.0111], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 21:38:29,984 INFO [train.py:898] (2/4) Epoch 7, batch 2600, loss[loss=0.2209, simple_loss=0.2973, pruned_loss=0.0723, over 18536.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2919, pruned_loss=0.06626, over 3592952.33 frames. ], batch size: 49, lr: 1.59e-02, grad_scale: 4.0 +2023-03-08 21:38:53,588 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-08 21:39:05,101 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.252e+02 3.780e+02 4.661e+02 5.457e+02 1.160e+03, threshold=9.322e+02, percent-clipped=5.0 +2023-03-08 21:39:29,141 INFO [train.py:898] (2/4) Epoch 7, batch 2650, loss[loss=0.2128, simple_loss=0.297, pruned_loss=0.06428, over 18354.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2925, pruned_loss=0.06641, over 3577243.48 frames. ], batch size: 55, lr: 1.59e-02, grad_scale: 4.0 +2023-03-08 21:39:40,082 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-08 21:40:27,784 INFO [train.py:898] (2/4) Epoch 7, batch 2700, loss[loss=0.1965, simple_loss=0.2704, pruned_loss=0.06127, over 18371.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2922, pruned_loss=0.06643, over 3568105.94 frames. ], batch size: 42, lr: 1.59e-02, grad_scale: 4.0 +2023-03-08 21:40:50,039 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-03-08 21:41:02,504 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.355e+02 3.494e+02 4.469e+02 5.662e+02 1.849e+03, threshold=8.938e+02, percent-clipped=8.0 +2023-03-08 21:41:17,841 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24547.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:41:26,597 INFO [train.py:898] (2/4) Epoch 7, batch 2750, loss[loss=0.2265, simple_loss=0.3103, pruned_loss=0.07132, over 18214.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.292, pruned_loss=0.06625, over 3580259.94 frames. ], batch size: 60, lr: 1.59e-02, grad_scale: 4.0 +2023-03-08 21:41:43,979 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24569.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:42:25,725 INFO [train.py:898] (2/4) Epoch 7, batch 2800, loss[loss=0.1731, simple_loss=0.2521, pruned_loss=0.04706, over 18441.00 frames. ], tot_loss[loss=0.2121, simple_loss=0.2916, pruned_loss=0.06633, over 3580943.21 frames. 
], batch size: 43, lr: 1.58e-02, grad_scale: 8.0 +2023-03-08 21:42:28,397 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7726, 3.5191, 3.4403, 3.0555, 3.3497, 2.8586, 2.8083, 3.8172], + device='cuda:2'), covar=tensor([0.0030, 0.0065, 0.0060, 0.0108, 0.0072, 0.0136, 0.0161, 0.0045], + device='cuda:2'), in_proj_covar=tensor([0.0067, 0.0089, 0.0078, 0.0125, 0.0080, 0.0124, 0.0132, 0.0072], + device='cuda:2'), out_proj_covar=tensor([9.2811e-05, 1.3493e-04, 1.1605e-04, 1.9552e-04, 1.1841e-04, 1.9217e-04, + 2.0275e-04, 1.0539e-04], device='cuda:2') +2023-03-08 21:42:29,406 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24608.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:42:47,482 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-03-08 21:42:55,616 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24630.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:43:01,135 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.569e+02 3.885e+02 4.580e+02 5.331e+02 1.147e+03, threshold=9.161e+02, percent-clipped=3.0 +2023-03-08 21:43:17,639 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-08 21:43:23,586 INFO [train.py:898] (2/4) Epoch 7, batch 2850, loss[loss=0.195, simple_loss=0.2714, pruned_loss=0.05933, over 18247.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.292, pruned_loss=0.06664, over 3581187.32 frames. ], batch size: 45, lr: 1.58e-02, grad_scale: 4.0 +2023-03-08 21:44:08,952 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=24693.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:44:22,182 INFO [train.py:898] (2/4) Epoch 7, batch 2900, loss[loss=0.2064, simple_loss=0.2912, pruned_loss=0.0608, over 18618.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2927, pruned_loss=0.06687, over 3586853.98 frames. ], batch size: 52, lr: 1.58e-02, grad_scale: 4.0 +2023-03-08 21:44:57,697 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.094e+02 3.800e+02 4.687e+02 5.853e+02 1.844e+03, threshold=9.374e+02, percent-clipped=5.0 +2023-03-08 21:45:05,184 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=24741.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:45:05,351 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7145, 4.1989, 2.7956, 3.8708, 3.9329, 4.1547, 4.0686, 2.8147], + device='cuda:2'), covar=tensor([0.0124, 0.0050, 0.0542, 0.0182, 0.0075, 0.0063, 0.0085, 0.0705], + device='cuda:2'), in_proj_covar=tensor([0.0067, 0.0057, 0.0084, 0.0071, 0.0067, 0.0057, 0.0071, 0.0087], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 21:45:20,844 INFO [train.py:898] (2/4) Epoch 7, batch 2950, loss[loss=0.2213, simple_loss=0.3032, pruned_loss=0.06971, over 18101.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2926, pruned_loss=0.06679, over 3580790.97 frames. ], batch size: 62, lr: 1.58e-02, grad_scale: 4.0 +2023-03-08 21:45:32,627 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24765.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:46:20,380 INFO [train.py:898] (2/4) Epoch 7, batch 3000, loss[loss=0.2208, simple_loss=0.3104, pruned_loss=0.06558, over 18357.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2912, pruned_loss=0.0661, over 3578273.18 frames. 
], batch size: 55, lr: 1.58e-02, grad_scale: 4.0 +2023-03-08 21:46:20,380 INFO [train.py:923] (2/4) Computing validation loss +2023-03-08 21:46:29,816 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8600, 3.8391, 3.4520, 3.7521, 3.7677, 3.2617, 3.7508, 3.4936], + device='cuda:2'), covar=tensor([0.0485, 0.0558, 0.1981, 0.0831, 0.0600, 0.0666, 0.0543, 0.1267], + device='cuda:2'), in_proj_covar=tensor([0.0351, 0.0391, 0.0552, 0.0317, 0.0291, 0.0372, 0.0392, 0.0502], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-08 21:46:32,362 INFO [train.py:932] (2/4) Epoch 7, validation: loss=0.1689, simple_loss=0.2715, pruned_loss=0.03314, over 944034.00 frames. +2023-03-08 21:46:32,363 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-08 21:46:56,565 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5307, 5.4383, 4.9423, 5.4750, 5.4440, 4.8683, 5.3915, 5.1492], + device='cuda:2'), covar=tensor([0.0340, 0.0381, 0.1505, 0.0649, 0.0426, 0.0411, 0.0349, 0.0798], + device='cuda:2'), in_proj_covar=tensor([0.0349, 0.0391, 0.0551, 0.0317, 0.0288, 0.0371, 0.0390, 0.0498], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-08 21:46:57,845 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24826.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:47:08,250 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.797e+02 4.000e+02 4.650e+02 5.894e+02 1.091e+03, threshold=9.301e+02, percent-clipped=1.0 +2023-03-08 21:47:23,253 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24848.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:47:30,758 INFO [train.py:898] (2/4) Epoch 7, batch 3050, loss[loss=0.218, simple_loss=0.2921, pruned_loss=0.07193, over 18301.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2917, pruned_loss=0.06605, over 3577937.04 frames. ], batch size: 49, lr: 1.58e-02, grad_scale: 4.0 +2023-03-08 21:48:26,698 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24903.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:48:28,719 INFO [train.py:898] (2/4) Epoch 7, batch 3100, loss[loss=0.1719, simple_loss=0.2567, pruned_loss=0.04358, over 18382.00 frames. ], tot_loss[loss=0.212, simple_loss=0.292, pruned_loss=0.06603, over 3589012.44 frames. 
], batch size: 46, lr: 1.57e-02, grad_scale: 2.0 +2023-03-08 21:48:33,519 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24909.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:48:52,688 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24925.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:49:05,434 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.662e+02 4.170e+02 4.880e+02 6.294e+02 1.409e+03, threshold=9.761e+02, percent-clipped=6.0 +2023-03-08 21:49:21,142 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2211, 4.2814, 2.3119, 4.1780, 5.2725, 2.5300, 3.6800, 3.8983], + device='cuda:2'), covar=tensor([0.0058, 0.0974, 0.1438, 0.0503, 0.0043, 0.1181, 0.0625, 0.0645], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0199, 0.0181, 0.0180, 0.0079, 0.0166, 0.0192, 0.0190], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 21:49:27,489 INFO [train.py:898] (2/4) Epoch 7, batch 3150, loss[loss=0.2588, simple_loss=0.3345, pruned_loss=0.09149, over 16255.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2925, pruned_loss=0.06627, over 3591670.13 frames. ], batch size: 94, lr: 1.57e-02, grad_scale: 2.0 +2023-03-08 21:49:32,368 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7047, 4.0437, 4.1759, 3.1052, 3.3481, 3.2961, 2.4075, 2.0351], + device='cuda:2'), covar=tensor([0.0159, 0.0152, 0.0059, 0.0279, 0.0324, 0.0208, 0.0704, 0.0867], + device='cuda:2'), in_proj_covar=tensor([0.0049, 0.0042, 0.0039, 0.0052, 0.0072, 0.0049, 0.0067, 0.0073], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0005, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 21:50:15,249 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24995.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:50:26,650 INFO [train.py:898] (2/4) Epoch 7, batch 3200, loss[loss=0.2238, simple_loss=0.3071, pruned_loss=0.0703, over 17036.00 frames. ], tot_loss[loss=0.2133, simple_loss=0.2931, pruned_loss=0.06677, over 3574171.72 frames. ], batch size: 78, lr: 1.57e-02, grad_scale: 4.0 +2023-03-08 21:51:03,253 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.251e+02 3.758e+02 4.426e+02 5.545e+02 1.381e+03, threshold=8.852e+02, percent-clipped=2.0 +2023-03-08 21:51:25,661 INFO [train.py:898] (2/4) Epoch 7, batch 3250, loss[loss=0.1875, simple_loss=0.2632, pruned_loss=0.05588, over 18248.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2933, pruned_loss=0.06674, over 3582227.72 frames. ], batch size: 45, lr: 1.57e-02, grad_scale: 4.0 +2023-03-08 21:51:27,238 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25056.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 21:52:24,656 INFO [train.py:898] (2/4) Epoch 7, batch 3300, loss[loss=0.214, simple_loss=0.2892, pruned_loss=0.06938, over 18415.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2926, pruned_loss=0.06639, over 3571742.99 frames. 
], batch size: 48, lr: 1.57e-02, grad_scale: 4.0 +2023-03-08 21:52:42,792 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25121.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:52:52,365 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1764, 5.3284, 2.7852, 5.0393, 4.8670, 5.2772, 4.9478, 2.7553], + device='cuda:2'), covar=tensor([0.0135, 0.0043, 0.0725, 0.0065, 0.0077, 0.0070, 0.0104, 0.0986], + device='cuda:2'), in_proj_covar=tensor([0.0069, 0.0057, 0.0084, 0.0072, 0.0068, 0.0057, 0.0072, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 21:53:01,193 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.469e+02 3.921e+02 4.598e+02 5.927e+02 2.644e+03, threshold=9.195e+02, percent-clipped=9.0 +2023-03-08 21:53:23,205 INFO [train.py:898] (2/4) Epoch 7, batch 3350, loss[loss=0.2029, simple_loss=0.2875, pruned_loss=0.05914, over 17915.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2922, pruned_loss=0.06647, over 3560632.61 frames. ], batch size: 65, lr: 1.57e-02, grad_scale: 4.0 +2023-03-08 21:54:19,429 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25203.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:54:20,405 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25204.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:54:21,381 INFO [train.py:898] (2/4) Epoch 7, batch 3400, loss[loss=0.2125, simple_loss=0.2946, pruned_loss=0.06522, over 18499.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2939, pruned_loss=0.06761, over 3554466.68 frames. ], batch size: 51, lr: 1.57e-02, grad_scale: 4.0 +2023-03-08 21:54:44,220 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25225.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:54:56,993 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.655e+02 3.734e+02 4.394e+02 5.547e+02 1.008e+03, threshold=8.789e+02, percent-clipped=3.0 +2023-03-08 21:55:14,405 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25251.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:55:19,394 INFO [train.py:898] (2/4) Epoch 7, batch 3450, loss[loss=0.1733, simple_loss=0.2583, pruned_loss=0.04421, over 18358.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2932, pruned_loss=0.06706, over 3567460.50 frames. ], batch size: 46, lr: 1.56e-02, grad_scale: 4.0 +2023-03-08 21:55:39,657 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25273.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:55:46,726 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8233, 3.4073, 5.0121, 2.8704, 4.1344, 2.6447, 2.8969, 1.9401], + device='cuda:2'), covar=tensor([0.0772, 0.0724, 0.0051, 0.0590, 0.0518, 0.1885, 0.2058, 0.1598], + device='cuda:2'), in_proj_covar=tensor([0.0178, 0.0191, 0.0094, 0.0149, 0.0204, 0.0233, 0.0243, 0.0190], + device='cuda:2'), out_proj_covar=tensor([1.6147e-04, 1.7935e-04, 8.9928e-05, 1.3895e-04, 1.9195e-04, 2.1867e-04, + 2.2956e-04, 1.8073e-04], device='cuda:2') +2023-03-08 21:55:46,953 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.95 vs. limit=5.0 +2023-03-08 21:56:17,144 INFO [train.py:898] (2/4) Epoch 7, batch 3500, loss[loss=0.1842, simple_loss=0.2621, pruned_loss=0.05311, over 18496.00 frames. 
], tot_loss[loss=0.2134, simple_loss=0.293, pruned_loss=0.06694, over 3565673.58 frames. ], batch size: 47, lr: 1.56e-02, grad_scale: 2.0 +2023-03-08 21:56:53,685 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.862e+02 4.141e+02 4.748e+02 6.314e+02 1.477e+03, threshold=9.496e+02, percent-clipped=11.0 +2023-03-08 21:57:08,514 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25351.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 21:57:08,744 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4557, 3.6067, 4.9793, 4.2390, 3.0583, 3.0658, 4.2023, 5.1191], + device='cuda:2'), covar=tensor([0.0897, 0.1364, 0.0122, 0.0306, 0.0874, 0.0963, 0.0373, 0.0122], + device='cuda:2'), in_proj_covar=tensor([0.0131, 0.0211, 0.0081, 0.0148, 0.0164, 0.0165, 0.0155, 0.0107], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-08 21:57:12,753 INFO [train.py:898] (2/4) Epoch 7, batch 3550, loss[loss=0.2004, simple_loss=0.284, pruned_loss=0.05839, over 18382.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.292, pruned_loss=0.06628, over 3584074.96 frames. ], batch size: 50, lr: 1.56e-02, grad_scale: 2.0 +2023-03-08 21:58:07,628 INFO [train.py:898] (2/4) Epoch 7, batch 3600, loss[loss=0.1986, simple_loss=0.2849, pruned_loss=0.05611, over 18342.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2925, pruned_loss=0.0666, over 3590690.39 frames. ], batch size: 55, lr: 1.56e-02, grad_scale: 4.0 +2023-03-08 21:58:23,103 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.20 vs. limit=5.0 +2023-03-08 21:58:24,765 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25421.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 21:58:31,927 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25428.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 21:58:40,129 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.475e+02 3.916e+02 4.844e+02 6.068e+02 1.506e+03, threshold=9.689e+02, percent-clipped=7.0 +2023-03-08 21:59:12,578 INFO [train.py:898] (2/4) Epoch 8, batch 0, loss[loss=0.2161, simple_loss=0.3022, pruned_loss=0.06493, over 18574.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.3022, pruned_loss=0.06493, over 18574.00 frames. ], batch size: 54, lr: 1.47e-02, grad_scale: 8.0 +2023-03-08 21:59:12,578 INFO [train.py:923] (2/4) Computing validation loss +2023-03-08 21:59:24,297 INFO [train.py:932] (2/4) Epoch 8, validation: loss=0.17, simple_loss=0.2728, pruned_loss=0.03358, over 944034.00 frames. +2023-03-08 21:59:24,298 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-08 21:59:48,217 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.83 vs. limit=2.0 +2023-03-08 21:59:59,982 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25469.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:00:19,630 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.98 vs. limit=2.0 +2023-03-08 22:00:22,362 INFO [train.py:898] (2/4) Epoch 8, batch 50, loss[loss=0.1952, simple_loss=0.2829, pruned_loss=0.05377, over 18413.00 frames. ], tot_loss[loss=0.2108, simple_loss=0.292, pruned_loss=0.06473, over 818415.40 frames. 
], batch size: 50, lr: 1.47e-02, grad_scale: 8.0 +2023-03-08 22:00:22,786 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25489.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:00:26,171 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7163, 3.4884, 1.8706, 4.4403, 3.0082, 4.6101, 2.2682, 4.1629], + device='cuda:2'), covar=tensor([0.0420, 0.0657, 0.1394, 0.0342, 0.0795, 0.0189, 0.1095, 0.0270], + device='cuda:2'), in_proj_covar=tensor([0.0167, 0.0195, 0.0170, 0.0194, 0.0168, 0.0184, 0.0177, 0.0168], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 22:00:39,262 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25504.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:00:53,955 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5703, 3.3979, 4.8659, 4.2082, 2.7503, 2.7072, 4.0319, 5.0635], + device='cuda:2'), covar=tensor([0.0856, 0.1556, 0.0086, 0.0324, 0.1117, 0.1126, 0.0425, 0.0097], + device='cuda:2'), in_proj_covar=tensor([0.0131, 0.0213, 0.0081, 0.0148, 0.0167, 0.0168, 0.0155, 0.0108], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-08 22:01:18,379 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.473e+02 3.465e+02 4.274e+02 5.083e+02 8.127e+02, threshold=8.548e+02, percent-clipped=0.0 +2023-03-08 22:01:20,761 INFO [train.py:898] (2/4) Epoch 8, batch 100, loss[loss=0.207, simple_loss=0.2845, pruned_loss=0.06472, over 18267.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2902, pruned_loss=0.06451, over 1430768.36 frames. ], batch size: 47, lr: 1.47e-02, grad_scale: 8.0 +2023-03-08 22:01:35,576 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25552.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:01:47,519 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-08 22:02:17,038 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-03-08 22:02:19,593 INFO [train.py:898] (2/4) Epoch 8, batch 150, loss[loss=0.1875, simple_loss=0.2761, pruned_loss=0.0495, over 18620.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2907, pruned_loss=0.06425, over 1912293.61 frames. 
], batch size: 52, lr: 1.46e-02, grad_scale: 8.0 +2023-03-08 22:02:44,859 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0416, 2.4380, 3.1165, 3.1942, 2.4271, 3.3327, 3.2232, 2.5154], + device='cuda:2'), covar=tensor([0.0345, 0.1005, 0.0233, 0.0200, 0.1095, 0.0234, 0.0458, 0.0744], + device='cuda:2'), in_proj_covar=tensor([0.0176, 0.0210, 0.0127, 0.0127, 0.0203, 0.0171, 0.0185, 0.0190], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 22:02:57,918 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1985, 4.2293, 2.4623, 4.2889, 5.2341, 2.3867, 3.7912, 3.8400], + device='cuda:2'), covar=tensor([0.0060, 0.0857, 0.1427, 0.0494, 0.0044, 0.1350, 0.0584, 0.0670], + device='cuda:2'), in_proj_covar=tensor([0.0091, 0.0194, 0.0178, 0.0179, 0.0077, 0.0168, 0.0190, 0.0190], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 22:03:02,970 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4036, 3.5715, 5.0323, 4.2050, 3.2069, 2.6878, 4.2775, 5.0154], + device='cuda:2'), covar=tensor([0.0858, 0.1292, 0.0054, 0.0270, 0.0862, 0.1099, 0.0331, 0.0124], + device='cuda:2'), in_proj_covar=tensor([0.0131, 0.0214, 0.0081, 0.0148, 0.0166, 0.0168, 0.0155, 0.0109], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-08 22:03:16,567 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.739e+02 3.654e+02 4.521e+02 5.323e+02 1.367e+03, threshold=9.043e+02, percent-clipped=1.0 +2023-03-08 22:03:18,863 INFO [train.py:898] (2/4) Epoch 8, batch 200, loss[loss=0.1889, simple_loss=0.2682, pruned_loss=0.05475, over 18502.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2918, pruned_loss=0.06584, over 2270086.53 frames. ], batch size: 47, lr: 1.46e-02, grad_scale: 8.0 +2023-03-08 22:03:32,338 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25651.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:04:01,594 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-08 22:04:17,980 INFO [train.py:898] (2/4) Epoch 8, batch 250, loss[loss=0.2017, simple_loss=0.2864, pruned_loss=0.05853, over 18417.00 frames. ], tot_loss[loss=0.2122, simple_loss=0.2925, pruned_loss=0.06597, over 2557297.98 frames. ], batch size: 52, lr: 1.46e-02, grad_scale: 8.0 +2023-03-08 22:04:23,598 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. 
limit=2.0 +2023-03-08 22:04:25,206 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6684, 3.2454, 1.8791, 4.2688, 2.8253, 4.5169, 2.2564, 3.9578], + device='cuda:2'), covar=tensor([0.0461, 0.0867, 0.1531, 0.0532, 0.0926, 0.0235, 0.1117, 0.0337], + device='cuda:2'), in_proj_covar=tensor([0.0173, 0.0197, 0.0172, 0.0200, 0.0172, 0.0188, 0.0180, 0.0171], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-08 22:04:29,461 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=25699.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:05:08,545 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8987, 4.8992, 5.0051, 4.8404, 4.7137, 4.7153, 5.2433, 5.1604], + device='cuda:2'), covar=tensor([0.0057, 0.0082, 0.0060, 0.0079, 0.0063, 0.0113, 0.0087, 0.0078], + device='cuda:2'), in_proj_covar=tensor([0.0069, 0.0051, 0.0052, 0.0066, 0.0054, 0.0076, 0.0064, 0.0063], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0002, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 22:05:14,433 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.583e+02 3.732e+02 4.597e+02 5.455e+02 9.874e+02, threshold=9.193e+02, percent-clipped=1.0 +2023-03-08 22:05:17,282 INFO [train.py:898] (2/4) Epoch 8, batch 300, loss[loss=0.202, simple_loss=0.2912, pruned_loss=0.05635, over 18508.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2907, pruned_loss=0.0648, over 2787969.80 frames. ], batch size: 53, lr: 1.46e-02, grad_scale: 8.0 +2023-03-08 22:05:24,385 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4623, 3.7361, 5.3138, 4.5396, 3.3740, 2.9974, 4.3638, 5.3426], + device='cuda:2'), covar=tensor([0.0967, 0.1521, 0.0054, 0.0277, 0.0896, 0.1083, 0.0370, 0.0120], + device='cuda:2'), in_proj_covar=tensor([0.0135, 0.0220, 0.0083, 0.0152, 0.0171, 0.0173, 0.0159, 0.0114], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-08 22:05:57,018 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1460, 4.0677, 3.7556, 4.0989, 4.0884, 3.6785, 4.0237, 3.8515], + device='cuda:2'), covar=tensor([0.0398, 0.0664, 0.1415, 0.0597, 0.0474, 0.0504, 0.0447, 0.0944], + device='cuda:2'), in_proj_covar=tensor([0.0350, 0.0408, 0.0552, 0.0323, 0.0296, 0.0379, 0.0394, 0.0504], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-08 22:06:10,061 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25784.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 22:06:15,834 INFO [train.py:898] (2/4) Epoch 8, batch 350, loss[loss=0.2093, simple_loss=0.296, pruned_loss=0.0613, over 17826.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.288, pruned_loss=0.06336, over 2962876.53 frames. 
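
The zipformer.py:625 entries trace stochastic layer skipping: each encoder stack has its own warm-up window (warmup_begin/warmup_end, measured in batches), and on every batch some of its layers may be bypassed, frequently inside the window and only occasionally afterwards (hence num_to_drop=0 on most of the mature batches here). A hedged sketch of such a schedule; the linear ramp and both probabilities are assumptions for illustration, not the exact zipformer rule:

    import random

    def pick_layers_to_drop(num_layers, batch_count,
                            warmup_begin, warmup_end,
                            warm_p=0.5, floor_p=0.05):
        # Anneal the per-layer drop rate across this stack's warm-up window,
        # then keep a small floor so rare drops still happen later in training.
        if batch_count <= warmup_begin:
            p = warm_p
        elif batch_count >= warmup_end:
            p = floor_p
        else:
            frac = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
            p = warm_p + frac * (floor_p - warm_p)
        layers_to_drop = {i for i in range(num_layers) if random.random() < p}
        return len(layers_to_drop), layers_to_drop
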
], batch size: 70, lr: 1.46e-02, grad_scale: 8.0 +2023-03-08 22:07:08,645 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9760, 3.6550, 4.8273, 3.0449, 4.0134, 2.5054, 2.8378, 2.1765], + device='cuda:2'), covar=tensor([0.0688, 0.0636, 0.0055, 0.0487, 0.0523, 0.1961, 0.2034, 0.1346], + device='cuda:2'), in_proj_covar=tensor([0.0180, 0.0193, 0.0094, 0.0151, 0.0206, 0.0235, 0.0247, 0.0193], + device='cuda:2'), out_proj_covar=tensor([1.6203e-04, 1.8095e-04, 8.9141e-05, 1.4040e-04, 1.9236e-04, 2.2033e-04, + 2.3167e-04, 1.8266e-04], device='cuda:2') +2023-03-08 22:07:11,595 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.442e+02 3.462e+02 4.130e+02 5.163e+02 1.142e+03, threshold=8.260e+02, percent-clipped=1.0 +2023-03-08 22:07:14,543 INFO [train.py:898] (2/4) Epoch 8, batch 400, loss[loss=0.2004, simple_loss=0.2776, pruned_loss=0.06162, over 18161.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2882, pruned_loss=0.06349, over 3101162.01 frames. ], batch size: 44, lr: 1.46e-02, grad_scale: 8.0 +2023-03-08 22:07:37,317 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25858.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:07:40,930 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-03-08 22:07:49,562 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3114, 4.4082, 2.6446, 4.4502, 5.3428, 2.5903, 3.9825, 4.2326], + device='cuda:2'), covar=tensor([0.0052, 0.0876, 0.1347, 0.0426, 0.0040, 0.1278, 0.0562, 0.0540], + device='cuda:2'), in_proj_covar=tensor([0.0093, 0.0198, 0.0180, 0.0179, 0.0078, 0.0168, 0.0191, 0.0190], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 22:07:57,438 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7566, 3.6368, 3.4903, 2.9847, 3.4837, 2.7315, 2.8043, 3.8682], + device='cuda:2'), covar=tensor([0.0033, 0.0058, 0.0071, 0.0113, 0.0065, 0.0154, 0.0139, 0.0033], + device='cuda:2'), in_proj_covar=tensor([0.0067, 0.0091, 0.0082, 0.0125, 0.0082, 0.0125, 0.0131, 0.0071], + device='cuda:2'), out_proj_covar=tensor([9.1473e-05, 1.3598e-04, 1.2060e-04, 1.9429e-04, 1.2246e-04, 1.9155e-04, + 1.9977e-04, 1.0348e-04], device='cuda:2') +2023-03-08 22:07:59,622 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7059, 4.7134, 4.7613, 4.5313, 4.5494, 4.5799, 5.0634, 4.9834], + device='cuda:2'), covar=tensor([0.0066, 0.0080, 0.0087, 0.0113, 0.0075, 0.0145, 0.0070, 0.0094], + device='cuda:2'), in_proj_covar=tensor([0.0072, 0.0052, 0.0054, 0.0069, 0.0056, 0.0078, 0.0066, 0.0065], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0002, 0.0003, 0.0002, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 22:08:04,815 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6927, 4.2946, 4.4210, 3.4847, 3.5779, 3.4327, 2.4357, 1.8729], + device='cuda:2'), covar=tensor([0.0168, 0.0199, 0.0105, 0.0223, 0.0319, 0.0188, 0.0774, 0.1020], + device='cuda:2'), in_proj_covar=tensor([0.0048, 0.0042, 0.0040, 0.0052, 0.0073, 0.0049, 0.0068, 0.0074], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0005, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 22:08:13,028 INFO [train.py:898] (2/4) Epoch 8, batch 450, loss[loss=0.1862, simple_loss=0.2547, pruned_loss=0.05888, over 18431.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2885, pruned_loss=0.06346, over 3211604.98 frames. 
], batch size: 43, lr: 1.46e-02, grad_scale: 8.0 +2023-03-08 22:08:20,574 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5513, 2.6885, 2.5952, 2.7428, 3.5605, 3.4350, 3.0184, 2.8398], + device='cuda:2'), covar=tensor([0.0208, 0.0304, 0.0516, 0.0300, 0.0127, 0.0107, 0.0290, 0.0320], + device='cuda:2'), in_proj_covar=tensor([0.0106, 0.0086, 0.0136, 0.0116, 0.0083, 0.0070, 0.0110, 0.0110], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 22:08:31,484 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7319, 4.2377, 4.3718, 3.3182, 3.6209, 3.4755, 2.4162, 1.7574], + device='cuda:2'), covar=tensor([0.0163, 0.0151, 0.0062, 0.0218, 0.0311, 0.0166, 0.0700, 0.0917], + device='cuda:2'), in_proj_covar=tensor([0.0048, 0.0042, 0.0040, 0.0052, 0.0073, 0.0049, 0.0068, 0.0073], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0005, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 22:08:44,010 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9078, 3.8289, 5.0128, 3.1561, 4.0222, 2.7309, 2.9379, 1.9642], + device='cuda:2'), covar=tensor([0.0701, 0.0625, 0.0054, 0.0501, 0.0586, 0.1857, 0.2085, 0.1618], + device='cuda:2'), in_proj_covar=tensor([0.0180, 0.0193, 0.0095, 0.0152, 0.0206, 0.0236, 0.0248, 0.0195], + device='cuda:2'), out_proj_covar=tensor([1.6278e-04, 1.8093e-04, 8.9980e-05, 1.4147e-04, 1.9286e-04, 2.2126e-04, + 2.3225e-04, 1.8433e-04], device='cuda:2') +2023-03-08 22:08:48,664 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25919.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:09:09,607 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.689e+02 3.778e+02 4.755e+02 5.799e+02 1.474e+03, threshold=9.510e+02, percent-clipped=4.0 +2023-03-08 22:09:11,983 INFO [train.py:898] (2/4) Epoch 8, batch 500, loss[loss=0.2072, simple_loss=0.2893, pruned_loss=0.06261, over 17795.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2875, pruned_loss=0.06329, over 3283091.07 frames. ], batch size: 70, lr: 1.45e-02, grad_scale: 8.0 +2023-03-08 22:10:10,521 INFO [train.py:898] (2/4) Epoch 8, batch 550, loss[loss=0.2033, simple_loss=0.2877, pruned_loss=0.05947, over 18616.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2874, pruned_loss=0.06317, over 3359625.95 frames. ], batch size: 52, lr: 1.45e-02, grad_scale: 8.0 +2023-03-08 22:11:10,803 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.250e+02 3.451e+02 4.216e+02 4.967e+02 1.068e+03, threshold=8.432e+02, percent-clipped=2.0 +2023-03-08 22:11:13,149 INFO [train.py:898] (2/4) Epoch 8, batch 600, loss[loss=0.2124, simple_loss=0.2949, pruned_loss=0.06494, over 16046.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2866, pruned_loss=0.0628, over 3414499.47 frames. 
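
Every loss[...] record carries the two terms of pruned-transducer training: simple_loss comes from a cheap joiner over the whole (T x U) lattice and supplies pruning bounds, while pruned_loss is the full joiner evaluated only inside those bounds; the optimizer sees a weighted sum of the two. A condensed sketch assuming k2's pruned RNN-T API (the 0.5 weight, s_range=5, and reusing the same projections for pruning are simplifications for illustration):

    import k2

    def pruned_transducer_loss(am, lm, joiner, y_padded, boundary,
                               s_range=5, simple_loss_scale=0.5):
        # am: (N, T, V) encoder output and lm: (N, U+1, V) decoder output,
        # both projected to the vocabulary; boundary holds sequence lengths.
        simple_loss, (px_grad, py_grad) = k2.rnnt_loss_smoothed(
            lm=lm, am=am, symbols=y_padded, termination_symbol=0,
            boundary=boundary, return_grad=True, reduction="sum")
        # Per frame, keep only s_range label positions for the full joiner.
        ranges = k2.get_rnnt_prune_ranges(px_grad, py_grad, boundary, s_range)
        am_pruned, lm_pruned = k2.do_rnnt_pruning(am=am, lm=lm, ranges=ranges)
        logits = joiner(am_pruned, lm_pruned)
        pruned_loss = k2.rnnt_loss_pruned(
            logits, y_padded, ranges, termination_symbol=0,
            boundary=boundary, reduction="sum")
        return simple_loss_scale * simple_loss + pruned_loss
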
], batch size: 94, lr: 1.45e-02, grad_scale: 8.0 +2023-03-08 22:11:44,870 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.3904, 5.0037, 5.0243, 4.9790, 4.6142, 4.8938, 4.2367, 4.8315], + device='cuda:2'), covar=tensor([0.0260, 0.0308, 0.0196, 0.0263, 0.0416, 0.0228, 0.1323, 0.0308], + device='cuda:2'), in_proj_covar=tensor([0.0153, 0.0201, 0.0183, 0.0202, 0.0193, 0.0201, 0.0265, 0.0187], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-08 22:11:46,202 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5324, 1.9466, 2.7528, 2.7111, 3.5494, 5.1117, 4.4054, 4.1214], + device='cuda:2'), covar=tensor([0.0847, 0.1492, 0.1791, 0.1024, 0.1344, 0.0051, 0.0328, 0.0291], + device='cuda:2'), in_proj_covar=tensor([0.0204, 0.0258, 0.0264, 0.0230, 0.0339, 0.0154, 0.0229, 0.0176], + device='cuda:2'), out_proj_covar=tensor([1.3549e-04, 1.7204e-04, 1.8013e-04, 1.4138e-04, 2.2518e-04, 9.9839e-05, + 1.4574e-04, 1.1688e-04], device='cuda:2') +2023-03-08 22:12:06,803 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26084.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:12:12,075 INFO [train.py:898] (2/4) Epoch 8, batch 650, loss[loss=0.2489, simple_loss=0.3264, pruned_loss=0.0857, over 16087.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2886, pruned_loss=0.0634, over 3449119.40 frames. ], batch size: 94, lr: 1.45e-02, grad_scale: 8.0 +2023-03-08 22:12:28,556 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26102.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:13:03,431 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26132.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 22:13:08,683 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.333e+02 3.685e+02 4.568e+02 5.569e+02 1.081e+03, threshold=9.136e+02, percent-clipped=5.0 +2023-03-08 22:13:11,017 INFO [train.py:898] (2/4) Epoch 8, batch 700, loss[loss=0.1936, simple_loss=0.2874, pruned_loss=0.0499, over 18402.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2885, pruned_loss=0.06334, over 3487849.74 frames. ], batch size: 52, lr: 1.45e-02, grad_scale: 8.0 +2023-03-08 22:13:39,898 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26163.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:13:46,037 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26168.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:13:48,402 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9001, 3.4188, 4.3371, 4.2308, 2.9856, 4.6775, 4.1445, 3.0036], + device='cuda:2'), covar=tensor([0.0374, 0.0954, 0.0185, 0.0174, 0.1275, 0.0146, 0.0518, 0.0991], + device='cuda:2'), in_proj_covar=tensor([0.0172, 0.0210, 0.0130, 0.0127, 0.0204, 0.0173, 0.0185, 0.0190], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 22:14:09,257 INFO [train.py:898] (2/4) Epoch 8, batch 750, loss[loss=0.2049, simple_loss=0.2898, pruned_loss=0.05996, over 18363.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2897, pruned_loss=0.06392, over 3501881.18 frames. ], batch size: 50, lr: 1.45e-02, grad_scale: 8.0 +2023-03-08 22:14:32,986 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.12 vs. 
limit=5.0 +2023-03-08 22:14:39,858 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26214.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 22:14:57,233 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26229.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:15:05,903 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.506e+02 3.613e+02 4.168e+02 4.953e+02 1.109e+03, threshold=8.337e+02, percent-clipped=3.0 +2023-03-08 22:15:08,151 INFO [train.py:898] (2/4) Epoch 8, batch 800, loss[loss=0.2219, simple_loss=0.3011, pruned_loss=0.07138, over 17972.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.288, pruned_loss=0.06369, over 3514672.80 frames. ], batch size: 65, lr: 1.45e-02, grad_scale: 8.0 +2023-03-08 22:15:21,551 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26250.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:16:07,221 INFO [train.py:898] (2/4) Epoch 8, batch 850, loss[loss=0.1874, simple_loss=0.2643, pruned_loss=0.05529, over 18353.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.287, pruned_loss=0.06317, over 3531719.78 frames. ], batch size: 46, lr: 1.45e-02, grad_scale: 8.0 +2023-03-08 22:16:33,986 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26311.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:17:04,268 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.475e+02 3.702e+02 4.449e+02 5.668e+02 1.488e+03, threshold=8.898e+02, percent-clipped=3.0 +2023-03-08 22:17:06,539 INFO [train.py:898] (2/4) Epoch 8, batch 900, loss[loss=0.2034, simple_loss=0.2924, pruned_loss=0.0572, over 18504.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2868, pruned_loss=0.06283, over 3540070.27 frames. ], batch size: 53, lr: 1.44e-02, grad_scale: 8.0 +2023-03-08 22:17:22,344 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-03-08 22:18:06,764 INFO [train.py:898] (2/4) Epoch 8, batch 950, loss[loss=0.1902, simple_loss=0.276, pruned_loss=0.0522, over 18291.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2862, pruned_loss=0.0627, over 3554437.58 frames. ], batch size: 49, lr: 1.44e-02, grad_scale: 8.0 +2023-03-08 22:18:17,757 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.26 vs. limit=5.0 +2023-03-08 22:18:37,071 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26414.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:18:39,474 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3617, 3.2340, 1.5873, 4.2854, 2.8235, 4.3393, 2.0212, 3.8341], + device='cuda:2'), covar=tensor([0.0542, 0.0893, 0.1723, 0.0375, 0.0956, 0.0293, 0.1287, 0.0352], + device='cuda:2'), in_proj_covar=tensor([0.0172, 0.0198, 0.0171, 0.0202, 0.0171, 0.0194, 0.0178, 0.0172], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 22:19:04,444 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.644e+02 3.699e+02 4.361e+02 5.008e+02 1.213e+03, threshold=8.721e+02, percent-clipped=3.0 +2023-03-08 22:19:06,711 INFO [train.py:898] (2/4) Epoch 8, batch 1000, loss[loss=0.1881, simple_loss=0.2795, pruned_loss=0.04838, over 18481.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2863, pruned_loss=0.06254, over 3554662.02 frames. 
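
The scaling.py:679 messages fire when a Whiten module sees a feature covariance that is far from white. A plausible reconstruction of the printed metric is mean(eig(C)^2) / mean(eig(C))^2 over each group's covariance C: exactly 1.0 for perfectly white features, growing as a few directions dominate, with a penalty applied (and a line logged) only when it exceeds the limit. A sketch under those assumptions, using traces so no eigendecomposition is needed:

    import torch

    def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
        # x: (num_frames, num_channels); channels split into num_groups groups.
        n, c = x.shape
        cpg = c // num_groups                                # channels per group
        xg = x.reshape(n, num_groups, cpg).permute(1, 0, 2)  # (groups, n, cpg)
        cov = xg.transpose(1, 2) @ xg / n                    # per-group covariance
        mean_eig = cov.diagonal(dim1=1, dim2=2).mean(dim=1)      # trace(C)/d
        mean_eig_sq = (cov ** 2).sum(dim=(1, 2)) / cpg           # trace(C^2)/d
        return (mean_eig_sq / (mean_eig ** 2 + 1e-20)).mean()    # 1.0 == white
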
], batch size: 51, lr: 1.44e-02, grad_scale: 8.0 +2023-03-08 22:19:29,039 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26458.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:19:37,149 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-08 22:19:50,137 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26475.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:20:06,562 INFO [train.py:898] (2/4) Epoch 8, batch 1050, loss[loss=0.2029, simple_loss=0.2865, pruned_loss=0.0596, over 18378.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2861, pruned_loss=0.06219, over 3571530.24 frames. ], batch size: 50, lr: 1.44e-02, grad_scale: 8.0 +2023-03-08 22:20:19,368 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5344, 4.1601, 4.2340, 3.3679, 3.4914, 3.3896, 2.5932, 1.9048], + device='cuda:2'), covar=tensor([0.0245, 0.0210, 0.0098, 0.0249, 0.0370, 0.0229, 0.0703, 0.0985], + device='cuda:2'), in_proj_covar=tensor([0.0050, 0.0043, 0.0042, 0.0054, 0.0073, 0.0049, 0.0069, 0.0073], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 22:20:35,351 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26514.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:20:47,864 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26524.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:21:03,508 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.558e+02 3.535e+02 4.269e+02 5.489e+02 1.390e+03, threshold=8.539e+02, percent-clipped=8.0 +2023-03-08 22:21:05,731 INFO [train.py:898] (2/4) Epoch 8, batch 1100, loss[loss=0.2651, simple_loss=0.3312, pruned_loss=0.09951, over 12737.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2869, pruned_loss=0.06335, over 3553132.34 frames. ], batch size: 132, lr: 1.44e-02, grad_scale: 8.0 +2023-03-08 22:21:32,868 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26562.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:22:05,753 INFO [train.py:898] (2/4) Epoch 8, batch 1150, loss[loss=0.2286, simple_loss=0.3045, pruned_loss=0.07634, over 17007.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2873, pruned_loss=0.06363, over 3551026.78 frames. ], batch size: 78, lr: 1.44e-02, grad_scale: 8.0 +2023-03-08 22:22:24,915 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26606.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:23:02,046 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.150e+02 3.587e+02 4.399e+02 5.427e+02 1.423e+03, threshold=8.799e+02, percent-clipped=5.0 +2023-03-08 22:23:04,976 INFO [train.py:898] (2/4) Epoch 8, batch 1200, loss[loss=0.184, simple_loss=0.2604, pruned_loss=0.0538, over 18173.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2877, pruned_loss=0.06394, over 3559643.86 frames. 
], batch size: 44, lr: 1.44e-02, grad_scale: 8.0 +2023-03-08 22:23:17,672 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1453, 4.1040, 3.7891, 4.0828, 4.1033, 3.6888, 4.0514, 3.9420], + device='cuda:2'), covar=tensor([0.0415, 0.0567, 0.1411, 0.0641, 0.0485, 0.0419, 0.0415, 0.0779], + device='cuda:2'), in_proj_covar=tensor([0.0352, 0.0408, 0.0555, 0.0325, 0.0297, 0.0377, 0.0396, 0.0514], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-08 22:23:37,404 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1833, 4.9363, 5.3272, 5.3149, 5.1066, 5.9223, 5.5130, 5.2575], + device='cuda:2'), covar=tensor([0.0999, 0.0651, 0.0716, 0.0561, 0.1401, 0.0752, 0.0601, 0.1469], + device='cuda:2'), in_proj_covar=tensor([0.0284, 0.0215, 0.0221, 0.0222, 0.0262, 0.0318, 0.0209, 0.0306], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-08 22:24:03,509 INFO [train.py:898] (2/4) Epoch 8, batch 1250, loss[loss=0.2237, simple_loss=0.3035, pruned_loss=0.07191, over 18366.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.288, pruned_loss=0.06368, over 3562062.22 frames. ], batch size: 56, lr: 1.43e-02, grad_scale: 8.0 +2023-03-08 22:24:59,403 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.195e+02 3.433e+02 4.104e+02 5.033e+02 1.173e+03, threshold=8.208e+02, percent-clipped=2.0 +2023-03-08 22:25:01,425 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6400, 5.2121, 5.1982, 5.1429, 4.7781, 5.0967, 4.4320, 4.9423], + device='cuda:2'), covar=tensor([0.0192, 0.0264, 0.0185, 0.0264, 0.0360, 0.0200, 0.1148, 0.0301], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0198, 0.0183, 0.0204, 0.0194, 0.0202, 0.0265, 0.0188], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-08 22:25:02,173 INFO [train.py:898] (2/4) Epoch 8, batch 1300, loss[loss=0.2163, simple_loss=0.3054, pruned_loss=0.06361, over 18369.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2886, pruned_loss=0.06347, over 3579414.44 frames. ], batch size: 55, lr: 1.43e-02, grad_scale: 8.0 +2023-03-08 22:25:25,037 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26758.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:25:38,729 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26770.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:26:00,406 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.79 vs. limit=2.0 +2023-03-08 22:26:00,818 INFO [train.py:898] (2/4) Epoch 8, batch 1350, loss[loss=0.2173, simple_loss=0.308, pruned_loss=0.06328, over 18409.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2876, pruned_loss=0.06287, over 3595043.17 frames. 
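
The lr field decays both within the epoch and across epochs (1.47e-02 at the top of epoch 8, 1.43e-02 by now). That shape matches an Eden-style schedule, where the base rate is damped by two factors, one driven by the global batch index and one by the (possibly fractional) epoch; lr_batches and lr_epochs set where each factor starts to bite. A sketch of the formula, with this run's constants left unspecified:

    def eden_lr(base_lr: float, batch: int, epoch: float,
                lr_batches: float, lr_epochs: float) -> float:
        # Each factor is ~1.0 early on and decays like x**-0.5 asymptotically.
        batch_f = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
        epoch_f = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
        return base_lr * batch_f * epoch_f
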
], batch size: 52, lr: 1.43e-02, grad_scale: 8.0 +2023-03-08 22:26:21,902 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26806.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:26:42,294 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26824.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:26:57,544 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.883e+02 3.529e+02 4.433e+02 5.428e+02 1.307e+03, threshold=8.866e+02, percent-clipped=5.0 +2023-03-08 22:27:00,018 INFO [train.py:898] (2/4) Epoch 8, batch 1400, loss[loss=0.2016, simple_loss=0.2846, pruned_loss=0.05936, over 18355.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2871, pruned_loss=0.06285, over 3588730.92 frames. ], batch size: 46, lr: 1.43e-02, grad_scale: 8.0 +2023-03-08 22:27:39,754 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26872.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:27:59,692 INFO [train.py:898] (2/4) Epoch 8, batch 1450, loss[loss=0.2039, simple_loss=0.2955, pruned_loss=0.05614, over 18395.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2882, pruned_loss=0.06331, over 3587540.10 frames. ], batch size: 50, lr: 1.43e-02, grad_scale: 8.0 +2023-03-08 22:28:11,622 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.3013, 5.5140, 2.9706, 5.3218, 5.2529, 5.5513, 5.4040, 2.7825], + device='cuda:2'), covar=tensor([0.0136, 0.0051, 0.0670, 0.0058, 0.0063, 0.0056, 0.0070, 0.0924], + device='cuda:2'), in_proj_covar=tensor([0.0071, 0.0060, 0.0087, 0.0074, 0.0069, 0.0057, 0.0073, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 22:28:18,417 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26904.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:28:21,068 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26906.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:28:38,546 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-03-08 22:28:56,608 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.356e+02 3.688e+02 4.427e+02 5.301e+02 1.390e+03, threshold=8.853e+02, percent-clipped=1.0 +2023-03-08 22:28:58,822 INFO [train.py:898] (2/4) Epoch 8, batch 1500, loss[loss=0.2034, simple_loss=0.283, pruned_loss=0.06187, over 18423.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2883, pruned_loss=0.06357, over 3575999.37 frames. 
], batch size: 48, lr: 1.43e-02, grad_scale: 8.0 +2023-03-08 22:29:09,251 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4554, 5.0431, 5.5458, 5.4800, 5.3004, 6.1333, 5.7925, 5.6196], + device='cuda:2'), covar=tensor([0.0937, 0.0659, 0.0517, 0.0596, 0.1351, 0.0695, 0.0509, 0.1467], + device='cuda:2'), in_proj_covar=tensor([0.0286, 0.0215, 0.0221, 0.0224, 0.0265, 0.0319, 0.0211, 0.0312], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-08 22:29:12,571 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26950.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:29:13,739 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8164, 4.6915, 4.9028, 4.6155, 4.5869, 4.6801, 5.1009, 4.9177], + device='cuda:2'), covar=tensor([0.0062, 0.0083, 0.0066, 0.0097, 0.0071, 0.0106, 0.0083, 0.0114], + device='cuda:2'), in_proj_covar=tensor([0.0072, 0.0052, 0.0053, 0.0067, 0.0055, 0.0077, 0.0065, 0.0064], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0002, 0.0003, 0.0002, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 22:29:17,100 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=26954.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:29:30,476 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26965.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 22:29:58,010 INFO [train.py:898] (2/4) Epoch 8, batch 1550, loss[loss=0.2246, simple_loss=0.314, pruned_loss=0.06758, over 18134.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2879, pruned_loss=0.06347, over 3574064.16 frames. ], batch size: 62, lr: 1.43e-02, grad_scale: 8.0 +2023-03-08 22:30:24,934 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27011.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:30:54,389 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.371e+02 3.938e+02 4.762e+02 5.471e+02 1.136e+03, threshold=9.525e+02, percent-clipped=4.0 +2023-03-08 22:30:56,657 INFO [train.py:898] (2/4) Epoch 8, batch 1600, loss[loss=0.2026, simple_loss=0.2879, pruned_loss=0.0587, over 17892.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2884, pruned_loss=0.06325, over 3585191.22 frames. ], batch size: 70, lr: 1.43e-02, grad_scale: 8.0 +2023-03-08 22:31:00,928 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.3862, 5.5590, 2.9847, 5.3882, 5.2997, 5.6752, 5.4596, 2.9632], + device='cuda:2'), covar=tensor([0.0125, 0.0049, 0.0637, 0.0053, 0.0056, 0.0038, 0.0064, 0.0790], + device='cuda:2'), in_proj_covar=tensor([0.0069, 0.0059, 0.0085, 0.0072, 0.0067, 0.0056, 0.0071, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 22:31:34,819 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27070.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:31:56,212 INFO [train.py:898] (2/4) Epoch 8, batch 1650, loss[loss=0.1916, simple_loss=0.2687, pruned_loss=0.05724, over 18163.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.287, pruned_loss=0.06262, over 3567276.28 frames. 
], batch size: 44, lr: 1.42e-02, grad_scale: 8.0 +2023-03-08 22:32:07,702 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6719, 3.0764, 4.1902, 4.0811, 2.9835, 4.6108, 3.9960, 2.9171], + device='cuda:2'), covar=tensor([0.0364, 0.1089, 0.0204, 0.0170, 0.1209, 0.0153, 0.0331, 0.0860], + device='cuda:2'), in_proj_covar=tensor([0.0173, 0.0212, 0.0129, 0.0130, 0.0207, 0.0173, 0.0185, 0.0186], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 22:32:31,984 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27118.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:32:53,451 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.332e+02 3.429e+02 4.052e+02 5.174e+02 1.566e+03, threshold=8.105e+02, percent-clipped=2.0 +2023-03-08 22:32:55,944 INFO [train.py:898] (2/4) Epoch 8, batch 1700, loss[loss=0.204, simple_loss=0.2747, pruned_loss=0.06665, over 18502.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2871, pruned_loss=0.06247, over 3582304.67 frames. ], batch size: 47, lr: 1.42e-02, grad_scale: 8.0 +2023-03-08 22:32:56,412 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6135, 3.3520, 1.9754, 4.3282, 2.9158, 4.4799, 2.1116, 3.8889], + device='cuda:2'), covar=tensor([0.0493, 0.0798, 0.1401, 0.0397, 0.0941, 0.0254, 0.1205, 0.0342], + device='cuda:2'), in_proj_covar=tensor([0.0177, 0.0205, 0.0175, 0.0212, 0.0175, 0.0203, 0.0183, 0.0178], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 22:33:07,182 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27148.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:33:55,118 INFO [train.py:898] (2/4) Epoch 8, batch 1750, loss[loss=0.2344, simple_loss=0.3134, pruned_loss=0.0777, over 18065.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2864, pruned_loss=0.06219, over 3588303.78 frames. ], batch size: 62, lr: 1.42e-02, grad_scale: 8.0 +2023-03-08 22:34:19,903 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27209.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:34:35,660 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-08 22:34:50,073 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-08 22:34:52,837 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.362e+02 3.433e+02 4.337e+02 5.594e+02 1.098e+03, threshold=8.674e+02, percent-clipped=7.0 +2023-03-08 22:34:55,193 INFO [train.py:898] (2/4) Epoch 8, batch 1800, loss[loss=0.223, simple_loss=0.3076, pruned_loss=0.06918, over 18344.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2851, pruned_loss=0.06129, over 3592582.61 frames. 
], batch size: 56, lr: 1.42e-02, grad_scale: 8.0 +2023-03-08 22:35:19,962 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27260.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:35:29,132 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9112, 4.4229, 4.4786, 3.3409, 3.6842, 3.5091, 2.4535, 1.9929], + device='cuda:2'), covar=tensor([0.0150, 0.0149, 0.0068, 0.0230, 0.0344, 0.0183, 0.0712, 0.0909], + device='cuda:2'), in_proj_covar=tensor([0.0050, 0.0044, 0.0042, 0.0054, 0.0074, 0.0050, 0.0070, 0.0075], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 22:35:54,549 INFO [train.py:898] (2/4) Epoch 8, batch 1850, loss[loss=0.1901, simple_loss=0.2772, pruned_loss=0.05151, over 18245.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2854, pruned_loss=0.06158, over 3590647.74 frames. ], batch size: 45, lr: 1.42e-02, grad_scale: 8.0 +2023-03-08 22:36:08,509 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6431, 4.7494, 4.7616, 4.6094, 4.5331, 4.5926, 5.0128, 4.9098], + device='cuda:2'), covar=tensor([0.0067, 0.0076, 0.0080, 0.0083, 0.0071, 0.0135, 0.0068, 0.0100], + device='cuda:2'), in_proj_covar=tensor([0.0072, 0.0052, 0.0053, 0.0068, 0.0057, 0.0078, 0.0065, 0.0065], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0002, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 22:36:09,575 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6934, 2.9711, 4.2202, 3.9872, 2.5435, 4.6032, 4.0510, 2.7877], + device='cuda:2'), covar=tensor([0.0298, 0.1135, 0.0160, 0.0212, 0.1431, 0.0123, 0.0324, 0.0874], + device='cuda:2'), in_proj_covar=tensor([0.0172, 0.0206, 0.0126, 0.0127, 0.0203, 0.0169, 0.0182, 0.0182], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 22:36:14,404 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27306.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:36:19,362 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-03-08 22:36:39,276 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4567, 3.6008, 5.0652, 4.2110, 3.1692, 2.9157, 4.3686, 5.1067], + device='cuda:2'), covar=tensor([0.0845, 0.1585, 0.0059, 0.0316, 0.0837, 0.1036, 0.0289, 0.0164], + device='cuda:2'), in_proj_covar=tensor([0.0136, 0.0225, 0.0085, 0.0154, 0.0172, 0.0174, 0.0162, 0.0118], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0001, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-08 22:36:50,628 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.586e+02 3.923e+02 4.847e+02 5.995e+02 1.610e+03, threshold=9.695e+02, percent-clipped=7.0 +2023-03-08 22:36:53,084 INFO [train.py:898] (2/4) Epoch 8, batch 1900, loss[loss=0.1935, simple_loss=0.2651, pruned_loss=0.06091, over 18084.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2864, pruned_loss=0.06225, over 3595150.25 frames. 
], batch size: 40, lr: 1.42e-02, grad_scale: 16.0 +2023-03-08 22:37:02,470 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3727, 2.8553, 4.0623, 3.8031, 2.5808, 4.4555, 3.9426, 2.7549], + device='cuda:2'), covar=tensor([0.0397, 0.1214, 0.0176, 0.0255, 0.1459, 0.0125, 0.0331, 0.0934], + device='cuda:2'), in_proj_covar=tensor([0.0176, 0.0212, 0.0128, 0.0129, 0.0206, 0.0172, 0.0186, 0.0186], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 22:37:15,260 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.23 vs. limit=5.0 +2023-03-08 22:37:51,477 INFO [train.py:898] (2/4) Epoch 8, batch 1950, loss[loss=0.18, simple_loss=0.2618, pruned_loss=0.04904, over 18295.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2871, pruned_loss=0.06258, over 3598236.94 frames. ], batch size: 49, lr: 1.42e-02, grad_scale: 16.0 +2023-03-08 22:37:54,555 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-08 22:38:12,935 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-08 22:38:47,713 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.257e+02 3.355e+02 4.079e+02 5.085e+02 1.650e+03, threshold=8.157e+02, percent-clipped=2.0 +2023-03-08 22:38:49,970 INFO [train.py:898] (2/4) Epoch 8, batch 2000, loss[loss=0.1858, simple_loss=0.2645, pruned_loss=0.05349, over 18424.00 frames. ], tot_loss[loss=0.206, simple_loss=0.287, pruned_loss=0.06248, over 3598773.31 frames. ], batch size: 48, lr: 1.42e-02, grad_scale: 16.0 +2023-03-08 22:38:57,001 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.84 vs. limit=2.0 +2023-03-08 22:39:25,425 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-03-08 22:39:29,205 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3575, 2.5338, 3.7714, 3.9424, 2.4026, 4.4266, 3.9033, 2.6030], + device='cuda:2'), covar=tensor([0.0426, 0.1501, 0.0260, 0.0191, 0.1513, 0.0115, 0.0386, 0.1034], + device='cuda:2'), in_proj_covar=tensor([0.0176, 0.0214, 0.0129, 0.0127, 0.0207, 0.0170, 0.0184, 0.0186], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 22:39:48,956 INFO [train.py:898] (2/4) Epoch 8, batch 2050, loss[loss=0.2278, simple_loss=0.3108, pruned_loss=0.07235, over 17148.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2876, pruned_loss=0.06303, over 3566401.85 frames. 
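
grad_scale is the dynamic fp16 loss scale: it doubles after a long enough run of overflow-free steps (8.0 -> 16.0 in the batch-1900 record above) and is halved whenever a step yields inf/nan gradients, which is why it falls back to 8.0, and later 4.0, in the records that follow. The standard PyTorch pattern, as a sketch with placeholder loop names (the run's own scaler bookkeeping may differ in detail):

    import torch

    scaler = torch.cuda.amp.GradScaler()      # dynamic loss scaling

    for batch in train_loader:
        optimizer.zero_grad()
        with torch.cuda.amp.autocast():       # fp16 forward pass
            loss = compute_loss(model, batch)
        scaler.scale(loss).backward()         # scaled grads avoid underflow
        scaler.step(optimizer)                # skipped if grads overflowed
        scaler.update()                       # halve on overflow, else grow
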
], batch size: 78, lr: 1.41e-02, grad_scale: 8.0 +2023-03-08 22:40:07,055 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27504.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:40:35,802 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2019, 5.3534, 2.7388, 5.1638, 4.9969, 5.4590, 5.1630, 2.7662], + device='cuda:2'), covar=tensor([0.0151, 0.0048, 0.0710, 0.0058, 0.0060, 0.0045, 0.0083, 0.0866], + device='cuda:2'), in_proj_covar=tensor([0.0071, 0.0059, 0.0086, 0.0074, 0.0069, 0.0057, 0.0072, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0005, 0.0004, 0.0003, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 22:40:46,828 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.547e+02 3.620e+02 4.320e+02 6.156e+02 2.106e+03, threshold=8.640e+02, percent-clipped=12.0 +2023-03-08 22:40:48,011 INFO [train.py:898] (2/4) Epoch 8, batch 2100, loss[loss=0.1874, simple_loss=0.2616, pruned_loss=0.05659, over 18393.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2876, pruned_loss=0.06304, over 3566706.98 frames. ], batch size: 42, lr: 1.41e-02, grad_scale: 8.0 +2023-03-08 22:41:12,775 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27560.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 22:41:47,086 INFO [train.py:898] (2/4) Epoch 8, batch 2150, loss[loss=0.2049, simple_loss=0.2865, pruned_loss=0.06159, over 17086.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2878, pruned_loss=0.06273, over 3569594.26 frames. ], batch size: 78, lr: 1.41e-02, grad_scale: 8.0 +2023-03-08 22:42:07,713 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27606.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:42:09,888 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27608.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:42:45,313 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.528e+02 3.451e+02 4.193e+02 5.044e+02 8.620e+02, threshold=8.386e+02, percent-clipped=0.0 +2023-03-08 22:42:46,497 INFO [train.py:898] (2/4) Epoch 8, batch 2200, loss[loss=0.2413, simple_loss=0.3115, pruned_loss=0.08554, over 12885.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2882, pruned_loss=0.06294, over 3563941.89 frames. ], batch size: 130, lr: 1.41e-02, grad_scale: 8.0 +2023-03-08 22:43:03,783 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27654.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:43:46,124 INFO [train.py:898] (2/4) Epoch 8, batch 2250, loss[loss=0.205, simple_loss=0.298, pruned_loss=0.05605, over 18202.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.288, pruned_loss=0.06291, over 3564456.00 frames. 
], batch size: 60, lr: 1.41e-02, grad_scale: 8.0 +2023-03-08 22:43:55,491 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.3962, 5.5771, 3.0610, 5.3324, 5.2848, 5.6926, 5.4740, 3.0226], + device='cuda:2'), covar=tensor([0.0131, 0.0039, 0.0621, 0.0060, 0.0048, 0.0035, 0.0079, 0.0806], + device='cuda:2'), in_proj_covar=tensor([0.0070, 0.0059, 0.0085, 0.0073, 0.0068, 0.0056, 0.0071, 0.0087], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0005, 0.0004, 0.0003, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 22:44:00,211 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6009, 2.8487, 2.4807, 2.7059, 3.5300, 3.5265, 3.1834, 3.0609], + device='cuda:2'), covar=tensor([0.0169, 0.0250, 0.0582, 0.0274, 0.0189, 0.0124, 0.0258, 0.0248], + device='cuda:2'), in_proj_covar=tensor([0.0109, 0.0093, 0.0144, 0.0123, 0.0092, 0.0073, 0.0121, 0.0118], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 22:44:12,304 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6366, 2.0232, 2.7124, 2.6985, 3.5757, 5.2823, 4.4979, 4.2601], + device='cuda:2'), covar=tensor([0.1012, 0.1836, 0.2153, 0.1255, 0.1633, 0.0060, 0.0377, 0.0339], + device='cuda:2'), in_proj_covar=tensor([0.0208, 0.0263, 0.0267, 0.0229, 0.0338, 0.0155, 0.0231, 0.0178], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-08 22:44:15,670 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27714.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:44:23,894 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-03-08 22:44:44,345 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.230e+02 4.253e+02 4.744e+02 6.262e+02 1.251e+03, threshold=9.489e+02, percent-clipped=5.0 +2023-03-08 22:44:45,487 INFO [train.py:898] (2/4) Epoch 8, batch 2300, loss[loss=0.1987, simple_loss=0.2731, pruned_loss=0.06212, over 18244.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.288, pruned_loss=0.06292, over 3567843.13 frames. ], batch size: 45, lr: 1.41e-02, grad_scale: 8.0 +2023-03-08 22:44:49,390 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6199, 5.2669, 5.2893, 5.2350, 4.8403, 5.1023, 4.4952, 5.0824], + device='cuda:2'), covar=tensor([0.0212, 0.0247, 0.0171, 0.0261, 0.0305, 0.0247, 0.1139, 0.0243], + device='cuda:2'), in_proj_covar=tensor([0.0154, 0.0198, 0.0185, 0.0205, 0.0195, 0.0207, 0.0268, 0.0188], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-08 22:45:28,261 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27775.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 22:45:44,256 INFO [train.py:898] (2/4) Epoch 8, batch 2350, loss[loss=0.2041, simple_loss=0.2913, pruned_loss=0.05847, over 18368.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2884, pruned_loss=0.06326, over 3562871.38 frames. 
], batch size: 56, lr: 1.41e-02, grad_scale: 8.0 +2023-03-08 22:46:03,028 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27804.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:46:42,183 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.997e+02 3.557e+02 4.056e+02 4.769e+02 1.044e+03, threshold=8.112e+02, percent-clipped=2.0 +2023-03-08 22:46:43,702 INFO [train.py:898] (2/4) Epoch 8, batch 2400, loss[loss=0.227, simple_loss=0.2978, pruned_loss=0.0781, over 18266.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.287, pruned_loss=0.06255, over 3570129.84 frames. ], batch size: 47, lr: 1.41e-02, grad_scale: 8.0 +2023-03-08 22:46:51,851 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-08 22:46:59,370 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=27852.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:47:10,161 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7489, 3.7203, 5.0925, 3.2361, 4.2506, 2.6299, 2.9471, 1.9564], + device='cuda:2'), covar=tensor([0.0890, 0.0687, 0.0059, 0.0493, 0.0558, 0.2001, 0.2360, 0.1627], + device='cuda:2'), in_proj_covar=tensor([0.0181, 0.0196, 0.0097, 0.0151, 0.0209, 0.0233, 0.0252, 0.0193], + device='cuda:2'), out_proj_covar=tensor([1.6196e-04, 1.8186e-04, 9.1459e-05, 1.4005e-04, 1.9417e-04, 2.1867e-04, + 2.3295e-04, 1.8204e-04], device='cuda:2') +2023-03-08 22:47:42,091 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1108, 5.0905, 4.3677, 5.0160, 5.0102, 4.5258, 4.9687, 4.6082], + device='cuda:2'), covar=tensor([0.0543, 0.0524, 0.2036, 0.0916, 0.0639, 0.0534, 0.0525, 0.1130], + device='cuda:2'), in_proj_covar=tensor([0.0368, 0.0424, 0.0578, 0.0337, 0.0309, 0.0390, 0.0412, 0.0529], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-08 22:47:42,916 INFO [train.py:898] (2/4) Epoch 8, batch 2450, loss[loss=0.2086, simple_loss=0.2884, pruned_loss=0.06439, over 18215.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2873, pruned_loss=0.06234, over 3578570.91 frames. ], batch size: 60, lr: 1.40e-02, grad_scale: 8.0 +2023-03-08 22:48:08,347 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7570, 3.1919, 4.4381, 4.2008, 2.7252, 4.7998, 4.1253, 2.8737], + device='cuda:2'), covar=tensor([0.0306, 0.0942, 0.0190, 0.0163, 0.1177, 0.0108, 0.0333, 0.0865], + device='cuda:2'), in_proj_covar=tensor([0.0175, 0.0209, 0.0129, 0.0126, 0.0203, 0.0169, 0.0184, 0.0185], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 22:48:41,272 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.646e+02 3.668e+02 4.454e+02 5.440e+02 2.886e+03, threshold=8.907e+02, percent-clipped=9.0 +2023-03-08 22:48:42,416 INFO [train.py:898] (2/4) Epoch 8, batch 2500, loss[loss=0.1894, simple_loss=0.2818, pruned_loss=0.04851, over 18257.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.287, pruned_loss=0.06269, over 3561463.68 frames. ], batch size: 45, lr: 1.40e-02, grad_scale: 8.0 +2023-03-08 22:49:41,159 INFO [train.py:898] (2/4) Epoch 8, batch 2550, loss[loss=0.1752, simple_loss=0.2509, pruned_loss=0.04969, over 17656.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2868, pruned_loss=0.06278, over 3555514.03 frames. 
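
The zipformer.py:1455 dumps are attention diagnostics: per-head entropy of the attention weight distribution, averaged over positions, alongside companion covariance statistics for the same module (covar, in_proj_covar, out_proj_covar). Entropy near 0 means a head is locked onto single positions; values near log(seq_len) mean it is almost uniform. A sketch of the core statistic, with the shape convention assumed:

    import torch

    def attn_weights_entropy(attn: torch.Tensor) -> torch.Tensor:
        # attn: (num_heads, batch, tgt_len, src_len), each row summing to 1.
        p = attn.clamp(min=1e-20)
        ent = -(p * p.log()).sum(dim=-1)  # entropy per (head, batch, query)
        return ent.mean(dim=(1, 2))       # average entropy per head
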
], batch size: 39, lr: 1.40e-02, grad_scale: 8.0 +2023-03-08 22:50:20,672 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-03-08 22:50:37,311 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2689, 5.1485, 5.3216, 5.3876, 5.1952, 5.9369, 5.5552, 5.3532], + device='cuda:2'), covar=tensor([0.0920, 0.0593, 0.0734, 0.0566, 0.1372, 0.0708, 0.0644, 0.1395], + device='cuda:2'), in_proj_covar=tensor([0.0283, 0.0217, 0.0224, 0.0223, 0.0264, 0.0324, 0.0213, 0.0314], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-08 22:50:45,007 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.293e+02 3.705e+02 4.339e+02 5.125e+02 8.562e+02, threshold=8.678e+02, percent-clipped=0.0 +2023-03-08 22:50:45,044 INFO [train.py:898] (2/4) Epoch 8, batch 2600, loss[loss=0.2181, simple_loss=0.3063, pruned_loss=0.06499, over 18101.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2879, pruned_loss=0.06333, over 3541422.77 frames. ], batch size: 62, lr: 1.40e-02, grad_scale: 4.0 +2023-03-08 22:51:22,144 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28070.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 22:51:43,977 INFO [train.py:898] (2/4) Epoch 8, batch 2650, loss[loss=0.209, simple_loss=0.2852, pruned_loss=0.06641, over 18284.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2882, pruned_loss=0.06288, over 3545053.31 frames. ], batch size: 49, lr: 1.40e-02, grad_scale: 4.0 +2023-03-08 22:51:47,983 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.18 vs. limit=5.0 +2023-03-08 22:52:42,846 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.340e+02 3.798e+02 4.426e+02 5.240e+02 9.211e+02, threshold=8.852e+02, percent-clipped=2.0 +2023-03-08 22:52:42,871 INFO [train.py:898] (2/4) Epoch 8, batch 2700, loss[loss=0.1951, simple_loss=0.2885, pruned_loss=0.05083, over 18610.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.2879, pruned_loss=0.06247, over 3551869.13 frames. ], batch size: 52, lr: 1.40e-02, grad_scale: 4.0 +2023-03-08 22:53:41,234 INFO [train.py:898] (2/4) Epoch 8, batch 2750, loss[loss=0.2049, simple_loss=0.2812, pruned_loss=0.06431, over 18285.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2881, pruned_loss=0.06267, over 3552145.35 frames. ], batch size: 49, lr: 1.40e-02, grad_scale: 4.0 +2023-03-08 22:54:29,091 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28229.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:54:40,880 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.331e+02 3.538e+02 4.348e+02 5.069e+02 1.374e+03, threshold=8.696e+02, percent-clipped=5.0 +2023-03-08 22:54:40,916 INFO [train.py:898] (2/4) Epoch 8, batch 2800, loss[loss=0.2267, simple_loss=0.309, pruned_loss=0.07219, over 18319.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2865, pruned_loss=0.06167, over 3570462.65 frames. ], batch size: 56, lr: 1.40e-02, grad_scale: 8.0 +2023-03-08 22:54:59,989 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28255.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:55:37,530 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28287.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:55:39,257 INFO [train.py:898] (2/4) Epoch 8, batch 2850, loss[loss=0.2508, simple_loss=0.3263, pruned_loss=0.08766, over 18642.00 frames. 
], tot_loss[loss=0.2052, simple_loss=0.2867, pruned_loss=0.06187, over 3572967.04 frames. ], batch size: 52, lr: 1.39e-02, grad_scale: 8.0 +2023-03-08 22:55:40,858 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28290.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:55:55,512 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1971, 4.2164, 2.8027, 4.3434, 5.2628, 2.6805, 3.7261, 3.5915], + device='cuda:2'), covar=tensor([0.0072, 0.0909, 0.1185, 0.0416, 0.0034, 0.1069, 0.0562, 0.0744], + device='cuda:2'), in_proj_covar=tensor([0.0099, 0.0203, 0.0180, 0.0179, 0.0079, 0.0169, 0.0191, 0.0191], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0002, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 22:56:11,256 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28316.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 22:56:24,268 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-03-08 22:56:26,631 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.08 vs. limit=2.0 +2023-03-08 22:56:38,155 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.182e+02 3.670e+02 4.425e+02 5.374e+02 1.143e+03, threshold=8.851e+02, percent-clipped=3.0 +2023-03-08 22:56:38,180 INFO [train.py:898] (2/4) Epoch 8, batch 2900, loss[loss=0.207, simple_loss=0.294, pruned_loss=0.06004, over 17727.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2867, pruned_loss=0.06187, over 3569072.75 frames. ], batch size: 70, lr: 1.39e-02, grad_scale: 8.0 +2023-03-08 22:56:49,620 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28348.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:57:14,655 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28370.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 22:57:36,815 INFO [train.py:898] (2/4) Epoch 8, batch 2950, loss[loss=0.2193, simple_loss=0.3043, pruned_loss=0.0671, over 18312.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2861, pruned_loss=0.06173, over 3563656.70 frames. ], batch size: 54, lr: 1.39e-02, grad_scale: 8.0 +2023-03-08 22:58:11,172 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28418.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 22:58:36,003 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.441e+02 3.351e+02 4.042e+02 5.395e+02 3.528e+03, threshold=8.084e+02, percent-clipped=8.0 +2023-03-08 22:58:36,028 INFO [train.py:898] (2/4) Epoch 8, batch 3000, loss[loss=0.1744, simple_loss=0.2543, pruned_loss=0.04728, over 18298.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2855, pruned_loss=0.0613, over 3569913.67 frames. 
], batch size: 49, lr: 1.39e-02, grad_scale: 8.0 +2023-03-08 22:58:36,028 INFO [train.py:923] (2/4) Computing validation loss +2023-03-08 22:58:42,729 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2619, 3.0457, 3.0614, 2.6596, 3.0374, 2.4661, 2.5143, 3.2565], + device='cuda:2'), covar=tensor([0.0039, 0.0067, 0.0062, 0.0116, 0.0061, 0.0138, 0.0151, 0.0044], + device='cuda:2'), in_proj_covar=tensor([0.0075, 0.0100, 0.0088, 0.0136, 0.0089, 0.0134, 0.0141, 0.0076], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-08 22:58:47,830 INFO [train.py:932] (2/4) Epoch 8, validation: loss=0.165, simple_loss=0.2676, pruned_loss=0.03118, over 944034.00 frames. +2023-03-08 22:58:47,831 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-08 22:59:02,943 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0507, 5.1664, 2.8647, 5.0073, 4.8534, 5.2442, 4.9766, 2.4848], + device='cuda:2'), covar=tensor([0.0157, 0.0069, 0.0621, 0.0073, 0.0075, 0.0066, 0.0106, 0.1031], + device='cuda:2'), in_proj_covar=tensor([0.0070, 0.0060, 0.0085, 0.0074, 0.0069, 0.0057, 0.0071, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 22:59:42,663 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28486.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 22:59:43,775 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9759, 2.9166, 2.0480, 3.3155, 2.4629, 3.1730, 2.0854, 2.9042], + device='cuda:2'), covar=tensor([0.0477, 0.0611, 0.0969, 0.0491, 0.0643, 0.0267, 0.0978, 0.0330], + device='cuda:2'), in_proj_covar=tensor([0.0173, 0.0200, 0.0169, 0.0208, 0.0170, 0.0204, 0.0182, 0.0173], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-08 22:59:46,139 INFO [train.py:898] (2/4) Epoch 8, batch 3050, loss[loss=0.2598, simple_loss=0.3271, pruned_loss=0.0962, over 12209.00 frames. ], tot_loss[loss=0.205, simple_loss=0.286, pruned_loss=0.06197, over 3548420.43 frames. ], batch size: 130, lr: 1.39e-02, grad_scale: 8.0 +2023-03-08 22:59:49,893 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28492.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:00:44,056 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.579e+02 3.667e+02 4.403e+02 5.909e+02 1.221e+03, threshold=8.806e+02, percent-clipped=6.0 +2023-03-08 23:00:44,082 INFO [train.py:898] (2/4) Epoch 8, batch 3100, loss[loss=0.1758, simple_loss=0.2591, pruned_loss=0.04622, over 18279.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2853, pruned_loss=0.0614, over 3564786.77 frames. 
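
The validation block above (train.py:923/932/933) runs the full dev set with gradients disabled before training resumes, and the memory line reports PyTorch's allocation high-water mark for this device. A minimal sketch of both; compute_loss and the loader are placeholders:

    import torch

    @torch.no_grad()
    def compute_validation_loss(model, valid_loader):
        model.eval()
        tot, frames = 0.0, 0.0
        for batch in valid_loader:
            loss, n = compute_loss(model, batch)  # summed loss, frame count
            tot, frames = tot + loss.item(), frames + n
        model.train()
        return tot / frames

    mb = torch.cuda.max_memory_allocated() // (1024 * 1024)
    print(f"Maximum memory allocated so far is {mb}MB")
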
], batch size: 47, lr: 1.39e-02, grad_scale: 8.0 +2023-03-08 23:00:54,259 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28547.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:00:57,780 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7798, 3.7963, 4.8199, 2.8404, 4.0617, 2.4928, 2.9515, 1.8994], + device='cuda:2'), covar=tensor([0.0855, 0.0634, 0.0078, 0.0602, 0.0516, 0.2103, 0.2194, 0.1699], + device='cuda:2'), in_proj_covar=tensor([0.0187, 0.0200, 0.0100, 0.0157, 0.0217, 0.0238, 0.0261, 0.0200], + device='cuda:2'), out_proj_covar=tensor([1.6709e-04, 1.8467e-04, 9.4028e-05, 1.4403e-04, 2.0154e-04, 2.2261e-04, + 2.4032e-04, 1.8771e-04], device='cuda:2') +2023-03-08 23:01:01,064 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28553.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:01:05,722 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8361, 3.7941, 3.6017, 3.1354, 3.5439, 2.9909, 2.9747, 3.8251], + device='cuda:2'), covar=tensor([0.0036, 0.0060, 0.0066, 0.0122, 0.0076, 0.0155, 0.0171, 0.0056], + device='cuda:2'), in_proj_covar=tensor([0.0073, 0.0097, 0.0087, 0.0133, 0.0088, 0.0130, 0.0139, 0.0074], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-08 23:01:13,643 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.99 vs. limit=5.0 +2023-03-08 23:01:39,209 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28585.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:01:43,475 INFO [train.py:898] (2/4) Epoch 8, batch 3150, loss[loss=0.1611, simple_loss=0.2397, pruned_loss=0.04131, over 18488.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2857, pruned_loss=0.06169, over 3574200.23 frames. ], batch size: 44, lr: 1.39e-02, grad_scale: 8.0 +2023-03-08 23:02:10,532 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28611.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 23:02:43,327 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.977e+02 3.448e+02 4.056e+02 5.168e+02 1.166e+03, threshold=8.112e+02, percent-clipped=4.0 +2023-03-08 23:02:43,352 INFO [train.py:898] (2/4) Epoch 8, batch 3200, loss[loss=0.1877, simple_loss=0.2717, pruned_loss=0.05184, over 18406.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2853, pruned_loss=0.06094, over 3583470.17 frames. ], batch size: 48, lr: 1.39e-02, grad_scale: 8.0 +2023-03-08 23:02:48,217 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28643.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:03:32,973 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-08 23:03:42,520 INFO [train.py:898] (2/4) Epoch 8, batch 3250, loss[loss=0.21, simple_loss=0.2944, pruned_loss=0.06284, over 17208.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2852, pruned_loss=0.06101, over 3590024.28 frames. 
], batch size: 78, lr: 1.39e-02, grad_scale: 4.0 +2023-03-08 23:04:04,711 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9218, 2.6336, 4.1807, 4.1089, 2.6453, 4.5377, 3.8002, 2.3684], + device='cuda:2'), covar=tensor([0.0306, 0.1460, 0.0189, 0.0164, 0.1375, 0.0172, 0.0429, 0.1277], + device='cuda:2'), in_proj_covar=tensor([0.0175, 0.0206, 0.0130, 0.0128, 0.0203, 0.0172, 0.0187, 0.0184], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 23:04:42,267 INFO [train.py:898] (2/4) Epoch 8, batch 3300, loss[loss=0.2146, simple_loss=0.2998, pruned_loss=0.06464, over 18343.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.2855, pruned_loss=0.06109, over 3593312.52 frames. ], batch size: 56, lr: 1.38e-02, grad_scale: 4.0 +2023-03-08 23:04:43,381 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.421e+02 3.488e+02 4.190e+02 5.293e+02 2.938e+03, threshold=8.380e+02, percent-clipped=5.0 +2023-03-08 23:04:43,816 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1601, 2.6131, 2.3091, 2.5157, 3.3242, 3.2372, 2.7938, 2.6246], + device='cuda:2'), covar=tensor([0.0311, 0.0307, 0.0655, 0.0410, 0.0184, 0.0152, 0.0339, 0.0364], + device='cuda:2'), in_proj_covar=tensor([0.0107, 0.0093, 0.0146, 0.0126, 0.0092, 0.0073, 0.0120, 0.0119], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 23:04:50,453 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9976, 5.0771, 5.0798, 4.8023, 4.8256, 4.8544, 5.2414, 5.1152], + device='cuda:2'), covar=tensor([0.0071, 0.0080, 0.0062, 0.0103, 0.0069, 0.0105, 0.0093, 0.0123], + device='cuda:2'), in_proj_covar=tensor([0.0074, 0.0055, 0.0056, 0.0070, 0.0058, 0.0081, 0.0067, 0.0067], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 23:05:21,260 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1017, 5.1679, 2.8145, 4.9619, 4.7090, 5.2200, 4.9393, 2.4406], + device='cuda:2'), covar=tensor([0.0137, 0.0060, 0.0666, 0.0073, 0.0078, 0.0060, 0.0105, 0.1042], + device='cuda:2'), in_proj_covar=tensor([0.0070, 0.0060, 0.0085, 0.0074, 0.0070, 0.0056, 0.0071, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 23:05:22,349 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6820, 4.6844, 4.6482, 4.4829, 4.5165, 4.5830, 4.8773, 4.7957], + device='cuda:2'), covar=tensor([0.0062, 0.0072, 0.0092, 0.0092, 0.0071, 0.0094, 0.0070, 0.0088], + device='cuda:2'), in_proj_covar=tensor([0.0072, 0.0054, 0.0055, 0.0068, 0.0056, 0.0079, 0.0066, 0.0066], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0002, 0.0003, 0.0002, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 23:05:41,116 INFO [train.py:898] (2/4) Epoch 8, batch 3350, loss[loss=0.2185, simple_loss=0.3022, pruned_loss=0.06738, over 18488.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2858, pruned_loss=0.0614, over 3589245.61 frames. ], batch size: 59, lr: 1.38e-02, grad_scale: 4.0 +2023-03-08 23:06:40,388 INFO [train.py:898] (2/4) Epoch 8, batch 3400, loss[loss=0.2179, simple_loss=0.3012, pruned_loss=0.0673, over 18395.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2857, pruned_loss=0.06124, over 3594638.33 frames. 
], batch size: 52, lr: 1.38e-02, grad_scale: 4.0 +2023-03-08 23:06:40,719 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3952, 5.3185, 4.9097, 5.3835, 5.3277, 4.7399, 5.2208, 5.0576], + device='cuda:2'), covar=tensor([0.0449, 0.0486, 0.1545, 0.0658, 0.0551, 0.0437, 0.0410, 0.0875], + device='cuda:2'), in_proj_covar=tensor([0.0361, 0.0422, 0.0555, 0.0331, 0.0300, 0.0382, 0.0406, 0.0510], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-08 23:06:41,527 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.584e+02 3.993e+02 4.817e+02 5.813e+02 1.322e+03, threshold=9.634e+02, percent-clipped=7.0 +2023-03-08 23:06:41,951 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6964, 4.4758, 4.6559, 3.3025, 3.6627, 3.7162, 2.6697, 2.2980], + device='cuda:2'), covar=tensor([0.0233, 0.0189, 0.0049, 0.0264, 0.0305, 0.0181, 0.0692, 0.0871], + device='cuda:2'), in_proj_covar=tensor([0.0052, 0.0044, 0.0042, 0.0056, 0.0075, 0.0052, 0.0069, 0.0075], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 23:06:44,142 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28842.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:06:50,914 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28848.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:07:07,362 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28861.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:07:35,092 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28885.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:07:39,397 INFO [train.py:898] (2/4) Epoch 8, batch 3450, loss[loss=0.1824, simple_loss=0.2756, pruned_loss=0.04458, over 18483.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2853, pruned_loss=0.06085, over 3598140.02 frames. ], batch size: 53, lr: 1.38e-02, grad_scale: 4.0 +2023-03-08 23:07:44,839 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=2.03 vs. limit=2.0 +2023-03-08 23:08:06,145 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28911.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 23:08:18,944 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28922.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:08:31,115 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28933.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:08:38,991 INFO [train.py:898] (2/4) Epoch 8, batch 3500, loss[loss=0.1918, simple_loss=0.2756, pruned_loss=0.05401, over 18418.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2845, pruned_loss=0.06035, over 3607176.11 frames. 
], batch size: 48, lr: 1.38e-02, grad_scale: 4.0 +2023-03-08 23:08:40,152 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.088e+02 3.743e+02 4.474e+02 5.691e+02 1.966e+03, threshold=8.949e+02, percent-clipped=4.0 +2023-03-08 23:08:43,914 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28943.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:09:02,281 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28959.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 23:09:20,868 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8736, 3.7898, 3.6027, 3.1172, 3.5963, 2.8048, 3.0027, 3.8210], + device='cuda:2'), covar=tensor([0.0027, 0.0059, 0.0064, 0.0119, 0.0059, 0.0144, 0.0140, 0.0041], + device='cuda:2'), in_proj_covar=tensor([0.0073, 0.0099, 0.0090, 0.0135, 0.0088, 0.0132, 0.0139, 0.0075], + device='cuda:2'), out_proj_covar=tensor([9.9216e-05, 1.4472e-04, 1.3043e-04, 2.0716e-04, 1.2515e-04, 1.9863e-04, + 2.0874e-04, 1.0596e-04], device='cuda:2') +2023-03-08 23:09:35,650 INFO [train.py:898] (2/4) Epoch 8, batch 3550, loss[loss=0.1868, simple_loss=0.263, pruned_loss=0.05528, over 18490.00 frames. ], tot_loss[loss=0.2029, simple_loss=0.2845, pruned_loss=0.06067, over 3598312.62 frames. ], batch size: 47, lr: 1.38e-02, grad_scale: 4.0 +2023-03-08 23:09:37,921 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=28991.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:09:50,552 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29002.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:10:30,401 INFO [train.py:898] (2/4) Epoch 8, batch 3600, loss[loss=0.1865, simple_loss=0.2667, pruned_loss=0.05318, over 18535.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2842, pruned_loss=0.06058, over 3594881.95 frames. ], batch size: 49, lr: 1.38e-02, grad_scale: 8.0 +2023-03-08 23:10:31,431 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.295e+02 3.435e+02 4.194e+02 5.068e+02 1.055e+03, threshold=8.389e+02, percent-clipped=1.0 +2023-03-08 23:10:38,063 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9111, 4.8808, 4.4805, 4.8788, 4.8282, 4.2456, 4.7154, 4.5614], + device='cuda:2'), covar=tensor([0.0401, 0.0488, 0.1350, 0.0598, 0.0508, 0.0484, 0.0447, 0.0912], + device='cuda:2'), in_proj_covar=tensor([0.0366, 0.0426, 0.0572, 0.0335, 0.0307, 0.0387, 0.0412, 0.0522], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-08 23:10:56,810 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29063.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:11:37,010 INFO [train.py:898] (2/4) Epoch 9, batch 0, loss[loss=0.2001, simple_loss=0.2905, pruned_loss=0.05483, over 18575.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2905, pruned_loss=0.05483, over 18575.00 frames. ], batch size: 54, lr: 1.30e-02, grad_scale: 8.0 +2023-03-08 23:11:37,010 INFO [train.py:923] (2/4) Computing validation loss +2023-03-08 23:11:48,955 INFO [train.py:932] (2/4) Epoch 9, validation: loss=0.1674, simple_loss=0.2698, pruned_loss=0.03254, over 944034.00 frames. 
+2023-03-08 23:11:48,956 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-08 23:12:38,173 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8652, 5.4779, 5.4581, 5.3705, 5.0621, 5.3093, 4.7560, 5.3129], + device='cuda:2'), covar=tensor([0.0175, 0.0211, 0.0139, 0.0254, 0.0250, 0.0196, 0.0880, 0.0200], + device='cuda:2'), in_proj_covar=tensor([0.0156, 0.0197, 0.0187, 0.0213, 0.0197, 0.0207, 0.0264, 0.0190], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-08 23:12:44,123 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4585, 2.1835, 2.0512, 2.0093, 2.4584, 2.3857, 2.2609, 2.1851], + device='cuda:2'), covar=tensor([0.0208, 0.0242, 0.0514, 0.0388, 0.0201, 0.0144, 0.0367, 0.0288], + device='cuda:2'), in_proj_covar=tensor([0.0106, 0.0093, 0.0146, 0.0125, 0.0091, 0.0075, 0.0120, 0.0115], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 23:12:48,242 INFO [train.py:898] (2/4) Epoch 9, batch 50, loss[loss=0.2726, simple_loss=0.3314, pruned_loss=0.1069, over 11924.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2855, pruned_loss=0.06027, over 800213.21 frames. ], batch size: 130, lr: 1.30e-02, grad_scale: 8.0 +2023-03-08 23:13:08,328 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.388e+02 3.677e+02 4.281e+02 4.949e+02 1.360e+03, threshold=8.563e+02, percent-clipped=6.0 +2023-03-08 23:13:10,926 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29142.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:13:18,817 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29148.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:13:25,810 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5466, 3.3735, 1.9567, 4.3180, 2.8184, 4.4630, 2.2453, 3.9968], + device='cuda:2'), covar=tensor([0.0523, 0.0786, 0.1488, 0.0427, 0.0997, 0.0241, 0.1251, 0.0354], + device='cuda:2'), in_proj_covar=tensor([0.0174, 0.0202, 0.0172, 0.0210, 0.0170, 0.0207, 0.0185, 0.0174], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 23:13:47,195 INFO [train.py:898] (2/4) Epoch 9, batch 100, loss[loss=0.1935, simple_loss=0.2891, pruned_loss=0.04898, over 18410.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2818, pruned_loss=0.05908, over 1433869.38 frames. 
], batch size: 52, lr: 1.30e-02, grad_scale: 8.0 +2023-03-08 23:14:07,669 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29190.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:14:14,372 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29196.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:14:36,166 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4131, 4.3674, 2.5790, 4.4641, 5.4831, 2.6105, 4.2092, 4.0172], + device='cuda:2'), covar=tensor([0.0059, 0.1094, 0.1467, 0.0449, 0.0040, 0.1245, 0.0524, 0.0691], + device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0209, 0.0184, 0.0184, 0.0081, 0.0168, 0.0198, 0.0194], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 23:14:39,431 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29217.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:14:46,050 INFO [train.py:898] (2/4) Epoch 9, batch 150, loss[loss=0.1973, simple_loss=0.2855, pruned_loss=0.05449, over 18481.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2831, pruned_loss=0.05946, over 1916223.15 frames. ], batch size: 53, lr: 1.30e-02, grad_scale: 8.0 +2023-03-08 23:14:56,827 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-08 23:15:00,942 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29236.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:15:05,100 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.617e+02 3.595e+02 4.206e+02 4.990e+02 1.116e+03, threshold=8.412e+02, percent-clipped=1.0 +2023-03-08 23:15:44,239 INFO [train.py:898] (2/4) Epoch 9, batch 200, loss[loss=0.2172, simple_loss=0.2999, pruned_loss=0.0673, over 18297.00 frames. ], tot_loss[loss=0.2005, simple_loss=0.2823, pruned_loss=0.05933, over 2291181.75 frames. ], batch size: 57, lr: 1.30e-02, grad_scale: 8.0 +2023-03-08 23:15:52,813 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2795, 5.1748, 5.3720, 5.2609, 5.1888, 5.9563, 5.6608, 5.4383], + device='cuda:2'), covar=tensor([0.0824, 0.0603, 0.0600, 0.0651, 0.1361, 0.0735, 0.0578, 0.1433], + device='cuda:2'), in_proj_covar=tensor([0.0283, 0.0218, 0.0225, 0.0224, 0.0268, 0.0327, 0.0211, 0.0310], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-08 23:16:13,825 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29297.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:16:44,290 INFO [train.py:898] (2/4) Epoch 9, batch 250, loss[loss=0.1839, simple_loss=0.2585, pruned_loss=0.05467, over 17724.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.282, pruned_loss=0.0591, over 2574055.34 frames. 
], batch size: 39, lr: 1.30e-02, grad_scale: 8.0 +2023-03-08 23:17:03,791 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.373e+02 3.539e+02 4.403e+02 5.304e+02 1.212e+03, threshold=8.805e+02, percent-clipped=3.0 +2023-03-08 23:17:19,013 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.1751, 2.8104, 4.0391, 3.5769, 2.8937, 2.8091, 3.6875, 4.0399], + device='cuda:2'), covar=tensor([0.0849, 0.1487, 0.0102, 0.0305, 0.0684, 0.0861, 0.0304, 0.0225], + device='cuda:2'), in_proj_covar=tensor([0.0131, 0.0222, 0.0085, 0.0148, 0.0166, 0.0168, 0.0157, 0.0120], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-08 23:17:25,607 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29358.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:17:25,856 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2182, 4.2875, 2.7142, 4.3275, 5.3081, 2.8252, 3.9159, 3.8278], + device='cuda:2'), covar=tensor([0.0085, 0.1074, 0.1382, 0.0534, 0.0057, 0.1103, 0.0595, 0.0779], + device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0208, 0.0181, 0.0182, 0.0080, 0.0167, 0.0195, 0.0191], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 23:17:43,339 INFO [train.py:898] (2/4) Epoch 9, batch 300, loss[loss=0.1917, simple_loss=0.2771, pruned_loss=0.05317, over 18392.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2818, pruned_loss=0.05944, over 2803926.99 frames. ], batch size: 48, lr: 1.30e-02, grad_scale: 8.0 +2023-03-08 23:18:25,101 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4446, 5.3727, 4.9584, 5.4185, 5.3511, 4.7783, 5.3046, 5.0072], + device='cuda:2'), covar=tensor([0.0406, 0.0454, 0.1529, 0.0656, 0.0510, 0.0468, 0.0373, 0.0937], + device='cuda:2'), in_proj_covar=tensor([0.0365, 0.0425, 0.0581, 0.0341, 0.0310, 0.0390, 0.0410, 0.0523], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-08 23:18:27,452 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0384, 3.4160, 3.4044, 2.8398, 2.9773, 2.8912, 2.4067, 2.1830], + device='cuda:2'), covar=tensor([0.0231, 0.0170, 0.0126, 0.0272, 0.0359, 0.0236, 0.0631, 0.0789], + device='cuda:2'), in_proj_covar=tensor([0.0053, 0.0044, 0.0042, 0.0056, 0.0075, 0.0052, 0.0068, 0.0076], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 23:18:42,972 INFO [train.py:898] (2/4) Epoch 9, batch 350, loss[loss=0.1643, simple_loss=0.2481, pruned_loss=0.04032, over 17654.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.282, pruned_loss=0.05926, over 2974139.38 frames. ], batch size: 39, lr: 1.30e-02, grad_scale: 8.0 +2023-03-08 23:18:50,760 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-03-08 23:19:02,291 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.167e+02 3.494e+02 4.009e+02 5.079e+02 1.236e+03, threshold=8.018e+02, percent-clipped=2.0 +2023-03-08 23:19:41,978 INFO [train.py:898] (2/4) Epoch 9, batch 400, loss[loss=0.2074, simple_loss=0.2974, pruned_loss=0.05873, over 18339.00 frames. ], tot_loss[loss=0.2, simple_loss=0.282, pruned_loss=0.05902, over 3106637.66 frames. 
], batch size: 55, lr: 1.29e-02, grad_scale: 8.0 +2023-03-08 23:20:10,820 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.6416, 5.5653, 5.1823, 5.6049, 5.5459, 4.9046, 5.4931, 5.1632], + device='cuda:2'), covar=tensor([0.0327, 0.0372, 0.1171, 0.0552, 0.0427, 0.0374, 0.0303, 0.0835], + device='cuda:2'), in_proj_covar=tensor([0.0361, 0.0421, 0.0568, 0.0338, 0.0306, 0.0383, 0.0405, 0.0514], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-08 23:20:34,069 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29517.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:20:40,653 INFO [train.py:898] (2/4) Epoch 9, batch 450, loss[loss=0.1996, simple_loss=0.2831, pruned_loss=0.05806, over 16306.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.281, pruned_loss=0.05862, over 3210069.14 frames. ], batch size: 94, lr: 1.29e-02, grad_scale: 8.0 +2023-03-08 23:20:41,638 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-03-08 23:20:46,571 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1305, 2.5810, 2.2713, 2.5045, 3.2659, 3.1196, 2.7968, 2.5730], + device='cuda:2'), covar=tensor([0.0287, 0.0299, 0.0709, 0.0347, 0.0227, 0.0272, 0.0386, 0.0395], + device='cuda:2'), in_proj_covar=tensor([0.0109, 0.0092, 0.0144, 0.0123, 0.0092, 0.0076, 0.0120, 0.0116], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 23:20:59,762 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.081e+02 3.569e+02 4.141e+02 5.253e+02 9.990e+02, threshold=8.283e+02, percent-clipped=5.0 +2023-03-08 23:21:12,962 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29551.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:21:15,092 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1194, 4.0632, 3.8247, 4.0823, 4.0800, 3.6110, 4.0375, 3.8657], + device='cuda:2'), covar=tensor([0.0467, 0.0756, 0.1474, 0.0725, 0.0559, 0.0532, 0.0475, 0.0970], + device='cuda:2'), in_proj_covar=tensor([0.0365, 0.0426, 0.0577, 0.0343, 0.0308, 0.0386, 0.0409, 0.0521], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-08 23:21:30,031 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29565.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:21:40,060 INFO [train.py:898] (2/4) Epoch 9, batch 500, loss[loss=0.2046, simple_loss=0.2894, pruned_loss=0.05993, over 18501.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2811, pruned_loss=0.05818, over 3303070.20 frames. 
], batch size: 53, lr: 1.29e-02, grad_scale: 8.0 +2023-03-08 23:21:43,806 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5407, 3.6290, 3.4011, 2.8597, 3.2458, 2.6374, 2.6435, 3.6090], + device='cuda:2'), covar=tensor([0.0036, 0.0047, 0.0057, 0.0111, 0.0077, 0.0147, 0.0151, 0.0050], + device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0102, 0.0091, 0.0139, 0.0092, 0.0136, 0.0144, 0.0077], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-08 23:21:59,820 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0387, 4.3160, 2.2281, 4.3237, 5.2342, 2.2776, 3.5752, 3.7995], + device='cuda:2'), covar=tensor([0.0091, 0.0864, 0.1631, 0.0492, 0.0047, 0.1425, 0.0779, 0.0694], + device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0210, 0.0182, 0.0183, 0.0081, 0.0169, 0.0196, 0.0196], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0003, 0.0002, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 23:22:01,735 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29592.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:22:25,363 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29612.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:22:27,535 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1130, 5.1016, 4.6757, 5.0897, 5.1002, 4.4011, 4.9403, 4.7089], + device='cuda:2'), covar=tensor([0.0423, 0.0391, 0.1305, 0.0622, 0.0409, 0.0439, 0.0382, 0.0912], + device='cuda:2'), in_proj_covar=tensor([0.0373, 0.0430, 0.0579, 0.0345, 0.0312, 0.0390, 0.0413, 0.0526], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-08 23:22:29,347 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8401, 4.4793, 4.5805, 3.5532, 3.7237, 3.7009, 2.4317, 1.9084], + device='cuda:2'), covar=tensor([0.0187, 0.0144, 0.0057, 0.0238, 0.0261, 0.0183, 0.0774, 0.1073], + device='cuda:2'), in_proj_covar=tensor([0.0052, 0.0043, 0.0041, 0.0054, 0.0074, 0.0051, 0.0066, 0.0074], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 23:22:38,317 INFO [train.py:898] (2/4) Epoch 9, batch 550, loss[loss=0.1947, simple_loss=0.2796, pruned_loss=0.05488, over 18248.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2829, pruned_loss=0.05917, over 3363781.67 frames. ], batch size: 47, lr: 1.29e-02, grad_scale: 8.0 +2023-03-08 23:22:58,652 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.384e+02 3.638e+02 4.636e+02 5.710e+02 1.392e+03, threshold=9.272e+02, percent-clipped=6.0 +2023-03-08 23:23:19,559 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29658.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:23:24,160 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29662.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:23:37,407 INFO [train.py:898] (2/4) Epoch 9, batch 600, loss[loss=0.1827, simple_loss=0.2557, pruned_loss=0.05482, over 18245.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.282, pruned_loss=0.05939, over 3414339.21 frames. 
], batch size: 45, lr: 1.29e-02, grad_scale: 8.0 +2023-03-08 23:24:04,135 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1882, 5.7865, 5.4342, 5.5426, 5.3131, 5.2617, 5.8124, 5.7040], + device='cuda:2'), covar=tensor([0.1246, 0.0598, 0.0521, 0.0613, 0.1429, 0.0698, 0.0591, 0.0658], + device='cuda:2'), in_proj_covar=tensor([0.0480, 0.0386, 0.0298, 0.0429, 0.0583, 0.0425, 0.0548, 0.0418], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-08 23:24:16,717 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29706.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:24:36,230 INFO [train.py:898] (2/4) Epoch 9, batch 650, loss[loss=0.2328, simple_loss=0.3163, pruned_loss=0.07465, over 18089.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2822, pruned_loss=0.05919, over 3452089.69 frames. ], batch size: 62, lr: 1.29e-02, grad_scale: 8.0 +2023-03-08 23:24:36,683 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29723.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 23:24:57,206 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.211e+02 3.283e+02 4.124e+02 5.101e+02 2.453e+03, threshold=8.247e+02, percent-clipped=7.0 +2023-03-08 23:25:35,095 INFO [train.py:898] (2/4) Epoch 9, batch 700, loss[loss=0.2168, simple_loss=0.2929, pruned_loss=0.07039, over 18284.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2825, pruned_loss=0.05909, over 3477440.58 frames. ], batch size: 49, lr: 1.29e-02, grad_scale: 8.0 +2023-03-08 23:26:00,574 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8371, 4.5767, 4.7972, 3.5441, 3.8193, 3.7413, 2.7077, 2.3120], + device='cuda:2'), covar=tensor([0.0258, 0.0179, 0.0054, 0.0248, 0.0343, 0.0189, 0.0692, 0.0923], + device='cuda:2'), in_proj_covar=tensor([0.0054, 0.0045, 0.0043, 0.0056, 0.0077, 0.0052, 0.0070, 0.0077], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-08 23:26:10,160 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29802.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:26:21,857 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-08 23:26:34,255 INFO [train.py:898] (2/4) Epoch 9, batch 750, loss[loss=0.2111, simple_loss=0.2875, pruned_loss=0.06739, over 18431.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2826, pruned_loss=0.05901, over 3515169.69 frames. ], batch size: 48, lr: 1.29e-02, grad_scale: 8.0 +2023-03-08 23:26:37,288 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. 
limit=2.0 +2023-03-08 23:26:55,140 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.192e+02 3.216e+02 4.021e+02 4.703e+02 9.794e+02, threshold=8.041e+02, percent-clipped=2.0 +2023-03-08 23:27:03,472 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5090, 3.5441, 5.0980, 4.1629, 3.2303, 2.7937, 4.3539, 5.0932], + device='cuda:2'), covar=tensor([0.0804, 0.1476, 0.0079, 0.0322, 0.0777, 0.1074, 0.0311, 0.0178], + device='cuda:2'), in_proj_covar=tensor([0.0132, 0.0225, 0.0088, 0.0150, 0.0168, 0.0171, 0.0159, 0.0122], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-08 23:27:21,790 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29863.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:27:32,993 INFO [train.py:898] (2/4) Epoch 9, batch 800, loss[loss=0.2038, simple_loss=0.295, pruned_loss=0.05632, over 18470.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2834, pruned_loss=0.05918, over 3532779.96 frames. ], batch size: 59, lr: 1.29e-02, grad_scale: 8.0 +2023-03-08 23:27:46,367 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.74 vs. limit=2.0 +2023-03-08 23:27:56,520 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29892.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:27:59,189 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4982, 1.7448, 2.6676, 2.6550, 3.2993, 5.0126, 4.4261, 4.0402], + device='cuda:2'), covar=tensor([0.1049, 0.1945, 0.1928, 0.1217, 0.1558, 0.0072, 0.0334, 0.0343], + device='cuda:2'), in_proj_covar=tensor([0.0217, 0.0271, 0.0278, 0.0235, 0.0343, 0.0160, 0.0238, 0.0186], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-08 23:28:13,234 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-08 23:28:13,997 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29907.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:28:32,031 INFO [train.py:898] (2/4) Epoch 9, batch 850, loss[loss=0.2265, simple_loss=0.3051, pruned_loss=0.07395, over 18140.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2833, pruned_loss=0.05942, over 3546492.67 frames. ], batch size: 62, lr: 1.29e-02, grad_scale: 8.0 +2023-03-08 23:28:52,837 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.076e+02 3.769e+02 4.369e+02 5.199e+02 8.545e+02, threshold=8.737e+02, percent-clipped=2.0 +2023-03-08 23:28:53,054 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=29940.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:29:31,717 INFO [train.py:898] (2/4) Epoch 9, batch 900, loss[loss=0.1829, simple_loss=0.2721, pruned_loss=0.04682, over 16886.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2822, pruned_loss=0.05902, over 3552790.45 frames. ], batch size: 37, lr: 1.28e-02, grad_scale: 8.0 +2023-03-08 23:29:36,089 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. 
limit=2.0 +2023-03-08 23:30:01,800 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5980, 5.3094, 5.2276, 5.2704, 4.8168, 5.1874, 4.4678, 5.0819], + device='cuda:2'), covar=tensor([0.0275, 0.0242, 0.0180, 0.0319, 0.0360, 0.0187, 0.1225, 0.0272], + device='cuda:2'), in_proj_covar=tensor([0.0157, 0.0199, 0.0187, 0.0214, 0.0199, 0.0206, 0.0272, 0.0193], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-08 23:30:11,300 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6045, 4.1573, 5.4166, 3.4532, 4.3879, 2.7896, 3.3009, 2.5021], + device='cuda:2'), covar=tensor([0.0612, 0.0653, 0.0045, 0.0516, 0.0512, 0.1920, 0.2021, 0.1502], + device='cuda:2'), in_proj_covar=tensor([0.0188, 0.0203, 0.0100, 0.0159, 0.0219, 0.0238, 0.0264, 0.0203], + device='cuda:2'), out_proj_covar=tensor([1.6779e-04, 1.8716e-04, 9.3226e-05, 1.4410e-04, 2.0139e-04, 2.2071e-04, + 2.4145e-04, 1.8868e-04], device='cuda:2') +2023-03-08 23:30:30,150 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30018.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 23:30:35,520 INFO [train.py:898] (2/4) Epoch 9, batch 950, loss[loss=0.2092, simple_loss=0.2917, pruned_loss=0.06333, over 18617.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2822, pruned_loss=0.05904, over 3552096.55 frames. ], batch size: 52, lr: 1.28e-02, grad_scale: 8.0 +2023-03-08 23:30:56,114 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.525e+02 3.421e+02 3.887e+02 4.758e+02 7.533e+02, threshold=7.773e+02, percent-clipped=0.0 +2023-03-08 23:31:10,572 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4830, 3.6418, 5.3630, 4.5331, 3.2903, 3.2217, 4.4763, 5.4747], + device='cuda:2'), covar=tensor([0.0872, 0.1912, 0.0058, 0.0282, 0.0844, 0.0967, 0.0317, 0.0070], + device='cuda:2'), in_proj_covar=tensor([0.0135, 0.0226, 0.0088, 0.0152, 0.0170, 0.0171, 0.0159, 0.0123], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-08 23:31:17,132 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9446, 2.4904, 2.2151, 2.2731, 2.9941, 2.8969, 2.6284, 2.4811], + device='cuda:2'), covar=tensor([0.0183, 0.0256, 0.0681, 0.0381, 0.0216, 0.0197, 0.0408, 0.0399], + device='cuda:2'), in_proj_covar=tensor([0.0110, 0.0093, 0.0142, 0.0124, 0.0093, 0.0076, 0.0120, 0.0117], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 23:31:34,973 INFO [train.py:898] (2/4) Epoch 9, batch 1000, loss[loss=0.2069, simple_loss=0.2837, pruned_loss=0.06501, over 18407.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2825, pruned_loss=0.05935, over 3544158.68 frames. ], batch size: 48, lr: 1.28e-02, grad_scale: 8.0 +2023-03-08 23:31:42,218 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7857, 3.2201, 4.2954, 3.9914, 3.1124, 4.7570, 4.1199, 2.8664], + device='cuda:2'), covar=tensor([0.0413, 0.1062, 0.0176, 0.0236, 0.1063, 0.0120, 0.0319, 0.0996], + device='cuda:2'), in_proj_covar=tensor([0.0179, 0.0210, 0.0136, 0.0133, 0.0204, 0.0175, 0.0193, 0.0193], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 23:32:34,180 INFO [train.py:898] (2/4) Epoch 9, batch 1050, loss[loss=0.1871, simple_loss=0.2781, pruned_loss=0.04805, over 18311.00 frames. 
], tot_loss[loss=0.201, simple_loss=0.283, pruned_loss=0.05944, over 3545231.67 frames. ], batch size: 54, lr: 1.28e-02, grad_scale: 8.0 +2023-03-08 23:32:36,818 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30125.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:32:49,225 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6349, 2.9541, 4.0527, 3.9193, 2.6007, 4.4337, 3.8660, 2.5668], + device='cuda:2'), covar=tensor([0.0397, 0.1226, 0.0191, 0.0235, 0.1468, 0.0212, 0.0388, 0.1191], + device='cuda:2'), in_proj_covar=tensor([0.0179, 0.0210, 0.0137, 0.0132, 0.0205, 0.0176, 0.0192, 0.0192], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 23:32:52,481 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.56 vs. limit=5.0 +2023-03-08 23:32:53,970 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.406e+02 3.588e+02 4.236e+02 5.310e+02 1.075e+03, threshold=8.473e+02, percent-clipped=3.0 +2023-03-08 23:33:15,658 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30158.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:33:32,455 INFO [train.py:898] (2/4) Epoch 9, batch 1100, loss[loss=0.1634, simple_loss=0.2403, pruned_loss=0.04327, over 18482.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2831, pruned_loss=0.05932, over 3556182.21 frames. ], batch size: 43, lr: 1.28e-02, grad_scale: 8.0 +2023-03-08 23:33:47,849 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30186.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:33:56,345 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3514, 4.3371, 2.5947, 4.3263, 5.4019, 2.4654, 3.8838, 4.1050], + device='cuda:2'), covar=tensor([0.0059, 0.1166, 0.1523, 0.0514, 0.0042, 0.1435, 0.0675, 0.0668], + device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0216, 0.0186, 0.0184, 0.0083, 0.0172, 0.0199, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 23:33:56,594 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-08 23:34:00,357 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4482, 2.7838, 3.7396, 3.7112, 2.6495, 4.1409, 3.7918, 2.6267], + device='cuda:2'), covar=tensor([0.0445, 0.1259, 0.0193, 0.0248, 0.1340, 0.0174, 0.0371, 0.1087], + device='cuda:2'), in_proj_covar=tensor([0.0180, 0.0212, 0.0139, 0.0132, 0.0209, 0.0177, 0.0195, 0.0194], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 23:34:12,847 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30207.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:34:15,722 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30209.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:34:31,342 INFO [train.py:898] (2/4) Epoch 9, batch 1150, loss[loss=0.2518, simple_loss=0.3156, pruned_loss=0.09403, over 12146.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2835, pruned_loss=0.05984, over 3553534.51 frames. 
], batch size: 129, lr: 1.28e-02, grad_scale: 8.0 +2023-03-08 23:34:50,756 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.527e+02 3.691e+02 4.407e+02 5.401e+02 1.436e+03, threshold=8.814e+02, percent-clipped=4.0 +2023-03-08 23:35:09,078 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30255.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:35:17,197 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5578, 3.4079, 1.7119, 4.2462, 2.9731, 4.3420, 2.0821, 3.6806], + device='cuda:2'), covar=tensor([0.0534, 0.0809, 0.1611, 0.0339, 0.0850, 0.0273, 0.1261, 0.0428], + device='cuda:2'), in_proj_covar=tensor([0.0177, 0.0203, 0.0174, 0.0213, 0.0173, 0.0213, 0.0184, 0.0175], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 23:35:21,678 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8949, 4.5505, 4.7082, 3.4164, 3.7730, 3.4060, 2.8582, 2.3903], + device='cuda:2'), covar=tensor([0.0205, 0.0148, 0.0071, 0.0291, 0.0354, 0.0226, 0.0640, 0.0841], + device='cuda:2'), in_proj_covar=tensor([0.0051, 0.0043, 0.0042, 0.0055, 0.0074, 0.0051, 0.0068, 0.0074], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 23:35:27,584 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30270.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:35:30,584 INFO [train.py:898] (2/4) Epoch 9, batch 1200, loss[loss=0.2216, simple_loss=0.3083, pruned_loss=0.06747, over 18484.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2833, pruned_loss=0.05975, over 3566286.50 frames. ], batch size: 53, lr: 1.28e-02, grad_scale: 8.0 +2023-03-08 23:35:54,094 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3476, 2.6683, 2.2698, 2.4832, 3.3340, 3.3756, 2.7541, 2.7452], + device='cuda:2'), covar=tensor([0.0208, 0.0302, 0.0654, 0.0366, 0.0172, 0.0136, 0.0435, 0.0342], + device='cuda:2'), in_proj_covar=tensor([0.0112, 0.0093, 0.0144, 0.0127, 0.0092, 0.0077, 0.0121, 0.0116], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 23:36:04,524 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4582, 3.9690, 5.2081, 4.3085, 2.9760, 2.6347, 4.2909, 5.4223], + device='cuda:2'), covar=tensor([0.0931, 0.1172, 0.0077, 0.0358, 0.0937, 0.1149, 0.0373, 0.0103], + device='cuda:2'), in_proj_covar=tensor([0.0136, 0.0225, 0.0089, 0.0153, 0.0170, 0.0173, 0.0162, 0.0125], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-08 23:36:24,842 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30318.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 23:36:30,226 INFO [train.py:898] (2/4) Epoch 9, batch 1250, loss[loss=0.2742, simple_loss=0.3282, pruned_loss=0.1101, over 12736.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2831, pruned_loss=0.05974, over 3563501.77 frames. 
], batch size: 130, lr: 1.28e-02, grad_scale: 8.0 +2023-03-08 23:36:49,695 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.180e+02 3.477e+02 4.151e+02 5.169e+02 1.110e+03, threshold=8.302e+02, percent-clipped=2.0 +2023-03-08 23:37:14,305 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30360.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:37:21,684 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30366.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:37:29,900 INFO [train.py:898] (2/4) Epoch 9, batch 1300, loss[loss=0.1849, simple_loss=0.2724, pruned_loss=0.04865, over 18294.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2828, pruned_loss=0.05962, over 3571899.52 frames. ], batch size: 54, lr: 1.28e-02, grad_scale: 8.0 +2023-03-08 23:37:54,318 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.88 vs. limit=5.0 +2023-03-08 23:37:59,082 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-03-08 23:38:26,583 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30421.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:38:28,923 INFO [train.py:898] (2/4) Epoch 9, batch 1350, loss[loss=0.2187, simple_loss=0.302, pruned_loss=0.06773, over 18592.00 frames. ], tot_loss[loss=0.2014, simple_loss=0.2836, pruned_loss=0.05963, over 3577539.31 frames. ], batch size: 54, lr: 1.27e-02, grad_scale: 8.0 +2023-03-08 23:38:48,274 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.096e+02 3.366e+02 4.141e+02 5.097e+02 1.343e+03, threshold=8.282e+02, percent-clipped=5.0 +2023-03-08 23:39:05,409 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-03-08 23:39:09,446 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30458.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:39:27,634 INFO [train.py:898] (2/4) Epoch 9, batch 1400, loss[loss=0.2047, simple_loss=0.2885, pruned_loss=0.06044, over 18629.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2839, pruned_loss=0.05982, over 3574609.03 frames. ], batch size: 52, lr: 1.27e-02, grad_scale: 8.0 +2023-03-08 23:39:28,490 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.86 vs. limit=2.0 +2023-03-08 23:39:37,296 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30481.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:40:06,884 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30506.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:40:26,577 INFO [train.py:898] (2/4) Epoch 9, batch 1450, loss[loss=0.1833, simple_loss=0.2629, pruned_loss=0.05184, over 18260.00 frames. ], tot_loss[loss=0.2004, simple_loss=0.2825, pruned_loss=0.05915, over 3566386.67 frames. 
], batch size: 47, lr: 1.27e-02, grad_scale: 8.0 +2023-03-08 23:40:46,482 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.569e+02 3.548e+02 4.179e+02 5.117e+02 1.123e+03, threshold=8.357e+02, percent-clipped=2.0 +2023-03-08 23:41:01,266 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6877, 4.1157, 2.7302, 3.8918, 4.0046, 4.1846, 3.9929, 2.6223], + device='cuda:2'), covar=tensor([0.0159, 0.0074, 0.0605, 0.0181, 0.0078, 0.0068, 0.0115, 0.0841], + device='cuda:2'), in_proj_covar=tensor([0.0072, 0.0061, 0.0086, 0.0075, 0.0072, 0.0059, 0.0073, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-08 23:41:16,107 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30565.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:41:24,921 INFO [train.py:898] (2/4) Epoch 9, batch 1500, loss[loss=0.2069, simple_loss=0.2918, pruned_loss=0.06093, over 18380.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.282, pruned_loss=0.05915, over 3563420.32 frames. ], batch size: 56, lr: 1.27e-02, grad_scale: 8.0 +2023-03-08 23:41:50,798 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-03-08 23:42:01,960 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6568, 2.0051, 2.9375, 2.8938, 3.6677, 5.2461, 4.6286, 3.9925], + device='cuda:2'), covar=tensor([0.0962, 0.1802, 0.1767, 0.1075, 0.1347, 0.0062, 0.0316, 0.0399], + device='cuda:2'), in_proj_covar=tensor([0.0221, 0.0275, 0.0284, 0.0237, 0.0346, 0.0162, 0.0242, 0.0189], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-08 23:42:24,048 INFO [train.py:898] (2/4) Epoch 9, batch 1550, loss[loss=0.2127, simple_loss=0.3009, pruned_loss=0.06223, over 17250.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2816, pruned_loss=0.05889, over 3565556.33 frames. ], batch size: 78, lr: 1.27e-02, grad_scale: 8.0 +2023-03-08 23:42:44,354 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.188e+02 3.439e+02 3.957e+02 5.356e+02 1.144e+03, threshold=7.914e+02, percent-clipped=4.0 +2023-03-08 23:43:01,920 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30655.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:43:23,617 INFO [train.py:898] (2/4) Epoch 9, batch 1600, loss[loss=0.2379, simple_loss=0.3153, pruned_loss=0.0803, over 15964.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2828, pruned_loss=0.05946, over 3555606.51 frames. ], batch size: 95, lr: 1.27e-02, grad_scale: 8.0 +2023-03-08 23:44:15,196 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30716.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:44:15,434 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30716.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 23:44:22,849 INFO [train.py:898] (2/4) Epoch 9, batch 1650, loss[loss=0.2207, simple_loss=0.302, pruned_loss=0.06971, over 17998.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2816, pruned_loss=0.05856, over 3577404.43 frames. 
], batch size: 65, lr: 1.27e-02, grad_scale: 16.0 +2023-03-08 23:44:35,280 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6949, 2.8049, 4.1196, 3.9222, 2.8016, 4.5496, 4.0076, 2.5542], + device='cuda:2'), covar=tensor([0.0367, 0.1275, 0.0231, 0.0240, 0.1246, 0.0141, 0.0317, 0.0994], + device='cuda:2'), in_proj_covar=tensor([0.0179, 0.0211, 0.0140, 0.0134, 0.0206, 0.0175, 0.0196, 0.0188], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-08 23:44:41,454 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8805, 4.9756, 5.0047, 4.6566, 4.5786, 4.7457, 5.1176, 5.1117], + device='cuda:2'), covar=tensor([0.0049, 0.0052, 0.0045, 0.0083, 0.0067, 0.0109, 0.0066, 0.0078], + device='cuda:2'), in_proj_covar=tensor([0.0071, 0.0052, 0.0054, 0.0067, 0.0058, 0.0078, 0.0065, 0.0065], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 23:44:43,336 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.525e+02 3.575e+02 4.515e+02 5.590e+02 1.202e+03, threshold=9.030e+02, percent-clipped=6.0 +2023-03-08 23:45:22,489 INFO [train.py:898] (2/4) Epoch 9, batch 1700, loss[loss=0.2249, simple_loss=0.3008, pruned_loss=0.07446, over 18049.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2818, pruned_loss=0.05855, over 3573857.17 frames. ], batch size: 62, lr: 1.27e-02, grad_scale: 16.0 +2023-03-08 23:45:26,489 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3594, 2.6542, 2.4619, 2.6652, 3.5423, 3.4216, 2.9447, 2.8866], + device='cuda:2'), covar=tensor([0.0215, 0.0368, 0.0541, 0.0328, 0.0156, 0.0142, 0.0379, 0.0335], + device='cuda:2'), in_proj_covar=tensor([0.0109, 0.0094, 0.0141, 0.0123, 0.0091, 0.0076, 0.0122, 0.0116], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-08 23:45:32,826 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30781.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:45:47,586 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5904, 2.7451, 2.6518, 2.7869, 3.6380, 3.4834, 3.0849, 3.1276], + device='cuda:2'), covar=tensor([0.0156, 0.0316, 0.0602, 0.0347, 0.0169, 0.0211, 0.0385, 0.0264], + device='cuda:2'), in_proj_covar=tensor([0.0108, 0.0094, 0.0141, 0.0123, 0.0091, 0.0076, 0.0121, 0.0116], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-08 23:46:15,012 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0620, 4.9611, 5.1147, 4.7481, 4.7327, 4.8475, 5.2952, 5.1975], + device='cuda:2'), covar=tensor([0.0051, 0.0075, 0.0050, 0.0091, 0.0062, 0.0112, 0.0092, 0.0115], + device='cuda:2'), in_proj_covar=tensor([0.0074, 0.0055, 0.0055, 0.0069, 0.0060, 0.0081, 0.0067, 0.0068], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0002, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-08 23:46:22,138 INFO [train.py:898] (2/4) Epoch 9, batch 1750, loss[loss=0.1993, simple_loss=0.2714, pruned_loss=0.06366, over 17762.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2818, pruned_loss=0.05827, over 3576261.85 frames. 
], batch size: 39, lr: 1.27e-02, grad_scale: 16.0 +2023-03-08 23:46:29,118 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30829.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:46:43,009 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.320e+02 3.498e+02 4.092e+02 4.863e+02 1.038e+03, threshold=8.183e+02, percent-clipped=2.0 +2023-03-08 23:47:11,811 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30865.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:47:20,603 INFO [train.py:898] (2/4) Epoch 9, batch 1800, loss[loss=0.1854, simple_loss=0.2785, pruned_loss=0.04619, over 18378.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2819, pruned_loss=0.05787, over 3583555.08 frames. ], batch size: 55, lr: 1.27e-02, grad_scale: 8.0 +2023-03-08 23:47:51,797 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3765, 4.3032, 5.3608, 2.9462, 4.6667, 2.9577, 3.3090, 2.1968], + device='cuda:2'), covar=tensor([0.0610, 0.0560, 0.0056, 0.0604, 0.0405, 0.1838, 0.2043, 0.1559], + device='cuda:2'), in_proj_covar=tensor([0.0186, 0.0205, 0.0104, 0.0160, 0.0220, 0.0237, 0.0268, 0.0203], + device='cuda:2'), out_proj_covar=tensor([1.6479e-04, 1.8756e-04, 9.6036e-05, 1.4484e-04, 2.0135e-04, 2.1968e-04, + 2.4394e-04, 1.8821e-04], device='cuda:2') +2023-03-08 23:47:57,659 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30903.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:48:09,039 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=30913.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:48:20,085 INFO [train.py:898] (2/4) Epoch 9, batch 1850, loss[loss=0.1999, simple_loss=0.2863, pruned_loss=0.05678, over 16235.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.282, pruned_loss=0.05792, over 3580282.12 frames. ], batch size: 94, lr: 1.26e-02, grad_scale: 8.0 +2023-03-08 23:48:22,784 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30925.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:48:34,016 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30934.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:48:42,059 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.240e+02 3.365e+02 3.795e+02 4.610e+02 7.960e+02, threshold=7.590e+02, percent-clipped=0.0 +2023-03-08 23:49:09,595 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30964.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:49:19,302 INFO [train.py:898] (2/4) Epoch 9, batch 1900, loss[loss=0.2186, simple_loss=0.3001, pruned_loss=0.06853, over 18365.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.282, pruned_loss=0.05811, over 3584652.51 frames. 
], batch size: 56, lr: 1.26e-02, grad_scale: 8.0 +2023-03-08 23:49:34,714 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30986.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:49:45,362 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30995.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:50:04,630 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31011.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 23:50:04,802 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31011.0, num_to_drop=1, layers_to_drop={1} +2023-03-08 23:50:10,277 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31016.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:50:17,960 INFO [train.py:898] (2/4) Epoch 9, batch 1950, loss[loss=0.2109, simple_loss=0.2946, pruned_loss=0.06354, over 18353.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2829, pruned_loss=0.05842, over 3582440.90 frames. ], batch size: 56, lr: 1.26e-02, grad_scale: 8.0 +2023-03-08 23:50:39,082 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.073e+02 3.300e+02 4.115e+02 5.228e+02 1.013e+03, threshold=8.231e+02, percent-clipped=3.0 +2023-03-08 23:51:06,737 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31064.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:51:16,289 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31072.0, num_to_drop=1, layers_to_drop={2} +2023-03-08 23:51:16,962 INFO [train.py:898] (2/4) Epoch 9, batch 2000, loss[loss=0.1857, simple_loss=0.2711, pruned_loss=0.05011, over 18482.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2826, pruned_loss=0.05812, over 3596102.42 frames. ], batch size: 47, lr: 1.26e-02, grad_scale: 8.0 +2023-03-08 23:51:46,446 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.57 vs. limit=2.0 +2023-03-08 23:52:15,796 INFO [train.py:898] (2/4) Epoch 9, batch 2050, loss[loss=0.1961, simple_loss=0.2783, pruned_loss=0.05694, over 18295.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2838, pruned_loss=0.059, over 3590545.52 frames. 
], batch size: 49, lr: 1.26e-02, grad_scale: 8.0 +2023-03-08 23:52:22,976 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4917, 6.0227, 5.5218, 5.8139, 5.5495, 5.5654, 6.1271, 6.0149], + device='cuda:2'), covar=tensor([0.1139, 0.0823, 0.0417, 0.0700, 0.1424, 0.0595, 0.0543, 0.0708], + device='cuda:2'), in_proj_covar=tensor([0.0489, 0.0400, 0.0302, 0.0437, 0.0597, 0.0434, 0.0555, 0.0425], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-08 23:52:37,039 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.601e+02 3.647e+02 4.491e+02 5.379e+02 9.813e+02, threshold=8.982e+02, percent-clipped=3.0 +2023-03-08 23:52:51,914 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7211, 2.0801, 2.8006, 2.7596, 3.5755, 5.4143, 4.7384, 4.5652], + device='cuda:2'), covar=tensor([0.0971, 0.1724, 0.2034, 0.1167, 0.1481, 0.0065, 0.0288, 0.0260], + device='cuda:2'), in_proj_covar=tensor([0.0221, 0.0278, 0.0285, 0.0238, 0.0347, 0.0166, 0.0242, 0.0189], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-08 23:53:05,142 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2444, 5.2030, 4.7188, 5.1445, 5.1533, 4.5405, 5.0844, 4.8172], + device='cuda:2'), covar=tensor([0.0397, 0.0483, 0.1455, 0.0775, 0.0558, 0.0479, 0.0399, 0.0899], + device='cuda:2'), in_proj_covar=tensor([0.0375, 0.0433, 0.0580, 0.0348, 0.0321, 0.0394, 0.0420, 0.0541], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-08 23:53:15,211 INFO [train.py:898] (2/4) Epoch 9, batch 2100, loss[loss=0.196, simple_loss=0.286, pruned_loss=0.05297, over 18584.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2824, pruned_loss=0.05823, over 3590532.17 frames. 
], batch size: 54, lr: 1.26e-02, grad_scale: 8.0 +2023-03-08 23:53:21,344 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5657, 3.8453, 5.4453, 4.4687, 3.3379, 3.0692, 4.4973, 5.3730], + device='cuda:2'), covar=tensor([0.0819, 0.1570, 0.0055, 0.0280, 0.0841, 0.0994, 0.0303, 0.0086], + device='cuda:2'), in_proj_covar=tensor([0.0133, 0.0226, 0.0088, 0.0151, 0.0169, 0.0170, 0.0159, 0.0125], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-08 23:53:28,718 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3513, 5.9150, 5.3526, 5.5862, 5.3919, 5.3736, 5.9183, 5.9082], + device='cuda:2'), covar=tensor([0.1095, 0.0614, 0.0429, 0.0702, 0.1393, 0.0632, 0.0544, 0.0571], + device='cuda:2'), in_proj_covar=tensor([0.0492, 0.0403, 0.0304, 0.0443, 0.0603, 0.0439, 0.0556, 0.0428], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-08 23:53:28,776 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6114, 5.1493, 5.0970, 5.1720, 4.8073, 5.0199, 4.4362, 5.0004], + device='cuda:2'), covar=tensor([0.0243, 0.0291, 0.0222, 0.0345, 0.0281, 0.0242, 0.1102, 0.0289], + device='cuda:2'), in_proj_covar=tensor([0.0163, 0.0208, 0.0194, 0.0223, 0.0204, 0.0215, 0.0275, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-08 23:54:14,808 INFO [train.py:898] (2/4) Epoch 9, batch 2150, loss[loss=0.1925, simple_loss=0.2799, pruned_loss=0.05256, over 18394.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2824, pruned_loss=0.05832, over 3589900.85 frames. ], batch size: 52, lr: 1.26e-02, grad_scale: 8.0 +2023-03-08 23:54:28,161 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.97 vs. limit=5.0 +2023-03-08 23:54:35,294 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.335e+02 3.491e+02 4.023e+02 4.961e+02 1.002e+03, threshold=8.046e+02, percent-clipped=2.0 +2023-03-08 23:54:56,912 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31259.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:55:13,348 INFO [train.py:898] (2/4) Epoch 9, batch 2200, loss[loss=0.2073, simple_loss=0.2969, pruned_loss=0.05889, over 17208.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2827, pruned_loss=0.05833, over 3594618.91 frames. ], batch size: 78, lr: 1.26e-02, grad_scale: 4.0 +2023-03-08 23:55:22,618 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31281.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:55:32,564 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31290.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:55:57,326 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31311.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:56:11,960 INFO [train.py:898] (2/4) Epoch 9, batch 2250, loss[loss=0.2001, simple_loss=0.2931, pruned_loss=0.05352, over 18581.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2842, pruned_loss=0.05908, over 3574120.67 frames. 
], batch size: 54, lr: 1.26e-02, grad_scale: 4.0 +2023-03-08 23:56:33,580 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.219e+02 3.737e+02 4.347e+02 5.503e+02 2.827e+03, threshold=8.695e+02, percent-clipped=9.0 +2023-03-08 23:56:53,441 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31359.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:56:59,163 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-08 23:57:02,907 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31367.0, num_to_drop=1, layers_to_drop={3} +2023-03-08 23:57:09,987 INFO [train.py:898] (2/4) Epoch 9, batch 2300, loss[loss=0.1887, simple_loss=0.272, pruned_loss=0.05277, over 18348.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2834, pruned_loss=0.05892, over 3572312.18 frames. ], batch size: 46, lr: 1.26e-02, grad_scale: 4.0 +2023-03-08 23:57:52,698 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31409.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:58:09,049 INFO [train.py:898] (2/4) Epoch 9, batch 2350, loss[loss=0.1758, simple_loss=0.2602, pruned_loss=0.04567, over 18258.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2822, pruned_loss=0.05845, over 3569460.92 frames. ], batch size: 47, lr: 1.25e-02, grad_scale: 4.0 +2023-03-08 23:58:31,515 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.260e+02 3.212e+02 3.852e+02 4.857e+02 1.133e+03, threshold=7.704e+02, percent-clipped=2.0 +2023-03-08 23:58:48,150 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3296, 5.8838, 5.3241, 5.5274, 5.4032, 5.3288, 5.9022, 5.9007], + device='cuda:2'), covar=tensor([0.1137, 0.0715, 0.0477, 0.0733, 0.1398, 0.0731, 0.0573, 0.0614], + device='cuda:2'), in_proj_covar=tensor([0.0488, 0.0402, 0.0302, 0.0438, 0.0600, 0.0437, 0.0556, 0.0429], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-08 23:59:04,408 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31470.0, num_to_drop=0, layers_to_drop=set() +2023-03-08 23:59:07,377 INFO [train.py:898] (2/4) Epoch 9, batch 2400, loss[loss=0.185, simple_loss=0.2685, pruned_loss=0.05077, over 18359.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2819, pruned_loss=0.05832, over 3571237.07 frames. 
], batch size: 56, lr: 1.25e-02, grad_scale: 8.0 +2023-03-08 23:59:29,308 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31491.0, num_to_drop=1, layers_to_drop={0} +2023-03-08 23:59:48,008 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7154, 5.4128, 5.4689, 5.3547, 5.0030, 5.2912, 4.6822, 5.2196], + device='cuda:2'), covar=tensor([0.0303, 0.0249, 0.0154, 0.0296, 0.0353, 0.0236, 0.1059, 0.0303], + device='cuda:2'), in_proj_covar=tensor([0.0162, 0.0203, 0.0189, 0.0218, 0.0200, 0.0209, 0.0269, 0.0193], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-08 23:59:58,470 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5245, 3.5178, 3.3504, 2.8730, 3.3507, 2.5479, 2.6102, 3.5193], + device='cuda:2'), covar=tensor([0.0044, 0.0073, 0.0091, 0.0149, 0.0072, 0.0177, 0.0178, 0.0049], + device='cuda:2'), in_proj_covar=tensor([0.0080, 0.0104, 0.0096, 0.0142, 0.0094, 0.0140, 0.0147, 0.0078], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 00:00:06,632 INFO [train.py:898] (2/4) Epoch 9, batch 2450, loss[loss=0.2154, simple_loss=0.3038, pruned_loss=0.06357, over 18363.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2811, pruned_loss=0.05784, over 3571968.36 frames. ], batch size: 55, lr: 1.25e-02, grad_scale: 8.0 +2023-03-09 00:00:29,271 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.799e+02 3.437e+02 4.058e+02 5.079e+02 1.109e+03, threshold=8.115e+02, percent-clipped=5.0 +2023-03-09 00:00:40,997 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31552.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 00:00:45,736 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-09 00:00:49,331 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31559.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:01:05,461 INFO [train.py:898] (2/4) Epoch 9, batch 2500, loss[loss=0.1638, simple_loss=0.2438, pruned_loss=0.04192, over 18155.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2804, pruned_loss=0.05779, over 3571380.30 frames. ], batch size: 44, lr: 1.25e-02, grad_scale: 8.0 +2023-03-09 00:01:09,158 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6006, 5.2424, 5.2586, 5.2546, 4.8326, 5.1262, 4.4875, 5.0441], + device='cuda:2'), covar=tensor([0.0242, 0.0239, 0.0182, 0.0303, 0.0304, 0.0248, 0.1096, 0.0309], + device='cuda:2'), in_proj_covar=tensor([0.0161, 0.0203, 0.0189, 0.0218, 0.0200, 0.0210, 0.0268, 0.0193], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-09 00:01:15,209 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31581.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:01:25,906 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31590.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:01:45,091 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31607.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:02:03,050 INFO [train.py:898] (2/4) Epoch 9, batch 2550, loss[loss=0.196, simple_loss=0.2827, pruned_loss=0.0547, over 18381.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2815, pruned_loss=0.05832, over 3591713.13 frames. 
], batch size: 50, lr: 1.25e-02, grad_scale: 8.0 +2023-03-09 00:02:10,591 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31629.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:02:21,867 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31638.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:02:26,082 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.346e+02 3.602e+02 4.315e+02 5.683e+02 1.136e+03, threshold=8.630e+02, percent-clipped=7.0 +2023-03-09 00:02:54,656 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31667.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 00:03:01,163 INFO [train.py:898] (2/4) Epoch 9, batch 2600, loss[loss=0.2109, simple_loss=0.295, pruned_loss=0.06339, over 17015.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2827, pruned_loss=0.0586, over 3580492.69 frames. ], batch size: 78, lr: 1.25e-02, grad_scale: 8.0 +2023-03-09 00:03:37,763 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31704.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:03:50,206 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=31715.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 00:03:58,992 INFO [train.py:898] (2/4) Epoch 9, batch 2650, loss[loss=0.2219, simple_loss=0.3035, pruned_loss=0.0701, over 18409.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2828, pruned_loss=0.0584, over 3584056.42 frames. ], batch size: 52, lr: 1.25e-02, grad_scale: 8.0 +2023-03-09 00:04:21,586 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.212e+02 3.574e+02 4.199e+02 5.233e+02 1.424e+03, threshold=8.398e+02, percent-clipped=3.0 +2023-03-09 00:04:30,614 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4992, 1.8671, 2.6026, 2.8235, 3.5898, 5.2336, 4.5738, 4.0618], + device='cuda:2'), covar=tensor([0.1115, 0.2010, 0.2183, 0.1236, 0.1590, 0.0075, 0.0333, 0.0425], + device='cuda:2'), in_proj_covar=tensor([0.0221, 0.0278, 0.0286, 0.0238, 0.0346, 0.0167, 0.0241, 0.0188], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 00:04:48,338 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31765.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:04:48,520 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31765.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:04:57,870 INFO [train.py:898] (2/4) Epoch 9, batch 2700, loss[loss=0.1677, simple_loss=0.2404, pruned_loss=0.04749, over 17657.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2821, pruned_loss=0.05811, over 3572083.96 frames. ], batch size: 39, lr: 1.25e-02, grad_scale: 8.0 +2023-03-09 00:05:15,609 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31788.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:05:56,592 INFO [train.py:898] (2/4) Epoch 9, batch 2750, loss[loss=0.215, simple_loss=0.291, pruned_loss=0.06954, over 17996.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2824, pruned_loss=0.0582, over 3581096.49 frames. 
], batch size: 65, lr: 1.25e-02, grad_scale: 8.0 +2023-03-09 00:05:56,845 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31823.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:06:19,346 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.068e+02 3.248e+02 4.036e+02 4.734e+02 1.785e+03, threshold=8.071e+02, percent-clipped=3.0 +2023-03-09 00:06:25,197 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31847.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 00:06:27,597 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31849.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:06:55,870 INFO [train.py:898] (2/4) Epoch 9, batch 2800, loss[loss=0.1677, simple_loss=0.2488, pruned_loss=0.04331, over 18137.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2819, pruned_loss=0.05796, over 3575157.68 frames. ], batch size: 40, lr: 1.25e-02, grad_scale: 8.0 +2023-03-09 00:07:02,101 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31878.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:07:09,102 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31884.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:07:45,623 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31915.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:07:49,451 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-09 00:07:54,184 INFO [train.py:898] (2/4) Epoch 9, batch 2850, loss[loss=0.2198, simple_loss=0.3025, pruned_loss=0.06854, over 18247.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.282, pruned_loss=0.05785, over 3589907.13 frames. 
], batch size: 60, lr: 1.25e-02, grad_scale: 8.0 +2023-03-09 00:08:01,857 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0184, 4.6808, 4.8193, 3.4529, 3.8525, 3.7358, 2.7410, 2.5701], + device='cuda:2'), covar=tensor([0.0190, 0.0165, 0.0079, 0.0299, 0.0328, 0.0204, 0.0752, 0.0880], + device='cuda:2'), in_proj_covar=tensor([0.0055, 0.0045, 0.0044, 0.0057, 0.0076, 0.0053, 0.0071, 0.0078], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 00:08:07,396 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0788, 5.0357, 5.2023, 4.7889, 4.8665, 4.9359, 5.2915, 5.2680], + device='cuda:2'), covar=tensor([0.0057, 0.0077, 0.0050, 0.0093, 0.0061, 0.0093, 0.0066, 0.0087], + device='cuda:2'), in_proj_covar=tensor([0.0075, 0.0055, 0.0055, 0.0070, 0.0060, 0.0081, 0.0068, 0.0069], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 00:08:13,154 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31939.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:08:16,250 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.274e+02 3.441e+02 4.233e+02 5.343e+02 2.679e+03, threshold=8.467e+02, percent-clipped=8.0 +2023-03-09 00:08:26,680 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31950.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:08:30,334 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4424, 3.9178, 3.9274, 2.9949, 3.3164, 3.1821, 2.4213, 2.3376], + device='cuda:2'), covar=tensor([0.0216, 0.0160, 0.0084, 0.0292, 0.0374, 0.0234, 0.0692, 0.0798], + device='cuda:2'), in_proj_covar=tensor([0.0055, 0.0045, 0.0044, 0.0057, 0.0076, 0.0053, 0.0070, 0.0077], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 00:08:52,762 INFO [train.py:898] (2/4) Epoch 9, batch 2900, loss[loss=0.2192, simple_loss=0.3081, pruned_loss=0.06513, over 18308.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2808, pruned_loss=0.05726, over 3593845.60 frames. ], batch size: 57, lr: 1.24e-02, grad_scale: 8.0 +2023-03-09 00:08:56,470 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31976.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:09:43,674 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32011.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:09:56,716 INFO [train.py:898] (2/4) Epoch 9, batch 2950, loss[loss=0.2365, simple_loss=0.3152, pruned_loss=0.07895, over 16948.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2813, pruned_loss=0.05741, over 3597100.25 frames. 
], batch size: 78, lr: 1.24e-02, grad_scale: 8.0 +2023-03-09 00:10:19,145 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.223e+02 3.183e+02 3.919e+02 4.606e+02 8.522e+02, threshold=7.838e+02, percent-clipped=1.0 +2023-03-09 00:10:41,319 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32060.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:10:45,383 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2870, 5.8898, 5.4403, 5.6611, 5.3666, 5.3676, 5.9163, 5.8927], + device='cuda:2'), covar=tensor([0.1204, 0.0703, 0.0485, 0.0704, 0.1597, 0.0721, 0.0580, 0.0612], + device='cuda:2'), in_proj_covar=tensor([0.0499, 0.0408, 0.0307, 0.0452, 0.0609, 0.0447, 0.0568, 0.0431], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 00:10:47,655 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32065.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:10:56,315 INFO [train.py:898] (2/4) Epoch 9, batch 3000, loss[loss=0.1895, simple_loss=0.2716, pruned_loss=0.0537, over 18302.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2802, pruned_loss=0.05693, over 3594553.84 frames. ], batch size: 49, lr: 1.24e-02, grad_scale: 8.0 +2023-03-09 00:10:56,315 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 00:11:08,347 INFO [train.py:932] (2/4) Epoch 9, validation: loss=0.1618, simple_loss=0.2644, pruned_loss=0.02958, over 944034.00 frames. +2023-03-09 00:11:08,348 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 00:11:55,411 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32113.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:12:07,319 INFO [train.py:898] (2/4) Epoch 9, batch 3050, loss[loss=0.2121, simple_loss=0.2919, pruned_loss=0.06618, over 17100.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2797, pruned_loss=0.05703, over 3574863.24 frames. 
], batch size: 78, lr: 1.24e-02, grad_scale: 8.0 +2023-03-09 00:12:20,663 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8375, 4.6727, 4.7914, 3.4692, 3.7760, 3.5748, 2.8309, 2.4384], + device='cuda:2'), covar=tensor([0.0225, 0.0149, 0.0066, 0.0251, 0.0320, 0.0260, 0.0702, 0.0921], + device='cuda:2'), in_proj_covar=tensor([0.0055, 0.0046, 0.0045, 0.0057, 0.0076, 0.0053, 0.0071, 0.0078], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 00:12:29,406 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.233e+02 3.379e+02 3.882e+02 4.685e+02 8.666e+02, threshold=7.765e+02, percent-clipped=1.0 +2023-03-09 00:12:32,524 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32144.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:12:35,886 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32147.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 00:12:59,417 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8890, 3.0560, 4.4404, 4.0719, 2.8225, 4.7264, 4.0563, 2.9875], + device='cuda:2'), covar=tensor([0.0299, 0.1128, 0.0170, 0.0217, 0.1277, 0.0154, 0.0311, 0.0841], + device='cuda:2'), in_proj_covar=tensor([0.0173, 0.0206, 0.0134, 0.0130, 0.0201, 0.0170, 0.0187, 0.0184], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 00:13:05,875 INFO [train.py:898] (2/4) Epoch 9, batch 3100, loss[loss=0.2248, simple_loss=0.3087, pruned_loss=0.07045, over 16931.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2791, pruned_loss=0.05666, over 3580979.45 frames. ], batch size: 78, lr: 1.24e-02, grad_scale: 8.0 +2023-03-09 00:13:13,349 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32179.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:13:32,014 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32195.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 00:13:51,083 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5230, 2.0025, 2.5797, 2.8175, 3.4002, 5.1728, 4.5904, 3.9499], + device='cuda:2'), covar=tensor([0.1036, 0.1747, 0.2223, 0.1140, 0.1576, 0.0091, 0.0326, 0.0403], + device='cuda:2'), in_proj_covar=tensor([0.0222, 0.0277, 0.0288, 0.0237, 0.0347, 0.0170, 0.0242, 0.0190], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 00:13:53,701 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-03-09 00:14:05,292 INFO [train.py:898] (2/4) Epoch 9, batch 3150, loss[loss=0.2084, simple_loss=0.2851, pruned_loss=0.0659, over 18029.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2788, pruned_loss=0.05616, over 3584347.08 frames. 
], batch size: 65, lr: 1.24e-02, grad_scale: 8.0 +2023-03-09 00:14:18,463 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32234.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:14:28,066 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.206e+02 3.166e+02 3.799e+02 4.714e+02 1.226e+03, threshold=7.598e+02, percent-clipped=2.0 +2023-03-09 00:15:02,108 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32271.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:15:04,021 INFO [train.py:898] (2/4) Epoch 9, batch 3200, loss[loss=0.2082, simple_loss=0.3, pruned_loss=0.05823, over 16938.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2794, pruned_loss=0.05612, over 3585566.99 frames. ], batch size: 78, lr: 1.24e-02, grad_scale: 8.0 +2023-03-09 00:15:24,582 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-09 00:15:43,082 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32306.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:15:50,702 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8338, 3.9750, 5.0850, 2.7760, 4.2168, 2.6584, 2.9705, 1.9375], + device='cuda:2'), covar=tensor([0.0849, 0.0586, 0.0087, 0.0672, 0.0502, 0.1942, 0.2330, 0.1613], + device='cuda:2'), in_proj_covar=tensor([0.0188, 0.0208, 0.0106, 0.0160, 0.0220, 0.0240, 0.0267, 0.0203], + device='cuda:2'), out_proj_covar=tensor([1.6729e-04, 1.8956e-04, 9.8047e-05, 1.4500e-04, 1.9959e-04, 2.2069e-04, + 2.4282e-04, 1.8794e-04], device='cuda:2') +2023-03-09 00:16:02,575 INFO [train.py:898] (2/4) Epoch 9, batch 3250, loss[loss=0.2025, simple_loss=0.2837, pruned_loss=0.06067, over 18291.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2804, pruned_loss=0.05655, over 3594727.92 frames. ], batch size: 49, lr: 1.24e-02, grad_scale: 8.0 +2023-03-09 00:16:24,631 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.580e+02 3.470e+02 4.080e+02 5.186e+02 8.555e+02, threshold=8.161e+02, percent-clipped=3.0 +2023-03-09 00:16:46,357 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32360.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:17:01,461 INFO [train.py:898] (2/4) Epoch 9, batch 3300, loss[loss=0.1757, simple_loss=0.2545, pruned_loss=0.04841, over 18503.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2802, pruned_loss=0.05656, over 3588168.57 frames. ], batch size: 44, lr: 1.24e-02, grad_scale: 8.0 +2023-03-09 00:17:27,438 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0312, 5.0464, 5.1678, 4.7885, 4.9897, 4.8154, 5.2460, 5.2136], + device='cuda:2'), covar=tensor([0.0053, 0.0054, 0.0054, 0.0084, 0.0044, 0.0109, 0.0054, 0.0081], + device='cuda:2'), in_proj_covar=tensor([0.0075, 0.0055, 0.0056, 0.0071, 0.0060, 0.0081, 0.0069, 0.0069], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 00:17:43,082 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32408.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:18:00,699 INFO [train.py:898] (2/4) Epoch 9, batch 3350, loss[loss=0.18, simple_loss=0.2658, pruned_loss=0.04713, over 18581.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2798, pruned_loss=0.05625, over 3586542.57 frames. 
], batch size: 54, lr: 1.24e-02, grad_scale: 8.0 +2023-03-09 00:18:03,537 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-09 00:18:22,501 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.160e+02 3.179e+02 3.976e+02 5.107e+02 1.332e+03, threshold=7.951e+02, percent-clipped=3.0 +2023-03-09 00:18:25,103 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32444.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:18:59,649 INFO [train.py:898] (2/4) Epoch 9, batch 3400, loss[loss=0.1987, simple_loss=0.2864, pruned_loss=0.05549, over 18480.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2798, pruned_loss=0.05655, over 3574582.02 frames. ], batch size: 53, lr: 1.23e-02, grad_scale: 8.0 +2023-03-09 00:19:06,733 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32479.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:19:14,711 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3435, 5.0902, 5.3843, 5.2732, 5.1620, 5.8831, 5.5868, 5.2644], + device='cuda:2'), covar=tensor([0.0853, 0.0611, 0.0551, 0.0740, 0.1358, 0.0670, 0.0571, 0.1497], + device='cuda:2'), in_proj_covar=tensor([0.0296, 0.0226, 0.0234, 0.0238, 0.0278, 0.0333, 0.0220, 0.0328], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-09 00:19:16,871 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4107, 5.4438, 5.0027, 5.4350, 5.4264, 4.7402, 5.3270, 5.0059], + device='cuda:2'), covar=tensor([0.0470, 0.0371, 0.1458, 0.0743, 0.0474, 0.0418, 0.0359, 0.0923], + device='cuda:2'), in_proj_covar=tensor([0.0384, 0.0431, 0.0577, 0.0342, 0.0319, 0.0396, 0.0420, 0.0552], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 00:19:16,949 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2519, 5.4774, 3.0282, 5.2791, 5.1777, 5.5387, 5.2826, 2.8144], + device='cuda:2'), covar=tensor([0.0128, 0.0045, 0.0608, 0.0061, 0.0064, 0.0046, 0.0087, 0.0881], + device='cuda:2'), in_proj_covar=tensor([0.0074, 0.0063, 0.0085, 0.0077, 0.0072, 0.0060, 0.0073, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 00:19:21,717 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32492.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:19:57,941 INFO [train.py:898] (2/4) Epoch 9, batch 3450, loss[loss=0.2287, simple_loss=0.3078, pruned_loss=0.07485, over 18500.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2814, pruned_loss=0.05729, over 3574998.87 frames. 
], batch size: 51, lr: 1.23e-02, grad_scale: 8.0 +2023-03-09 00:20:02,596 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32527.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:20:10,762 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32534.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:20:19,239 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.059e+02 3.448e+02 3.896e+02 4.764e+02 9.293e+02, threshold=7.793e+02, percent-clipped=1.0 +2023-03-09 00:20:25,663 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8276, 4.8577, 4.9116, 4.6010, 4.6164, 4.6602, 5.0817, 4.9926], + device='cuda:2'), covar=tensor([0.0063, 0.0073, 0.0060, 0.0098, 0.0069, 0.0111, 0.0059, 0.0086], + device='cuda:2'), in_proj_covar=tensor([0.0075, 0.0054, 0.0055, 0.0070, 0.0059, 0.0080, 0.0068, 0.0069], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0002, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 00:20:48,003 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3119, 5.8159, 5.4278, 5.5571, 5.3394, 5.3373, 5.8821, 5.8288], + device='cuda:2'), covar=tensor([0.1030, 0.0728, 0.0404, 0.0743, 0.1397, 0.0676, 0.0483, 0.0608], + device='cuda:2'), in_proj_covar=tensor([0.0494, 0.0402, 0.0304, 0.0445, 0.0599, 0.0443, 0.0564, 0.0420], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 00:20:53,927 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32571.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:20:56,533 INFO [train.py:898] (2/4) Epoch 9, batch 3500, loss[loss=0.1685, simple_loss=0.25, pruned_loss=0.04345, over 18251.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2809, pruned_loss=0.05703, over 3579068.33 frames. ], batch size: 47, lr: 1.23e-02, grad_scale: 8.0 +2023-03-09 00:21:06,898 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32582.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:21:34,284 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32606.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:21:47,772 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32619.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:21:52,018 INFO [train.py:898] (2/4) Epoch 9, batch 3550, loss[loss=0.2262, simple_loss=0.3044, pruned_loss=0.07396, over 13037.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2805, pruned_loss=0.05691, over 3577605.04 frames. ], batch size: 129, lr: 1.23e-02, grad_scale: 8.0 +2023-03-09 00:22:12,491 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.120e+02 3.503e+02 4.111e+02 5.019e+02 1.415e+03, threshold=8.222e+02, percent-clipped=4.0 +2023-03-09 00:22:25,469 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=32654.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:22:45,802 INFO [train.py:898] (2/4) Epoch 9, batch 3600, loss[loss=0.1974, simple_loss=0.282, pruned_loss=0.05644, over 18296.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2808, pruned_loss=0.05705, over 3575303.37 frames. ], batch size: 54, lr: 1.23e-02, grad_scale: 8.0 +2023-03-09 00:23:01,294 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. 
limit=2.0 +2023-03-09 00:23:53,331 INFO [train.py:898] (2/4) Epoch 10, batch 0, loss[loss=0.2008, simple_loss=0.2878, pruned_loss=0.05684, over 18324.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2878, pruned_loss=0.05684, over 18324.00 frames. ], batch size: 54, lr: 1.17e-02, grad_scale: 8.0 +2023-03-09 00:23:53,332 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 00:24:05,217 INFO [train.py:932] (2/4) Epoch 10, validation: loss=0.1621, simple_loss=0.2651, pruned_loss=0.02958, over 944034.00 frames. +2023-03-09 00:24:05,218 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 00:24:46,642 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.116e+02 3.572e+02 4.322e+02 5.704e+02 1.376e+03, threshold=8.645e+02, percent-clipped=6.0 +2023-03-09 00:25:03,986 INFO [train.py:898] (2/4) Epoch 10, batch 50, loss[loss=0.2006, simple_loss=0.2877, pruned_loss=0.0567, over 16983.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2789, pruned_loss=0.05762, over 813144.66 frames. ], batch size: 78, lr: 1.17e-02, grad_scale: 8.0 +2023-03-09 00:25:06,688 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0899, 5.1030, 4.6737, 5.0734, 5.0089, 4.4372, 5.0132, 4.5906], + device='cuda:2'), covar=tensor([0.0390, 0.0445, 0.1230, 0.0703, 0.0470, 0.0418, 0.0341, 0.1001], + device='cuda:2'), in_proj_covar=tensor([0.0375, 0.0426, 0.0565, 0.0335, 0.0314, 0.0390, 0.0413, 0.0535], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 00:26:02,543 INFO [train.py:898] (2/4) Epoch 10, batch 100, loss[loss=0.2007, simple_loss=0.288, pruned_loss=0.05673, over 18508.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2781, pruned_loss=0.05628, over 1435323.12 frames. ], batch size: 51, lr: 1.17e-02, grad_scale: 8.0 +2023-03-09 00:26:08,926 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-09 00:26:44,169 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.347e+02 3.358e+02 3.908e+02 4.733e+02 8.989e+02, threshold=7.816e+02, percent-clipped=2.0 +2023-03-09 00:27:01,145 INFO [train.py:898] (2/4) Epoch 10, batch 150, loss[loss=0.1655, simple_loss=0.2484, pruned_loss=0.0413, over 18509.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2784, pruned_loss=0.05658, over 1915409.77 frames. ], batch size: 44, lr: 1.17e-02, grad_scale: 8.0 +2023-03-09 00:27:21,500 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. limit=2.0 +2023-03-09 00:28:00,322 INFO [train.py:898] (2/4) Epoch 10, batch 200, loss[loss=0.2542, simple_loss=0.3234, pruned_loss=0.09251, over 12717.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2788, pruned_loss=0.05633, over 2290751.91 frames. 
], batch size: 129, lr: 1.17e-02, grad_scale: 8.0 +2023-03-09 00:28:42,826 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.147e+02 3.153e+02 3.905e+02 4.894e+02 1.439e+03, threshold=7.811e+02, percent-clipped=2.0 +2023-03-09 00:28:51,853 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.3389, 5.4586, 2.8737, 5.2738, 5.2367, 5.5665, 5.4076, 2.9187], + device='cuda:2'), covar=tensor([0.0128, 0.0054, 0.0621, 0.0059, 0.0051, 0.0040, 0.0065, 0.0785], + device='cuda:2'), in_proj_covar=tensor([0.0073, 0.0061, 0.0085, 0.0076, 0.0070, 0.0060, 0.0072, 0.0087], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 00:28:59,426 INFO [train.py:898] (2/4) Epoch 10, batch 250, loss[loss=0.1867, simple_loss=0.2703, pruned_loss=0.05157, over 18271.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2777, pruned_loss=0.05568, over 2584369.37 frames. ], batch size: 49, lr: 1.17e-02, grad_scale: 4.0 +2023-03-09 00:29:09,954 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32966.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:29:58,473 INFO [train.py:898] (2/4) Epoch 10, batch 300, loss[loss=0.2009, simple_loss=0.285, pruned_loss=0.05844, over 18240.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2795, pruned_loss=0.05622, over 2813859.77 frames. ], batch size: 60, lr: 1.16e-02, grad_scale: 4.0 +2023-03-09 00:30:22,061 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33027.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:30:27,805 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4836, 3.7111, 5.3807, 4.4923, 3.4204, 3.3822, 4.6583, 5.3927], + device='cuda:2'), covar=tensor([0.0890, 0.1804, 0.0066, 0.0304, 0.0828, 0.0940, 0.0294, 0.0153], + device='cuda:2'), in_proj_covar=tensor([0.0135, 0.0234, 0.0093, 0.0156, 0.0174, 0.0173, 0.0165, 0.0132], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 00:30:32,231 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8000, 5.3427, 5.2958, 5.3088, 4.8819, 5.2446, 4.6183, 5.2196], + device='cuda:2'), covar=tensor([0.0199, 0.0214, 0.0174, 0.0280, 0.0322, 0.0201, 0.1033, 0.0240], + device='cuda:2'), in_proj_covar=tensor([0.0162, 0.0207, 0.0191, 0.0222, 0.0205, 0.0211, 0.0271, 0.0196], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-09 00:30:40,677 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.108e+02 3.425e+02 4.260e+02 4.893e+02 1.115e+03, threshold=8.520e+02, percent-clipped=1.0 +2023-03-09 00:30:42,764 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.74 vs. limit=5.0 +2023-03-09 00:30:45,664 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9219, 4.9290, 4.9521, 4.9318, 4.8613, 5.5462, 5.1870, 4.8474], + device='cuda:2'), covar=tensor([0.1025, 0.0785, 0.0789, 0.0660, 0.1448, 0.0772, 0.0609, 0.1769], + device='cuda:2'), in_proj_covar=tensor([0.0299, 0.0230, 0.0240, 0.0241, 0.0280, 0.0334, 0.0221, 0.0331], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-09 00:30:57,414 INFO [train.py:898] (2/4) Epoch 10, batch 350, loss[loss=0.1882, simple_loss=0.2608, pruned_loss=0.05774, over 17698.00 frames. 
], tot_loss[loss=0.1964, simple_loss=0.2798, pruned_loss=0.0565, over 2967560.56 frames. ], batch size: 39, lr: 1.16e-02, grad_scale: 4.0 +2023-03-09 00:31:11,173 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33069.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:31:11,241 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4113, 3.5158, 5.1871, 4.1095, 3.0330, 3.0383, 4.3434, 5.2130], + device='cuda:2'), covar=tensor([0.0890, 0.1928, 0.0086, 0.0415, 0.1058, 0.1138, 0.0388, 0.0150], + device='cuda:2'), in_proj_covar=tensor([0.0137, 0.0237, 0.0094, 0.0158, 0.0176, 0.0175, 0.0168, 0.0134], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 00:31:20,127 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5669, 6.0555, 5.4834, 5.8117, 5.6607, 5.5485, 6.1682, 6.0994], + device='cuda:2'), covar=tensor([0.1070, 0.0619, 0.0368, 0.0586, 0.1225, 0.0635, 0.0454, 0.0489], + device='cuda:2'), in_proj_covar=tensor([0.0500, 0.0405, 0.0303, 0.0451, 0.0606, 0.0451, 0.0573, 0.0423], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 00:31:55,095 INFO [train.py:898] (2/4) Epoch 10, batch 400, loss[loss=0.1642, simple_loss=0.2449, pruned_loss=0.04175, over 18477.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2791, pruned_loss=0.05633, over 3101996.24 frames. ], batch size: 44, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:32:22,380 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33130.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:32:37,084 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.379e+02 3.296e+02 4.038e+02 4.885e+02 1.161e+03, threshold=8.076e+02, percent-clipped=2.0 +2023-03-09 00:32:51,601 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33155.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:32:53,558 INFO [train.py:898] (2/4) Epoch 10, batch 450, loss[loss=0.2044, simple_loss=0.2834, pruned_loss=0.06269, over 18620.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2793, pruned_loss=0.05599, over 3211031.34 frames. ], batch size: 52, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:33:52,229 INFO [train.py:898] (2/4) Epoch 10, batch 500, loss[loss=0.2202, simple_loss=0.3063, pruned_loss=0.06703, over 18481.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2789, pruned_loss=0.05568, over 3298768.04 frames. ], batch size: 59, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:34:03,259 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33216.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:34:21,499 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-09 00:34:33,503 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.970e+02 3.300e+02 3.836e+02 4.921e+02 1.033e+03, threshold=7.671e+02, percent-clipped=3.0 +2023-03-09 00:34:49,827 INFO [train.py:898] (2/4) Epoch 10, batch 550, loss[loss=0.1863, simple_loss=0.2765, pruned_loss=0.04806, over 18469.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2778, pruned_loss=0.05531, over 3364614.73 frames. 
], batch size: 53, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:35:13,306 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9649, 3.7075, 5.2292, 3.0710, 4.3901, 2.6001, 3.0591, 2.1369], + device='cuda:2'), covar=tensor([0.0867, 0.0798, 0.0066, 0.0607, 0.0518, 0.2267, 0.2193, 0.1683], + device='cuda:2'), in_proj_covar=tensor([0.0192, 0.0209, 0.0110, 0.0162, 0.0224, 0.0244, 0.0269, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 00:35:48,948 INFO [train.py:898] (2/4) Epoch 10, batch 600, loss[loss=0.2132, simple_loss=0.2954, pruned_loss=0.06551, over 17962.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2766, pruned_loss=0.05458, over 3417676.85 frames. ], batch size: 65, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:36:06,531 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33322.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:36:26,619 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1573, 3.8699, 5.1806, 2.9131, 4.2250, 2.6529, 3.2672, 2.0982], + device='cuda:2'), covar=tensor([0.0767, 0.0733, 0.0071, 0.0688, 0.0642, 0.2156, 0.2053, 0.1621], + device='cuda:2'), in_proj_covar=tensor([0.0193, 0.0210, 0.0110, 0.0163, 0.0224, 0.0244, 0.0269, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 00:36:30,340 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.217e+02 3.224e+02 3.633e+02 4.391e+02 9.720e+02, threshold=7.266e+02, percent-clipped=2.0 +2023-03-09 00:36:46,438 INFO [train.py:898] (2/4) Epoch 10, batch 650, loss[loss=0.1853, simple_loss=0.2695, pruned_loss=0.05054, over 18553.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2769, pruned_loss=0.05458, over 3464361.80 frames. ], batch size: 49, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:37:25,019 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.78 vs. limit=5.0 +2023-03-09 00:37:31,574 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5106, 3.5025, 2.0618, 4.3339, 3.0795, 4.4026, 2.1285, 3.6330], + device='cuda:2'), covar=tensor([0.0492, 0.0653, 0.1283, 0.0391, 0.0764, 0.0266, 0.1177, 0.0404], + device='cuda:2'), in_proj_covar=tensor([0.0181, 0.0204, 0.0174, 0.0225, 0.0174, 0.0226, 0.0188, 0.0179], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 00:37:45,674 INFO [train.py:898] (2/4) Epoch 10, batch 700, loss[loss=0.1609, simple_loss=0.2427, pruned_loss=0.0396, over 18565.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.277, pruned_loss=0.05466, over 3496119.80 frames. ], batch size: 45, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:38:07,304 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33425.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:38:27,845 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.134e+02 3.209e+02 3.744e+02 4.761e+02 1.041e+03, threshold=7.488e+02, percent-clipped=6.0 +2023-03-09 00:38:44,039 INFO [train.py:898] (2/4) Epoch 10, batch 750, loss[loss=0.1888, simple_loss=0.282, pruned_loss=0.04779, over 18540.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2763, pruned_loss=0.05437, over 3522636.58 frames. 
], batch size: 49, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:39:01,859 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33472.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:39:40,642 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-03-09 00:39:42,266 INFO [train.py:898] (2/4) Epoch 10, batch 800, loss[loss=0.2076, simple_loss=0.2963, pruned_loss=0.0595, over 18390.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2764, pruned_loss=0.0547, over 3534965.09 frames. ], batch size: 52, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:39:47,419 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33511.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:40:08,605 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0478, 2.4293, 2.1109, 2.4990, 3.0417, 3.1693, 2.7266, 2.4596], + device='cuda:2'), covar=tensor([0.0223, 0.0283, 0.0736, 0.0363, 0.0203, 0.0143, 0.0398, 0.0352], + device='cuda:2'), in_proj_covar=tensor([0.0111, 0.0101, 0.0150, 0.0131, 0.0096, 0.0082, 0.0128, 0.0121], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 00:40:13,163 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33533.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:40:24,513 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.088e+02 3.240e+02 4.102e+02 4.783e+02 1.200e+03, threshold=8.205e+02, percent-clipped=1.0 +2023-03-09 00:40:40,565 INFO [train.py:898] (2/4) Epoch 10, batch 850, loss[loss=0.153, simple_loss=0.2338, pruned_loss=0.0361, over 18482.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.277, pruned_loss=0.05521, over 3546359.50 frames. ], batch size: 44, lr: 1.16e-02, grad_scale: 8.0 +2023-03-09 00:41:39,910 INFO [train.py:898] (2/4) Epoch 10, batch 900, loss[loss=0.1964, simple_loss=0.2848, pruned_loss=0.05404, over 18365.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2776, pruned_loss=0.05549, over 3545946.70 frames. ], batch size: 56, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:41:57,558 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33622.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:42:22,395 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.227e+02 3.532e+02 4.038e+02 4.740e+02 8.485e+02, threshold=8.075e+02, percent-clipped=1.0 +2023-03-09 00:42:30,407 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.35 vs. limit=5.0 +2023-03-09 00:42:38,550 INFO [train.py:898] (2/4) Epoch 10, batch 950, loss[loss=0.1974, simple_loss=0.2905, pruned_loss=0.05218, over 18042.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2783, pruned_loss=0.05532, over 3557112.59 frames. ], batch size: 65, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:42:53,738 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33670.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:43:01,663 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33677.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:43:11,546 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.67 vs. 
limit=5.0 +2023-03-09 00:43:18,036 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9035, 4.8887, 5.0178, 4.9378, 4.8540, 5.5350, 5.1689, 4.9905], + device='cuda:2'), covar=tensor([0.0994, 0.0839, 0.0724, 0.0788, 0.1391, 0.0863, 0.0698, 0.1869], + device='cuda:2'), in_proj_covar=tensor([0.0297, 0.0230, 0.0241, 0.0241, 0.0284, 0.0341, 0.0225, 0.0332], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-09 00:43:20,725 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0 +2023-03-09 00:43:23,934 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5294, 2.7743, 3.6771, 3.6285, 2.5775, 4.0670, 3.7547, 2.7167], + device='cuda:2'), covar=tensor([0.0419, 0.1255, 0.0274, 0.0264, 0.1415, 0.0204, 0.0464, 0.1050], + device='cuda:2'), in_proj_covar=tensor([0.0183, 0.0221, 0.0145, 0.0137, 0.0210, 0.0179, 0.0201, 0.0196], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 00:43:36,487 INFO [train.py:898] (2/4) Epoch 10, batch 1000, loss[loss=0.1831, simple_loss=0.2609, pruned_loss=0.05266, over 18463.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2791, pruned_loss=0.05573, over 3563759.43 frames. ], batch size: 43, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:43:57,424 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33725.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:44:13,392 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33738.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 00:44:18,697 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.319e+02 3.380e+02 3.936e+02 4.816e+02 9.502e+02, threshold=7.872e+02, percent-clipped=1.0 +2023-03-09 00:44:35,204 INFO [train.py:898] (2/4) Epoch 10, batch 1050, loss[loss=0.1831, simple_loss=0.2608, pruned_loss=0.05265, over 18513.00 frames. ], tot_loss[loss=0.195, simple_loss=0.279, pruned_loss=0.05553, over 3575379.11 frames. ], batch size: 44, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:44:37,211 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. 
limit=2.0 +2023-03-09 00:44:37,742 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8058, 5.4155, 5.3852, 5.3374, 4.9215, 5.2342, 4.6335, 5.2556], + device='cuda:2'), covar=tensor([0.0194, 0.0223, 0.0165, 0.0287, 0.0321, 0.0228, 0.1118, 0.0279], + device='cuda:2'), in_proj_covar=tensor([0.0167, 0.0211, 0.0196, 0.0227, 0.0208, 0.0216, 0.0278, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-09 00:44:38,927 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5523, 4.2101, 4.3452, 3.1788, 3.5483, 3.4064, 2.4968, 2.0723], + device='cuda:2'), covar=tensor([0.0218, 0.0177, 0.0078, 0.0256, 0.0345, 0.0220, 0.0690, 0.0872], + device='cuda:2'), in_proj_covar=tensor([0.0056, 0.0046, 0.0044, 0.0056, 0.0077, 0.0055, 0.0069, 0.0075], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 00:44:53,726 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33773.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:44:53,896 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8473, 4.7927, 4.9148, 4.6467, 4.5862, 4.6961, 5.0037, 4.9961], + device='cuda:2'), covar=tensor([0.0051, 0.0064, 0.0058, 0.0081, 0.0073, 0.0100, 0.0070, 0.0076], + device='cuda:2'), in_proj_covar=tensor([0.0076, 0.0055, 0.0057, 0.0070, 0.0060, 0.0083, 0.0069, 0.0070], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 00:45:34,467 INFO [train.py:898] (2/4) Epoch 10, batch 1100, loss[loss=0.2103, simple_loss=0.2912, pruned_loss=0.06467, over 18617.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.279, pruned_loss=0.05559, over 3571079.37 frames. ], batch size: 52, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:45:39,300 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33811.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:45:58,710 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33828.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:46:16,688 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.260e+02 3.207e+02 4.126e+02 4.735e+02 1.316e+03, threshold=8.252e+02, percent-clipped=4.0 +2023-03-09 00:46:32,949 INFO [train.py:898] (2/4) Epoch 10, batch 1150, loss[loss=0.1678, simple_loss=0.2424, pruned_loss=0.04655, over 17638.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2774, pruned_loss=0.05479, over 3578302.86 frames. 
], batch size: 39, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:46:36,071 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=33859.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:47:01,251 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8215, 4.4511, 4.5431, 3.3666, 3.6827, 3.7000, 2.5999, 2.1646], + device='cuda:2'), covar=tensor([0.0184, 0.0144, 0.0077, 0.0236, 0.0316, 0.0184, 0.0652, 0.0839], + device='cuda:2'), in_proj_covar=tensor([0.0056, 0.0046, 0.0045, 0.0057, 0.0076, 0.0055, 0.0070, 0.0075], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 00:47:27,462 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8023, 3.7663, 4.8527, 2.8682, 4.2126, 2.6642, 2.8751, 1.9209], + device='cuda:2'), covar=tensor([0.0885, 0.0709, 0.0111, 0.0708, 0.0492, 0.2061, 0.2373, 0.1723], + device='cuda:2'), in_proj_covar=tensor([0.0192, 0.0210, 0.0110, 0.0162, 0.0223, 0.0242, 0.0271, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 00:47:31,464 INFO [train.py:898] (2/4) Epoch 10, batch 1200, loss[loss=0.1822, simple_loss=0.2674, pruned_loss=0.04855, over 18302.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2773, pruned_loss=0.05483, over 3583281.01 frames. ], batch size: 54, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:48:09,172 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33939.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:48:13,371 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.163e+02 3.321e+02 3.896e+02 4.849e+02 9.830e+02, threshold=7.793e+02, percent-clipped=3.0 +2023-03-09 00:48:29,980 INFO [train.py:898] (2/4) Epoch 10, batch 1250, loss[loss=0.1805, simple_loss=0.2621, pruned_loss=0.04943, over 18267.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2772, pruned_loss=0.05496, over 3587645.52 frames. ], batch size: 47, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:49:07,890 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-03-09 00:49:24,375 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34000.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:49:32,597 INFO [train.py:898] (2/4) Epoch 10, batch 1300, loss[loss=0.1676, simple_loss=0.2493, pruned_loss=0.04294, over 18253.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2777, pruned_loss=0.05514, over 3587351.82 frames. ], batch size: 45, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:50:02,584 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34033.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 00:50:14,325 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.927e+02 3.198e+02 3.729e+02 4.411e+02 1.072e+03, threshold=7.459e+02, percent-clipped=4.0 +2023-03-09 00:50:15,937 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34044.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:50:30,795 INFO [train.py:898] (2/4) Epoch 10, batch 1350, loss[loss=0.2182, simple_loss=0.2988, pruned_loss=0.06874, over 18437.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2782, pruned_loss=0.05545, over 3572081.69 frames. ], batch size: 59, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:51:17,642 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. 
limit=2.0 +2023-03-09 00:51:27,362 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34105.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:51:29,261 INFO [train.py:898] (2/4) Epoch 10, batch 1400, loss[loss=0.1952, simple_loss=0.2771, pruned_loss=0.05664, over 16040.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2781, pruned_loss=0.05562, over 3558728.27 frames. ], batch size: 94, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:51:54,996 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34128.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:52:11,991 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.227e+02 3.479e+02 4.188e+02 5.259e+02 1.052e+03, threshold=8.376e+02, percent-clipped=5.0 +2023-03-09 00:52:15,181 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34145.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:52:28,410 INFO [train.py:898] (2/4) Epoch 10, batch 1450, loss[loss=0.2266, simple_loss=0.3068, pruned_loss=0.07317, over 18483.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2781, pruned_loss=0.05574, over 3560505.06 frames. ], batch size: 59, lr: 1.15e-02, grad_scale: 8.0 +2023-03-09 00:52:50,504 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34176.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:53:26,186 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34206.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:53:26,947 INFO [train.py:898] (2/4) Epoch 10, batch 1500, loss[loss=0.2332, simple_loss=0.303, pruned_loss=0.08169, over 12363.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2784, pruned_loss=0.05574, over 3551896.66 frames. ], batch size: 129, lr: 1.14e-02, grad_scale: 8.0 +2023-03-09 00:53:43,230 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-09 00:54:08,715 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.894e+02 3.400e+02 4.165e+02 5.471e+02 1.005e+03, threshold=8.329e+02, percent-clipped=3.0 +2023-03-09 00:54:25,066 INFO [train.py:898] (2/4) Epoch 10, batch 1550, loss[loss=0.2253, simple_loss=0.2957, pruned_loss=0.07742, over 12366.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2782, pruned_loss=0.05571, over 3555569.71 frames. ], batch size: 130, lr: 1.14e-02, grad_scale: 8.0 +2023-03-09 00:55:09,435 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34295.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:55:23,418 INFO [train.py:898] (2/4) Epoch 10, batch 1600, loss[loss=0.1725, simple_loss=0.2664, pruned_loss=0.03931, over 18268.00 frames. ], tot_loss[loss=0.194, simple_loss=0.2777, pruned_loss=0.05519, over 3571489.08 frames. 
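
Note on the zipformer.py:625 lines: each encoder stack has its own warmup window in batches (warmup_begin to warmup_end) and the log reports how many of its layers were randomly skipped this step. By batch_count ~34000 the windows are long past, yet an occasional num_to_drop=1 still appears, which suggests the per-layer drop probability decays during warmup to a small floor rather than to zero. A sketch under that assumption; initial_p and final_p are guessed values, not read from the log:

    import random

    def pick_layers_to_drop(batch_count, warmup_begin, warmup_end,
                            num_layers, initial_p=0.5, final_p=0.05):
        # Drop probability decays linearly from initial_p to final_p over
        # [warmup_begin, warmup_end], then stays at final_p (assumed schedule).
        if batch_count <= warmup_begin:
            p = initial_p
        elif batch_count >= warmup_end:
            p = final_p
        else:
            frac = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
            p = initial_p + frac * (final_p - initial_p)
        layers_to_drop = {i for i in range(num_layers) if random.random() < p}
        return len(layers_to_drop), layers_to_drop

With a floor around 0.05 per layer, num_to_drop=0 on most steps plus a single dropped layer every so often is exactly the pattern in these lines.
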
], batch size: 49, lr: 1.14e-02, grad_scale: 8.0 +2023-03-09 00:55:41,270 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8505, 4.9069, 4.9635, 4.7494, 4.6536, 4.7299, 5.1200, 5.0516], + device='cuda:2'), covar=tensor([0.0062, 0.0075, 0.0063, 0.0083, 0.0065, 0.0115, 0.0059, 0.0106], + device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0055, 0.0057, 0.0072, 0.0061, 0.0084, 0.0070, 0.0070], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 00:55:53,855 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34333.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:56:05,594 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.752e+02 3.318e+02 3.747e+02 4.447e+02 1.015e+03, threshold=7.495e+02, percent-clipped=2.0 +2023-03-09 00:56:21,843 INFO [train.py:898] (2/4) Epoch 10, batch 1650, loss[loss=0.1914, simple_loss=0.289, pruned_loss=0.04695, over 18551.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2776, pruned_loss=0.05504, over 3583208.18 frames. ], batch size: 49, lr: 1.14e-02, grad_scale: 8.0 +2023-03-09 00:56:49,659 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34381.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:57:11,874 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34400.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:57:20,096 INFO [train.py:898] (2/4) Epoch 10, batch 1700, loss[loss=0.1864, simple_loss=0.2767, pruned_loss=0.04805, over 18473.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2774, pruned_loss=0.05488, over 3587592.12 frames. ], batch size: 53, lr: 1.14e-02, grad_scale: 4.0 +2023-03-09 00:57:20,483 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8724, 4.4659, 4.7190, 3.2775, 3.6609, 3.6765, 2.5622, 2.1912], + device='cuda:2'), covar=tensor([0.0210, 0.0153, 0.0058, 0.0315, 0.0367, 0.0199, 0.0784, 0.1001], + device='cuda:2'), in_proj_covar=tensor([0.0058, 0.0047, 0.0045, 0.0058, 0.0079, 0.0055, 0.0071, 0.0077], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 00:58:03,526 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.339e+02 3.311e+02 3.796e+02 4.465e+02 1.114e+03, threshold=7.593e+02, percent-clipped=2.0 +2023-03-09 00:58:18,863 INFO [train.py:898] (2/4) Epoch 10, batch 1750, loss[loss=0.2015, simple_loss=0.2921, pruned_loss=0.05543, over 18238.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2767, pruned_loss=0.05451, over 3588089.94 frames. 
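
Note on the attn_weights_entropy dumps (zipformer.py:1455): they are periodic diagnostics on the self-attention, reporting for each head the entropy of its attention distribution averaged over query positions, together with covariance statistics of those values. Entropy near zero means a head attends to a single frame; a value of about 5.4, as in the tensor above, means the mass is spread over roughly exp(5.4) ~ 220 frames. A minimal sketch of the entropy itself (the covar bookkeeping is omitted):

    import torch

    def attn_weights_entropy(attn_weights, eps=1.0e-20):
        # attn_weights: (num_heads, seq_len, seq_len), each row a softmax
        # distribution over source positions.
        p = attn_weights.clamp(min=eps)
        entropy = -(p * p.log()).sum(dim=-1)  # (num_heads, seq_len)
        return entropy.mean(dim=-1)           # one value per head, as logged
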
], batch size: 60, lr: 1.14e-02, grad_scale: 4.0 +2023-03-09 00:58:20,965 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4393, 3.3000, 3.0776, 2.6312, 3.0692, 2.3098, 2.4080, 3.2334], + device='cuda:2'), covar=tensor([0.0045, 0.0096, 0.0107, 0.0153, 0.0094, 0.0226, 0.0227, 0.0071], + device='cuda:2'), in_proj_covar=tensor([0.0086, 0.0112, 0.0101, 0.0149, 0.0101, 0.0146, 0.0152, 0.0083], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 00:58:57,287 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5949, 2.8268, 4.3379, 3.9376, 2.8467, 4.6561, 3.9485, 2.8562], + device='cuda:2'), covar=tensor([0.0428, 0.1475, 0.0173, 0.0284, 0.1420, 0.0188, 0.0460, 0.1011], + device='cuda:2'), in_proj_covar=tensor([0.0186, 0.0219, 0.0146, 0.0140, 0.0210, 0.0182, 0.0202, 0.0193], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 00:59:06,376 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0531, 5.6155, 5.1923, 5.4012, 5.1636, 5.0998, 5.6896, 5.6398], + device='cuda:2'), covar=tensor([0.1292, 0.0710, 0.0549, 0.0704, 0.1554, 0.0736, 0.0570, 0.0588], + device='cuda:2'), in_proj_covar=tensor([0.0513, 0.0422, 0.0321, 0.0460, 0.0631, 0.0463, 0.0590, 0.0434], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 00:59:11,441 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34501.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 00:59:18,088 INFO [train.py:898] (2/4) Epoch 10, batch 1800, loss[loss=0.1891, simple_loss=0.2747, pruned_loss=0.05181, over 18226.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2767, pruned_loss=0.05444, over 3592406.31 frames. ], batch size: 60, lr: 1.14e-02, grad_scale: 4.0 +2023-03-09 01:00:01,443 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.848e+02 3.049e+02 3.588e+02 4.503e+02 1.027e+03, threshold=7.176e+02, percent-clipped=5.0 +2023-03-09 01:00:16,829 INFO [train.py:898] (2/4) Epoch 10, batch 1850, loss[loss=0.2165, simple_loss=0.303, pruned_loss=0.06497, over 17971.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2772, pruned_loss=0.05467, over 3589556.00 frames. ], batch size: 65, lr: 1.14e-02, grad_scale: 4.0 +2023-03-09 01:00:17,284 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7657, 4.4501, 4.6250, 3.3249, 3.6295, 3.4605, 2.5740, 2.3348], + device='cuda:2'), covar=tensor([0.0218, 0.0137, 0.0058, 0.0261, 0.0358, 0.0214, 0.0723, 0.0819], + device='cuda:2'), in_proj_covar=tensor([0.0058, 0.0046, 0.0045, 0.0057, 0.0078, 0.0055, 0.0071, 0.0076], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 01:00:33,058 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-03-09 01:00:35,062 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. 
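
Note on the scaling.py:679 Whitening lines: they compare a measured metric against a fixed limit (2.0 for the grouped 96- and 192-channel checks, 5.0 for the full 384-channel one). The metric is scale-invariant: 1.0 when the channel covariance of the activations is "white" (all eigenvalues equal), larger the more a few directions dominate, and the module only pushes back on the activations when the metric exceeds the limit. A sketch of one plausible form of that metric, the ratio of the second to the squared first moment of the covariance eigenvalues; treat the exact formula as an assumption about scaling.py rather than a copy of it:

    import torch

    def whitening_metric(x, num_groups):
        # x: (num_frames, num_channels).  Returns E[lambda^2] / E[lambda]^2
        # over the eigenvalues of each group's channel covariance, averaged
        # over groups: 1.0 when white, growing as the covariance degenerates.
        num_frames, num_channels = x.shape
        cpg = num_channels // num_groups                # channels per group
        xg = x.reshape(num_frames, num_groups, cpg).transpose(0, 1)   # (G, T, cpg)
        covar = torch.matmul(xg.transpose(1, 2), xg) / num_frames     # (G, cpg, cpg)
        trace = covar.diagonal(dim1=1, dim2=2).sum(-1)                # sum of eigenvalues
        trace_sq = torch.matmul(covar, covar).diagonal(dim1=1, dim2=2).sum(-1)
        return (cpg * trace_sq / trace.pow(2)).mean()

On this reading, metric=1.95 vs. limit=2.0 above is a group sitting just inside the allowed region, so no correction fires.
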
limit=2.0 +2023-03-09 01:00:59,517 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34593.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:01:00,615 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3486, 5.9980, 5.4663, 5.7307, 5.4883, 5.4904, 6.0516, 5.9886], + device='cuda:2'), covar=tensor([0.1240, 0.0692, 0.0412, 0.0711, 0.1513, 0.0649, 0.0493, 0.0585], + device='cuda:2'), in_proj_covar=tensor([0.0523, 0.0425, 0.0325, 0.0469, 0.0641, 0.0471, 0.0599, 0.0440], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 01:01:01,862 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34595.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:01:15,581 INFO [train.py:898] (2/4) Epoch 10, batch 1900, loss[loss=0.1638, simple_loss=0.2421, pruned_loss=0.04273, over 18431.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2776, pruned_loss=0.05515, over 3595795.20 frames. ], batch size: 43, lr: 1.14e-02, grad_scale: 4.0 +2023-03-09 01:01:33,785 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34622.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:01:33,988 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-09 01:01:38,192 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34626.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:01:58,494 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34643.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:01:59,384 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.282e+02 3.244e+02 3.774e+02 4.780e+02 1.001e+03, threshold=7.549e+02, percent-clipped=4.0 +2023-03-09 01:02:01,398 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-03-09 01:02:11,280 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34654.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:02:14,317 INFO [train.py:898] (2/4) Epoch 10, batch 1950, loss[loss=0.1744, simple_loss=0.251, pruned_loss=0.04893, over 18391.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2786, pruned_loss=0.05567, over 3592583.88 frames. ], batch size: 42, lr: 1.14e-02, grad_scale: 4.0 +2023-03-09 01:02:15,159 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-09 01:02:18,083 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4109, 5.3436, 4.9685, 5.3276, 5.2909, 4.7799, 5.2403, 4.9745], + device='cuda:2'), covar=tensor([0.0376, 0.0408, 0.1314, 0.0724, 0.0529, 0.0357, 0.0370, 0.0963], + device='cuda:2'), in_proj_covar=tensor([0.0389, 0.0449, 0.0597, 0.0353, 0.0337, 0.0408, 0.0430, 0.0563], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 01:02:22,343 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. 
limit=2.0 +2023-03-09 01:02:44,907 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34683.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 01:02:49,853 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34687.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:03:05,158 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34700.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:03:07,792 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 01:03:10,591 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9705, 4.9345, 4.5609, 4.8808, 4.8702, 4.4170, 4.8481, 4.5825], + device='cuda:2'), covar=tensor([0.0374, 0.0415, 0.1275, 0.0722, 0.0509, 0.0361, 0.0359, 0.0950], + device='cuda:2'), in_proj_covar=tensor([0.0386, 0.0445, 0.0596, 0.0351, 0.0334, 0.0406, 0.0428, 0.0560], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 01:03:12,471 INFO [train.py:898] (2/4) Epoch 10, batch 2000, loss[loss=0.2166, simple_loss=0.2967, pruned_loss=0.06828, over 18239.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2794, pruned_loss=0.05645, over 3578940.43 frames. ], batch size: 60, lr: 1.14e-02, grad_scale: 8.0 +2023-03-09 01:03:16,411 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34710.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 01:03:56,810 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.230e+02 3.366e+02 4.037e+02 4.949e+02 9.171e+02, threshold=8.073e+02, percent-clipped=4.0 +2023-03-09 01:04:01,495 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34748.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:04:11,469 INFO [train.py:898] (2/4) Epoch 10, batch 2050, loss[loss=0.1947, simple_loss=0.2745, pruned_loss=0.05741, over 18491.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2793, pruned_loss=0.05665, over 3575387.32 frames. ], batch size: 53, lr: 1.14e-02, grad_scale: 8.0 +2023-03-09 01:04:28,637 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34771.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 01:05:04,188 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34801.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:05:10,832 INFO [train.py:898] (2/4) Epoch 10, batch 2100, loss[loss=0.2561, simple_loss=0.3193, pruned_loss=0.09648, over 12103.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2792, pruned_loss=0.0563, over 3577928.52 frames. ], batch size: 130, lr: 1.14e-02, grad_scale: 8.0 +2023-03-09 01:05:54,597 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.339e+02 3.283e+02 3.862e+02 4.779e+02 1.024e+03, threshold=7.723e+02, percent-clipped=2.0 +2023-03-09 01:06:01,099 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=34849.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:06:09,704 INFO [train.py:898] (2/4) Epoch 10, batch 2150, loss[loss=0.1686, simple_loss=0.245, pruned_loss=0.04605, over 18139.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.279, pruned_loss=0.05611, over 3581395.15 frames. ], batch size: 44, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:06:55,585 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. 
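
Note on grad_scale in the loss lines: it is the loss-scaling factor of fp16 mixed-precision training. Gradients are computed on a scaled loss, the scale is halved whenever a batch overflows to inf/NaN, and it is grown again after a run of stable steps, which is why it moves between 4.0 and 8.0 in this stretch and reaches 16.0 later in the log. A standard torch.cuda.amp step showing where the number comes from; model, optimizer and compute_loss are placeholders, not icefall's actual names:

    import torch
    from torch.cuda.amp import GradScaler, autocast

    scaler = GradScaler()  # growth/backoff of the scale is automatic

    def train_step(model, optimizer, batch):
        optimizer.zero_grad()
        with autocast():
            loss = compute_loss(model, batch)  # placeholder for the transducer loss
        scaler.scale(loss).backward()   # backward on the scaled loss
        scaler.step(optimizer)          # unscales; skips the step on inf/NaN
        scaler.update()                 # halves the scale on overflow, else grows it
        return scaler.get_scale()       # the "grad_scale" printed in the log
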
limit=2.0 +2023-03-09 01:07:07,670 INFO [train.py:898] (2/4) Epoch 10, batch 2200, loss[loss=0.2179, simple_loss=0.3021, pruned_loss=0.06687, over 18279.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2786, pruned_loss=0.05581, over 3590848.65 frames. ], batch size: 57, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:07:42,316 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0754, 5.4227, 2.7909, 5.1601, 5.1175, 5.4528, 5.2269, 2.7547], + device='cuda:2'), covar=tensor([0.0169, 0.0046, 0.0740, 0.0075, 0.0059, 0.0047, 0.0078, 0.0943], + device='cuda:2'), in_proj_covar=tensor([0.0074, 0.0064, 0.0087, 0.0079, 0.0073, 0.0063, 0.0075, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 01:07:50,018 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.072e+02 3.371e+02 4.058e+02 4.999e+02 1.282e+03, threshold=8.115e+02, percent-clipped=5.0 +2023-03-09 01:07:57,096 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34949.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:08:05,878 INFO [train.py:898] (2/4) Epoch 10, batch 2250, loss[loss=0.1738, simple_loss=0.2626, pruned_loss=0.04249, over 18292.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2792, pruned_loss=0.05598, over 3590808.33 frames. ], batch size: 49, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:08:29,630 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34978.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 01:08:34,769 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34982.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:09:05,056 INFO [train.py:898] (2/4) Epoch 10, batch 2300, loss[loss=0.1666, simple_loss=0.2567, pruned_loss=0.03822, over 18261.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2784, pruned_loss=0.05576, over 3580824.15 frames. ], batch size: 47, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:09:15,839 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7077, 3.7526, 3.6408, 3.0807, 3.5257, 2.8749, 2.8258, 3.7983], + device='cuda:2'), covar=tensor([0.0040, 0.0064, 0.0070, 0.0121, 0.0085, 0.0143, 0.0158, 0.0052], + device='cuda:2'), in_proj_covar=tensor([0.0087, 0.0111, 0.0101, 0.0148, 0.0102, 0.0145, 0.0151, 0.0083], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 01:09:30,852 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0465, 4.0518, 5.0871, 3.0295, 4.3453, 2.7490, 3.0338, 1.9184], + device='cuda:2'), covar=tensor([0.0819, 0.0605, 0.0090, 0.0643, 0.0536, 0.2004, 0.2361, 0.1684], + device='cuda:2'), in_proj_covar=tensor([0.0190, 0.0210, 0.0111, 0.0164, 0.0224, 0.0243, 0.0274, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 01:09:48,164 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.223e+02 3.414e+02 4.118e+02 5.165e+02 1.398e+03, threshold=8.236e+02, percent-clipped=7.0 +2023-03-09 01:10:04,303 INFO [train.py:898] (2/4) Epoch 10, batch 2350, loss[loss=0.1634, simple_loss=0.2395, pruned_loss=0.04365, over 18499.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2782, pruned_loss=0.05551, over 3580117.80 frames. 
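
Note on the three numbers in each train.py:898 line: simple_loss comes from the cheap linear joiner whose gradients define the pruning bounds, pruned_loss from the full joiner evaluated only inside the pruned lattice, and loss is their weighted sum; tot_loss is the same sum averaged over all frames since the last reset. A simple-loss weight of 0.5 reproduces the logged values exactly:

    # k2's rnnt_loss_smoothed / rnnt_loss_pruned produce the two parts;
    # only the weighted sum is shown here.
    SIMPLE_LOSS_SCALE = 0.5

    def combine_losses(simple_loss: float, pruned_loss: float) -> float:
        return SIMPLE_LOSS_SCALE * simple_loss + pruned_loss

    # batch 2200 above: 0.5 * 0.3021 + 0.06687 = 0.2179
    assert abs(combine_losses(0.3021, 0.06687) - 0.2179) < 1e-4
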
], batch size: 44, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:10:09,461 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-09 01:10:14,509 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35066.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 01:10:45,270 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35092.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:11:03,090 INFO [train.py:898] (2/4) Epoch 10, batch 2400, loss[loss=0.2268, simple_loss=0.3091, pruned_loss=0.07224, over 18097.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2778, pruned_loss=0.05541, over 3578512.29 frames. ], batch size: 62, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:11:27,397 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6985, 3.6078, 2.0898, 4.5402, 3.0985, 4.5837, 2.4777, 3.9840], + device='cuda:2'), covar=tensor([0.0555, 0.0680, 0.1442, 0.0384, 0.0869, 0.0182, 0.1169, 0.0399], + device='cuda:2'), in_proj_covar=tensor([0.0186, 0.0208, 0.0176, 0.0226, 0.0179, 0.0227, 0.0190, 0.0180], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 01:11:46,352 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.146e+02 3.097e+02 3.497e+02 4.481e+02 9.242e+02, threshold=6.993e+02, percent-clipped=1.0 +2023-03-09 01:11:50,308 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3242, 4.3063, 2.4568, 4.2220, 5.3277, 2.4241, 3.8941, 3.8890], + device='cuda:2'), covar=tensor([0.0061, 0.0910, 0.1513, 0.0532, 0.0038, 0.1306, 0.0590, 0.0726], + device='cuda:2'), in_proj_covar=tensor([0.0109, 0.0223, 0.0190, 0.0190, 0.0087, 0.0174, 0.0201, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 01:11:57,253 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35153.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:12:02,048 INFO [train.py:898] (2/4) Epoch 10, batch 2450, loss[loss=0.1718, simple_loss=0.2505, pruned_loss=0.04656, over 18501.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2768, pruned_loss=0.05469, over 3592576.71 frames. ], batch size: 47, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:13:00,745 INFO [train.py:898] (2/4) Epoch 10, batch 2500, loss[loss=0.1975, simple_loss=0.2883, pruned_loss=0.05341, over 18363.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2763, pruned_loss=0.05441, over 3601401.56 frames. ], batch size: 55, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:13:01,142 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2097, 3.4708, 3.4545, 2.9075, 3.1393, 3.1528, 2.5664, 2.2431], + device='cuda:2'), covar=tensor([0.0183, 0.0117, 0.0097, 0.0250, 0.0303, 0.0168, 0.0554, 0.0654], + device='cuda:2'), in_proj_covar=tensor([0.0058, 0.0046, 0.0046, 0.0057, 0.0079, 0.0056, 0.0073, 0.0077], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 01:13:37,995 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-03-09 01:13:38,077 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. 
limit=2.0 +2023-03-09 01:13:43,835 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.285e+02 3.181e+02 4.060e+02 4.793e+02 9.479e+02, threshold=8.119e+02, percent-clipped=6.0 +2023-03-09 01:13:50,305 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35249.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:13:58,868 INFO [train.py:898] (2/4) Epoch 10, batch 2550, loss[loss=0.1938, simple_loss=0.2838, pruned_loss=0.05189, over 18241.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2764, pruned_loss=0.05455, over 3596860.08 frames. ], batch size: 60, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:14:23,550 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35278.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 01:14:28,038 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35282.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:14:45,832 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35297.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:14:46,364 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-03-09 01:14:51,170 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3875, 2.6544, 2.5839, 2.7135, 3.4517, 3.2753, 2.9001, 2.8321], + device='cuda:2'), covar=tensor([0.0189, 0.0316, 0.0562, 0.0356, 0.0191, 0.0198, 0.0389, 0.0339], + device='cuda:2'), in_proj_covar=tensor([0.0115, 0.0103, 0.0150, 0.0135, 0.0099, 0.0084, 0.0132, 0.0126], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 01:14:57,513 INFO [train.py:898] (2/4) Epoch 10, batch 2600, loss[loss=0.1955, simple_loss=0.2881, pruned_loss=0.0515, over 18359.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2766, pruned_loss=0.05456, over 3584707.72 frames. ], batch size: 46, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:15:05,100 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9331, 4.6087, 4.7048, 3.5690, 3.8273, 3.9295, 2.9111, 2.4219], + device='cuda:2'), covar=tensor([0.0214, 0.0179, 0.0060, 0.0247, 0.0320, 0.0156, 0.0637, 0.0866], + device='cuda:2'), in_proj_covar=tensor([0.0057, 0.0047, 0.0045, 0.0056, 0.0078, 0.0055, 0.0072, 0.0076], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 01:15:20,682 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35326.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:15:25,251 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35330.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:15:40,655 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.265e+02 3.407e+02 3.893e+02 4.729e+02 1.117e+03, threshold=7.786e+02, percent-clipped=1.0 +2023-03-09 01:15:42,234 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35345.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 01:15:56,563 INFO [train.py:898] (2/4) Epoch 10, batch 2650, loss[loss=0.2007, simple_loss=0.2825, pruned_loss=0.05947, over 18484.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2767, pruned_loss=0.05433, over 3580432.04 frames. 
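
Note on "batch size" in these lines: it counts cuts, and it swings from 39 to 130 across this stretch while the frame totals stay comparable, because batches are packed by audio duration rather than by count; a batch of short utterances simply holds many more cuts. A minimal lhotse sketch of duration-based bucketing of this kind; the cuts path is a placeholder and the numeric values are assumed to match this run's setup:

    from lhotse import CutSet
    from lhotse.dataset import DynamicBucketingSampler

    cuts = CutSet.from_file("train_cuts.jsonl.gz")  # placeholder path
    sampler = DynamicBucketingSampler(
        cuts,
        max_duration=750,  # seconds of audio per batch, not a cut count
        num_buckets=30,    # cuts of similar duration batch together, less padding
        shuffle=True,
        drop_last=True,
    )
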
], batch size: 53, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:16:08,230 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35366.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 01:16:17,183 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35374.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:16:52,879 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.69 vs. limit=5.0 +2023-03-09 01:16:54,671 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35406.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 01:16:55,351 INFO [train.py:898] (2/4) Epoch 10, batch 2700, loss[loss=0.1817, simple_loss=0.2696, pruned_loss=0.04692, over 18270.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2775, pruned_loss=0.05506, over 3553354.65 frames. ], batch size: 47, lr: 1.13e-02, grad_scale: 8.0 +2023-03-09 01:17:03,987 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35414.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 01:17:04,225 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8553, 3.7273, 5.2822, 3.1697, 4.5240, 2.7332, 3.2039, 2.0791], + device='cuda:2'), covar=tensor([0.0792, 0.0726, 0.0066, 0.0520, 0.0416, 0.1946, 0.2033, 0.1578], + device='cuda:2'), in_proj_covar=tensor([0.0192, 0.0213, 0.0114, 0.0166, 0.0226, 0.0244, 0.0276, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 01:17:07,376 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2267, 5.1728, 5.5192, 5.2673, 5.1591, 6.0243, 5.5774, 5.2637], + device='cuda:2'), covar=tensor([0.0908, 0.0624, 0.0630, 0.0782, 0.1416, 0.0700, 0.0572, 0.1740], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0228, 0.0245, 0.0245, 0.0282, 0.0343, 0.0227, 0.0334], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-09 01:17:28,502 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35435.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:17:37,961 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.429e+02 3.322e+02 4.284e+02 5.081e+02 8.413e+02, threshold=8.569e+02, percent-clipped=3.0 +2023-03-09 01:17:42,697 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35448.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:17:53,627 INFO [train.py:898] (2/4) Epoch 10, batch 2750, loss[loss=0.1616, simple_loss=0.2464, pruned_loss=0.03843, over 18410.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2784, pruned_loss=0.05548, over 3551991.71 frames. ], batch size: 43, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:17:56,979 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3687, 5.3494, 4.7108, 5.2645, 5.2963, 4.8109, 5.2180, 4.7702], + device='cuda:2'), covar=tensor([0.0463, 0.0462, 0.1772, 0.0897, 0.0652, 0.0443, 0.0456, 0.1021], + device='cuda:2'), in_proj_covar=tensor([0.0390, 0.0445, 0.0590, 0.0349, 0.0334, 0.0406, 0.0434, 0.0561], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 01:18:51,856 INFO [train.py:898] (2/4) Epoch 10, batch 2800, loss[loss=0.2143, simple_loss=0.2945, pruned_loss=0.06708, over 18494.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2787, pruned_loss=0.0555, over 3559343.68 frames. 
], batch size: 51, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:19:18,859 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35530.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:19:34,530 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.967e+02 3.480e+02 4.448e+02 5.529e+02 1.797e+03, threshold=8.895e+02, percent-clipped=6.0 +2023-03-09 01:19:49,761 INFO [train.py:898] (2/4) Epoch 10, batch 2850, loss[loss=0.2063, simple_loss=0.2897, pruned_loss=0.06141, over 18571.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2781, pruned_loss=0.05557, over 3560377.99 frames. ], batch size: 54, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:20:00,961 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-09 01:20:19,219 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.86 vs. limit=2.0 +2023-03-09 01:20:29,959 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35591.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:20:36,428 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9118, 4.6833, 4.9061, 3.6662, 3.9886, 3.8387, 3.1682, 2.6930], + device='cuda:2'), covar=tensor([0.0182, 0.0167, 0.0058, 0.0196, 0.0267, 0.0144, 0.0497, 0.0715], + device='cuda:2'), in_proj_covar=tensor([0.0058, 0.0047, 0.0046, 0.0058, 0.0079, 0.0056, 0.0072, 0.0077], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 01:20:47,589 INFO [train.py:898] (2/4) Epoch 10, batch 2900, loss[loss=0.1858, simple_loss=0.2747, pruned_loss=0.04843, over 17784.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.277, pruned_loss=0.0552, over 3569410.65 frames. ], batch size: 70, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:21:17,477 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2375, 5.4213, 2.9323, 5.2723, 5.1006, 5.4690, 5.3327, 2.8559], + device='cuda:2'), covar=tensor([0.0142, 0.0062, 0.0701, 0.0073, 0.0072, 0.0071, 0.0082, 0.0934], + device='cuda:2'), in_proj_covar=tensor([0.0074, 0.0064, 0.0087, 0.0079, 0.0074, 0.0063, 0.0075, 0.0091], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 01:21:32,210 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.306e+02 3.157e+02 3.765e+02 4.717e+02 8.382e+02, threshold=7.531e+02, percent-clipped=0.0 +2023-03-09 01:21:47,242 INFO [train.py:898] (2/4) Epoch 10, batch 2950, loss[loss=0.1718, simple_loss=0.2535, pruned_loss=0.04504, over 18249.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2764, pruned_loss=0.05475, over 3567471.20 frames. ], batch size: 45, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:21:55,510 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-09 01:22:39,462 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35701.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 01:22:46,136 INFO [train.py:898] (2/4) Epoch 10, batch 3000, loss[loss=0.1496, simple_loss=0.2337, pruned_loss=0.03273, over 18401.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2763, pruned_loss=0.05438, over 3574692.83 frames. 
], batch size: 42, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:22:46,137 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 01:22:58,179 INFO [train.py:932] (2/4) Epoch 10, validation: loss=0.1597, simple_loss=0.2619, pruned_loss=0.0287, over 944034.00 frames. +2023-03-09 01:22:58,180 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 01:23:12,574 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.1152, 3.0247, 3.9473, 3.5725, 2.8737, 2.8321, 3.5156, 4.0109], + device='cuda:2'), covar=tensor([0.0871, 0.1185, 0.0136, 0.0329, 0.0768, 0.0936, 0.0384, 0.0328], + device='cuda:2'), in_proj_covar=tensor([0.0134, 0.0234, 0.0098, 0.0156, 0.0173, 0.0171, 0.0168, 0.0138], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 01:23:25,186 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35730.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:23:40,662 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.222e+02 3.466e+02 3.980e+02 4.724e+02 8.688e+02, threshold=7.961e+02, percent-clipped=2.0 +2023-03-09 01:23:45,577 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35748.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:23:56,262 INFO [train.py:898] (2/4) Epoch 10, batch 3050, loss[loss=0.2051, simple_loss=0.2932, pruned_loss=0.05848, over 18486.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2767, pruned_loss=0.05455, over 3576236.84 frames. ], batch size: 59, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:24:13,438 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35771.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:24:42,262 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=35796.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:24:43,953 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.66 vs. limit=5.0 +2023-03-09 01:24:45,835 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1395, 5.2139, 4.7079, 5.0818, 5.1292, 4.4914, 5.0436, 4.8037], + device='cuda:2'), covar=tensor([0.0504, 0.0438, 0.1528, 0.0869, 0.0616, 0.0461, 0.0432, 0.0941], + device='cuda:2'), in_proj_covar=tensor([0.0402, 0.0450, 0.0599, 0.0349, 0.0340, 0.0411, 0.0443, 0.0575], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 01:24:55,231 INFO [train.py:898] (2/4) Epoch 10, batch 3100, loss[loss=0.1799, simple_loss=0.2646, pruned_loss=0.04758, over 18275.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2754, pruned_loss=0.05381, over 3588196.74 frames. ], batch size: 49, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:25:00,746 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35811.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:25:26,934 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35832.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:25:39,854 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.088e+02 3.519e+02 4.020e+02 4.842e+02 1.442e+03, threshold=8.041e+02, percent-clipped=6.0 +2023-03-09 01:25:54,161 INFO [train.py:898] (2/4) Epoch 10, batch 3150, loss[loss=0.1821, simple_loss=0.264, pruned_loss=0.05012, over 18182.00 frames. 
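
Note on the validation block above (Computing validation loss, then a validation: line): it runs the training loss over the fixed dev set with gradients disabled, always the same 944034.00 frames, so the numbers (loss=0.1597, pruned_loss=0.0287 here) are directly comparable across epochs; the Maximum memory line reports the CUDA high-water mark. A schematic of that loop, with compute_loss a placeholder for icefall's actual routine:

    import torch

    def compute_validation_loss(model, valid_dl):
        model.eval()
        tot_loss, tot_frames = 0.0, 0.0
        with torch.no_grad():
            for batch in valid_dl:
                loss, num_frames = compute_loss(model, batch)  # placeholder
                tot_loss += loss.item()
                tot_frames += num_frames
        model.train()
        return tot_loss / tot_frames  # the "validation: loss=..." value

    # torch.cuda.max_memory_allocated() is the source of the
    # "Maximum memory allocated so far" line.
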
], tot_loss[loss=0.1911, simple_loss=0.275, pruned_loss=0.05366, over 3593569.40 frames. ], batch size: 44, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:26:13,001 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35872.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:26:27,915 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9134, 4.0264, 2.4046, 3.9625, 4.9271, 2.4933, 3.6357, 3.4786], + device='cuda:2'), covar=tensor([0.0097, 0.0993, 0.1477, 0.0523, 0.0055, 0.1272, 0.0636, 0.0884], + device='cuda:2'), in_proj_covar=tensor([0.0112, 0.0224, 0.0188, 0.0186, 0.0086, 0.0173, 0.0201, 0.0201], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 01:26:28,851 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35886.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:26:37,269 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-09 01:26:52,116 INFO [train.py:898] (2/4) Epoch 10, batch 3200, loss[loss=0.1732, simple_loss=0.2541, pruned_loss=0.04621, over 18491.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2757, pruned_loss=0.05417, over 3582846.56 frames. ], batch size: 44, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:27:35,147 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.239e+02 3.585e+02 4.251e+02 5.261e+02 1.507e+03, threshold=8.503e+02, percent-clipped=6.0 +2023-03-09 01:27:49,558 INFO [train.py:898] (2/4) Epoch 10, batch 3250, loss[loss=0.188, simple_loss=0.2761, pruned_loss=0.04994, over 18501.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2748, pruned_loss=0.05383, over 3601738.52 frames. ], batch size: 53, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:27:55,941 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-09 01:28:01,521 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6101, 2.1040, 2.8446, 2.7097, 3.4850, 5.2982, 4.7817, 4.2966], + device='cuda:2'), covar=tensor([0.1080, 0.1842, 0.2034, 0.1283, 0.1582, 0.0089, 0.0308, 0.0380], + device='cuda:2'), in_proj_covar=tensor([0.0234, 0.0293, 0.0303, 0.0244, 0.0355, 0.0183, 0.0253, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 01:28:23,392 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-09 01:28:46,188 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36001.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 01:28:52,531 INFO [train.py:898] (2/4) Epoch 10, batch 3300, loss[loss=0.1899, simple_loss=0.2681, pruned_loss=0.05582, over 18260.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2749, pruned_loss=0.05388, over 3600528.65 frames. 
], batch size: 47, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:29:19,628 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36030.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:29:35,215 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.117e+02 3.405e+02 4.202e+02 5.092e+02 8.448e+02, threshold=8.405e+02, percent-clipped=0.0 +2023-03-09 01:29:40,841 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36049.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 01:29:49,540 INFO [train.py:898] (2/4) Epoch 10, batch 3350, loss[loss=0.2077, simple_loss=0.2795, pruned_loss=0.06792, over 18289.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2755, pruned_loss=0.05405, over 3599928.14 frames. ], batch size: 49, lr: 1.12e-02, grad_scale: 8.0 +2023-03-09 01:30:13,378 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36078.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:30:47,888 INFO [train.py:898] (2/4) Epoch 10, batch 3400, loss[loss=0.2375, simple_loss=0.3163, pruned_loss=0.07935, over 18450.00 frames. ], tot_loss[loss=0.1912, simple_loss=0.2749, pruned_loss=0.05373, over 3608095.30 frames. ], batch size: 59, lr: 1.11e-02, grad_scale: 8.0 +2023-03-09 01:31:03,233 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.60 vs. limit=5.0 +2023-03-09 01:31:10,845 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36127.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:31:31,609 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.609e+02 3.400e+02 4.178e+02 5.234e+02 8.202e+02, threshold=8.355e+02, percent-clipped=0.0 +2023-03-09 01:31:32,743 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-03-09 01:31:46,924 INFO [train.py:898] (2/4) Epoch 10, batch 3450, loss[loss=0.1822, simple_loss=0.2639, pruned_loss=0.05021, over 18485.00 frames. ], tot_loss[loss=0.191, simple_loss=0.275, pruned_loss=0.05353, over 3595048.37 frames. ], batch size: 44, lr: 1.11e-02, grad_scale: 8.0 +2023-03-09 01:31:58,334 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36167.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:32:07,672 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8176, 3.8471, 5.0056, 2.7309, 4.2370, 2.6802, 2.9711, 2.0173], + device='cuda:2'), covar=tensor([0.1038, 0.0746, 0.0111, 0.0816, 0.0592, 0.2212, 0.2631, 0.1855], + device='cuda:2'), in_proj_covar=tensor([0.0194, 0.0214, 0.0116, 0.0165, 0.0229, 0.0246, 0.0277, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 01:32:14,226 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36181.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:32:20,940 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36186.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:32:45,291 INFO [train.py:898] (2/4) Epoch 10, batch 3500, loss[loss=0.2107, simple_loss=0.2965, pruned_loss=0.06247, over 18124.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.275, pruned_loss=0.05354, over 3586903.52 frames. 
], batch size: 62, lr: 1.11e-02, grad_scale: 8.0 +2023-03-09 01:33:09,779 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1934, 4.2058, 2.4360, 4.2442, 5.2385, 2.4802, 3.8046, 3.8423], + device='cuda:2'), covar=tensor([0.0090, 0.1006, 0.1514, 0.0473, 0.0041, 0.1269, 0.0668, 0.0741], + device='cuda:2'), in_proj_covar=tensor([0.0111, 0.0227, 0.0190, 0.0186, 0.0087, 0.0175, 0.0200, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 01:33:16,082 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36234.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:33:20,917 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6211, 3.7465, 5.3880, 4.5696, 3.2205, 3.1128, 4.5182, 5.4769], + device='cuda:2'), covar=tensor([0.0869, 0.1501, 0.0061, 0.0310, 0.0856, 0.0990, 0.0306, 0.0173], + device='cuda:2'), in_proj_covar=tensor([0.0134, 0.0236, 0.0099, 0.0157, 0.0174, 0.0173, 0.0170, 0.0140], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 01:33:25,097 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36242.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:33:26,938 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.239e+02 3.308e+02 3.805e+02 4.814e+02 1.268e+03, threshold=7.610e+02, percent-clipped=2.0 +2023-03-09 01:33:41,579 INFO [train.py:898] (2/4) Epoch 10, batch 3550, loss[loss=0.1986, simple_loss=0.2819, pruned_loss=0.05765, over 18374.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2749, pruned_loss=0.05348, over 3585058.54 frames. ], batch size: 50, lr: 1.11e-02, grad_scale: 8.0 +2023-03-09 01:34:12,613 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6870, 5.3103, 5.2918, 5.2284, 4.7703, 5.1459, 4.4844, 5.1018], + device='cuda:2'), covar=tensor([0.0224, 0.0233, 0.0180, 0.0290, 0.0366, 0.0230, 0.1197, 0.0287], + device='cuda:2'), in_proj_covar=tensor([0.0171, 0.0213, 0.0202, 0.0238, 0.0213, 0.0222, 0.0277, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-09 01:34:36,261 INFO [train.py:898] (2/4) Epoch 10, batch 3600, loss[loss=0.1952, simple_loss=0.2783, pruned_loss=0.05603, over 18503.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2748, pruned_loss=0.0536, over 3577681.62 frames. ], batch size: 47, lr: 1.11e-02, grad_scale: 8.0 +2023-03-09 01:34:47,501 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2790, 2.5078, 3.8565, 3.7080, 2.2655, 4.1677, 3.7548, 2.6988], + device='cuda:2'), covar=tensor([0.0483, 0.1329, 0.0294, 0.0264, 0.1612, 0.0198, 0.0489, 0.0894], + device='cuda:2'), in_proj_covar=tensor([0.0182, 0.0214, 0.0147, 0.0138, 0.0206, 0.0179, 0.0200, 0.0185], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 01:35:41,693 INFO [train.py:898] (2/4) Epoch 11, batch 0, loss[loss=0.2086, simple_loss=0.297, pruned_loss=0.06009, over 18489.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.297, pruned_loss=0.06009, over 18489.00 frames. 
], batch size: 53, lr: 1.06e-02, grad_scale: 8.0 +2023-03-09 01:35:41,693 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 01:35:53,391 INFO [train.py:932] (2/4) Epoch 11, validation: loss=0.1597, simple_loss=0.2625, pruned_loss=0.0284, over 944034.00 frames. +2023-03-09 01:35:53,392 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 01:35:56,695 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.034e+02 3.291e+02 3.800e+02 4.653e+02 8.329e+02, threshold=7.601e+02, percent-clipped=2.0 +2023-03-09 01:36:26,622 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9937, 4.8536, 5.0398, 4.7391, 4.7833, 4.8343, 5.2120, 5.1031], + device='cuda:2'), covar=tensor([0.0061, 0.0078, 0.0067, 0.0099, 0.0063, 0.0106, 0.0071, 0.0100], + device='cuda:2'), in_proj_covar=tensor([0.0079, 0.0056, 0.0058, 0.0074, 0.0061, 0.0086, 0.0071, 0.0071], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 01:36:51,729 INFO [train.py:898] (2/4) Epoch 11, batch 50, loss[loss=0.1702, simple_loss=0.2502, pruned_loss=0.04514, over 17790.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2825, pruned_loss=0.05595, over 813220.96 frames. ], batch size: 39, lr: 1.06e-02, grad_scale: 16.0 +2023-03-09 01:37:09,971 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.41 vs. limit=5.0 +2023-03-09 01:37:35,833 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36427.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:37:36,215 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-03-09 01:37:51,429 INFO [train.py:898] (2/4) Epoch 11, batch 100, loss[loss=0.1688, simple_loss=0.2461, pruned_loss=0.04578, over 18485.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2782, pruned_loss=0.0544, over 1430852.11 frames. 
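
Note on the lr column: it decays smoothly within an epoch (1.15e-02 down to 1.11e-02 across Epoch 10) and steps down again at the epoch boundary (1.06e-02 from Epoch 11, batch 0 onward). That behaviour matches icefall's Eden schedule, where separate batch- and epoch-dependent factors multiply the base learning rate. A sketch, with the formula assumed from the recipe and checked against this log, taking base_lr=0.05, lr_batches=5000 and lr_epochs=3.5 as this run's settings:

    def eden_lr(base_lr, batch, epoch, lr_batches=5000.0, lr_epochs=3.5):
        # Assumed Eden formula; epoch counts completed epochs.
        batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
        epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
        return base_lr * batch_factor * epoch_factor

    # eden_lr(0.05, batch=36300, epoch=10) ~ 1.06e-02, matching the value
    # printed from the start of Epoch 11.
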
], batch size: 44, lr: 1.06e-02, grad_scale: 16.0 +2023-03-09 01:37:54,911 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.346e+02 3.421e+02 4.363e+02 5.513e+02 1.312e+03, threshold=8.726e+02, percent-clipped=4.0 +2023-03-09 01:38:14,743 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6474, 3.5976, 3.4907, 2.9866, 3.3994, 2.7868, 2.6645, 3.6927], + device='cuda:2'), covar=tensor([0.0038, 0.0061, 0.0062, 0.0129, 0.0072, 0.0146, 0.0172, 0.0046], + device='cuda:2'), in_proj_covar=tensor([0.0093, 0.0114, 0.0102, 0.0151, 0.0102, 0.0145, 0.0156, 0.0087], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 01:38:17,007 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8138, 3.0985, 2.5394, 3.0881, 3.8256, 3.6572, 3.3617, 3.2298], + device='cuda:2'), covar=tensor([0.0152, 0.0263, 0.0591, 0.0226, 0.0144, 0.0150, 0.0222, 0.0263], + device='cuda:2'), in_proj_covar=tensor([0.0115, 0.0101, 0.0146, 0.0131, 0.0099, 0.0085, 0.0128, 0.0127], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 01:38:22,574 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36467.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:38:32,187 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36475.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:38:46,004 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9662, 5.0085, 5.1327, 4.7306, 4.8810, 4.8456, 5.1852, 5.1865], + device='cuda:2'), covar=tensor([0.0055, 0.0058, 0.0052, 0.0089, 0.0050, 0.0100, 0.0059, 0.0076], + device='cuda:2'), in_proj_covar=tensor([0.0079, 0.0055, 0.0058, 0.0074, 0.0061, 0.0086, 0.0071, 0.0071], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 01:38:50,307 INFO [train.py:898] (2/4) Epoch 11, batch 150, loss[loss=0.1696, simple_loss=0.2546, pruned_loss=0.04229, over 18355.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2774, pruned_loss=0.05333, over 1907728.43 frames. ], batch size: 46, lr: 1.06e-02, grad_scale: 16.0 +2023-03-09 01:39:18,197 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36515.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:39:44,308 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36537.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:39:44,485 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2951, 5.6489, 2.8785, 5.3319, 5.2536, 5.6506, 5.5150, 2.8961], + device='cuda:2'), covar=tensor([0.0146, 0.0037, 0.0677, 0.0059, 0.0064, 0.0048, 0.0065, 0.0894], + device='cuda:2'), in_proj_covar=tensor([0.0075, 0.0064, 0.0087, 0.0079, 0.0074, 0.0063, 0.0074, 0.0090], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0003, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 01:39:48,710 INFO [train.py:898] (2/4) Epoch 11, batch 200, loss[loss=0.1651, simple_loss=0.2506, pruned_loss=0.0398, over 18339.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2748, pruned_loss=0.05281, over 2275314.60 frames. 
], batch size: 46, lr: 1.06e-02, grad_scale: 16.0 +2023-03-09 01:39:52,130 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.119e+02 3.228e+02 3.660e+02 4.259e+02 9.099e+02, threshold=7.320e+02, percent-clipped=1.0 +2023-03-09 01:40:47,360 INFO [train.py:898] (2/4) Epoch 11, batch 250, loss[loss=0.2019, simple_loss=0.2921, pruned_loss=0.05583, over 17797.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2744, pruned_loss=0.05276, over 2566887.99 frames. ], batch size: 70, lr: 1.06e-02, grad_scale: 8.0 +2023-03-09 01:41:04,172 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36605.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:41:07,730 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36608.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:41:12,313 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36612.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:41:47,060 INFO [train.py:898] (2/4) Epoch 11, batch 300, loss[loss=0.2141, simple_loss=0.3047, pruned_loss=0.06174, over 18405.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2738, pruned_loss=0.05254, over 2795902.99 frames. ], batch size: 52, lr: 1.06e-02, grad_scale: 8.0 +2023-03-09 01:41:51,507 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.170e+02 3.364e+02 4.244e+02 4.969e+02 8.450e+02, threshold=8.489e+02, percent-clipped=1.0 +2023-03-09 01:42:15,677 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36666.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:42:17,114 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-09 01:42:19,084 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36669.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:42:23,466 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36673.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:42:45,383 INFO [train.py:898] (2/4) Epoch 11, batch 350, loss[loss=0.2009, simple_loss=0.2887, pruned_loss=0.0566, over 18481.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2739, pruned_loss=0.0526, over 2980961.47 frames. ], batch size: 53, lr: 1.06e-02, grad_scale: 8.0 +2023-03-09 01:42:55,939 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36700.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 01:43:15,869 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-03-09 01:43:44,244 INFO [train.py:898] (2/4) Epoch 11, batch 400, loss[loss=0.2487, simple_loss=0.3138, pruned_loss=0.0918, over 12662.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2752, pruned_loss=0.05345, over 3103603.39 frames. ], batch size: 129, lr: 1.06e-02, grad_scale: 8.0 +2023-03-09 01:43:48,754 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.208e+02 3.230e+02 3.792e+02 4.617e+02 9.263e+02, threshold=7.584e+02, percent-clipped=1.0 +2023-03-09 01:44:07,398 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36761.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 01:44:42,721 INFO [train.py:898] (2/4) Epoch 11, batch 450, loss[loss=0.1739, simple_loss=0.2521, pruned_loss=0.04787, over 18257.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2752, pruned_loss=0.0535, over 3217250.10 frames. 
], batch size: 45, lr: 1.05e-02, grad_scale: 8.0 +2023-03-09 01:44:50,496 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3052, 3.2144, 1.8813, 4.0049, 2.7815, 3.9346, 2.2414, 3.2971], + device='cuda:2'), covar=tensor([0.0505, 0.0755, 0.1402, 0.0371, 0.0818, 0.0292, 0.1097, 0.0483], + device='cuda:2'), in_proj_covar=tensor([0.0193, 0.0213, 0.0180, 0.0238, 0.0184, 0.0241, 0.0193, 0.0187], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 01:44:51,001 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-03-09 01:44:59,814 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36805.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 01:45:36,392 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36837.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:45:41,927 INFO [train.py:898] (2/4) Epoch 11, batch 500, loss[loss=0.1761, simple_loss=0.2693, pruned_loss=0.04143, over 18563.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2748, pruned_loss=0.05299, over 3301423.93 frames. ], batch size: 54, lr: 1.05e-02, grad_scale: 8.0 +2023-03-09 01:45:47,158 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.052e+02 3.251e+02 4.103e+02 5.001e+02 1.385e+03, threshold=8.205e+02, percent-clipped=3.0 +2023-03-09 01:46:12,024 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36866.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 01:46:33,289 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=36885.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:46:39,967 INFO [train.py:898] (2/4) Epoch 11, batch 550, loss[loss=0.1738, simple_loss=0.257, pruned_loss=0.04533, over 18248.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2741, pruned_loss=0.05265, over 3372622.08 frames. ], batch size: 47, lr: 1.05e-02, grad_scale: 8.0 +2023-03-09 01:47:04,820 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36911.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:47:16,110 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2196, 4.6449, 4.6591, 4.6419, 4.3112, 4.5519, 4.0618, 4.5321], + device='cuda:2'), covar=tensor([0.0248, 0.0279, 0.0210, 0.0371, 0.0318, 0.0235, 0.1023, 0.0322], + device='cuda:2'), in_proj_covar=tensor([0.0176, 0.0217, 0.0207, 0.0243, 0.0221, 0.0227, 0.0285, 0.0212], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-09 01:47:38,613 INFO [train.py:898] (2/4) Epoch 11, batch 600, loss[loss=0.1853, simple_loss=0.2683, pruned_loss=0.05119, over 18499.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2742, pruned_loss=0.05271, over 3416145.73 frames. 
], batch size: 44, lr: 1.05e-02, grad_scale: 8.0 +2023-03-09 01:47:42,403 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9891, 4.9596, 5.0887, 5.1165, 4.9912, 5.6983, 5.3263, 4.9864], + device='cuda:2'), covar=tensor([0.0864, 0.0716, 0.0755, 0.0732, 0.1353, 0.0777, 0.0685, 0.1776], + device='cuda:2'), in_proj_covar=tensor([0.0308, 0.0241, 0.0254, 0.0254, 0.0288, 0.0356, 0.0233, 0.0346], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0003], + device='cuda:2') +2023-03-09 01:47:43,326 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.417e+02 3.303e+02 3.773e+02 4.556e+02 8.624e+02, threshold=7.545e+02, percent-clipped=1.0 +2023-03-09 01:48:00,241 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36958.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:48:03,431 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36961.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:48:06,934 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36964.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:48:11,445 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36968.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:48:16,100 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36972.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:48:34,336 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2294, 5.0112, 5.3430, 5.2239, 5.1798, 5.9134, 5.5157, 5.2074], + device='cuda:2'), covar=tensor([0.1078, 0.0679, 0.0816, 0.0650, 0.1358, 0.0725, 0.0655, 0.1587], + device='cuda:2'), in_proj_covar=tensor([0.0303, 0.0238, 0.0250, 0.0250, 0.0285, 0.0352, 0.0231, 0.0341], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-09 01:48:37,460 INFO [train.py:898] (2/4) Epoch 11, batch 650, loss[loss=0.1856, simple_loss=0.2763, pruned_loss=0.04744, over 18307.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2742, pruned_loss=0.05296, over 3446532.37 frames. ], batch size: 54, lr: 1.05e-02, grad_scale: 8.0 +2023-03-09 01:48:54,454 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37004.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:49:12,020 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37019.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:49:36,608 INFO [train.py:898] (2/4) Epoch 11, batch 700, loss[loss=0.1589, simple_loss=0.2355, pruned_loss=0.04119, over 18395.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2741, pruned_loss=0.05328, over 3466194.63 frames. ], batch size: 42, lr: 1.05e-02, grad_scale: 8.0 +2023-03-09 01:49:39,368 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. 
limit=2.0 +2023-03-09 01:49:40,956 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.338e+02 3.308e+02 3.875e+02 4.751e+02 1.116e+03, threshold=7.751e+02, percent-clipped=5.0 +2023-03-09 01:49:54,668 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37056.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 01:50:05,365 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37065.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:50:34,404 INFO [train.py:898] (2/4) Epoch 11, batch 750, loss[loss=0.214, simple_loss=0.3069, pruned_loss=0.0606, over 16999.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.275, pruned_loss=0.05356, over 3493243.44 frames. ], batch size: 78, lr: 1.05e-02, grad_scale: 8.0 +2023-03-09 01:51:16,477 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-09 01:51:17,185 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7082, 5.1582, 5.1400, 5.1396, 4.7914, 5.0586, 4.5127, 5.0536], + device='cuda:2'), covar=tensor([0.0194, 0.0289, 0.0191, 0.0342, 0.0286, 0.0231, 0.1005, 0.0256], + device='cuda:2'), in_proj_covar=tensor([0.0171, 0.0213, 0.0203, 0.0239, 0.0218, 0.0225, 0.0279, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-09 01:51:33,947 INFO [train.py:898] (2/4) Epoch 11, batch 800, loss[loss=0.1977, simple_loss=0.2809, pruned_loss=0.05729, over 17180.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2737, pruned_loss=0.05287, over 3518446.58 frames. ], batch size: 78, lr: 1.05e-02, grad_scale: 8.0 +2023-03-09 01:51:38,503 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.114e+02 3.217e+02 3.576e+02 4.437e+02 1.024e+03, threshold=7.151e+02, percent-clipped=5.0 +2023-03-09 01:51:49,668 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-09 01:51:58,749 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37161.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 01:52:33,369 INFO [train.py:898] (2/4) Epoch 11, batch 850, loss[loss=0.164, simple_loss=0.2475, pruned_loss=0.04032, over 18502.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2734, pruned_loss=0.05269, over 3535470.95 frames. ], batch size: 47, lr: 1.05e-02, grad_scale: 8.0 +2023-03-09 01:52:34,064 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.03 vs. limit=5.0 +2023-03-09 01:53:32,676 INFO [train.py:898] (2/4) Epoch 11, batch 900, loss[loss=0.1754, simple_loss=0.248, pruned_loss=0.05137, over 18507.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2733, pruned_loss=0.05252, over 3554272.09 frames. 
], batch size: 47, lr: 1.05e-02, grad_scale: 4.0 +2023-03-09 01:53:38,421 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.869e+02 3.048e+02 3.504e+02 4.549e+02 1.028e+03, threshold=7.008e+02, percent-clipped=4.0 +2023-03-09 01:53:56,437 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37261.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:54:00,529 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37264.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:54:04,627 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37267.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:54:05,882 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37268.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:54:05,911 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37268.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:54:32,482 INFO [train.py:898] (2/4) Epoch 11, batch 950, loss[loss=0.1842, simple_loss=0.2734, pruned_loss=0.0475, over 18321.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2737, pruned_loss=0.05264, over 3571136.52 frames. ], batch size: 56, lr: 1.05e-02, grad_scale: 4.0 +2023-03-09 01:54:47,660 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.46 vs. limit=5.0 +2023-03-09 01:54:49,421 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6441, 4.2272, 4.3694, 3.2444, 3.5451, 3.3369, 2.7097, 2.0411], + device='cuda:2'), covar=tensor([0.0197, 0.0141, 0.0058, 0.0239, 0.0296, 0.0196, 0.0620, 0.0936], + device='cuda:2'), in_proj_covar=tensor([0.0058, 0.0048, 0.0046, 0.0058, 0.0079, 0.0056, 0.0071, 0.0078], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 01:54:52,611 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37309.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:54:53,821 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8739, 4.8969, 4.9280, 4.9660, 4.9275, 5.5752, 5.2164, 5.1209], + device='cuda:2'), covar=tensor([0.0921, 0.0734, 0.0748, 0.0757, 0.1206, 0.0750, 0.0680, 0.1433], + device='cuda:2'), in_proj_covar=tensor([0.0309, 0.0238, 0.0256, 0.0258, 0.0292, 0.0360, 0.0237, 0.0349], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0003], + device='cuda:2') +2023-03-09 01:54:56,034 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37312.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:54:58,792 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37314.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:55:01,082 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37316.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:55:18,040 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37329.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:55:31,162 INFO [train.py:898] (2/4) Epoch 11, batch 1000, loss[loss=0.1668, simple_loss=0.2534, pruned_loss=0.04007, over 18266.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2749, pruned_loss=0.05298, over 3572926.68 frames. 
], batch size: 49, lr: 1.05e-02, grad_scale: 4.0 +2023-03-09 01:55:36,612 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.267e+02 3.387e+02 4.010e+02 5.026e+02 9.863e+02, threshold=8.020e+02, percent-clipped=4.0 +2023-03-09 01:55:37,042 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2563, 2.4003, 3.9198, 3.6951, 2.2396, 4.3129, 3.7443, 2.5884], + device='cuda:2'), covar=tensor([0.0469, 0.1936, 0.0308, 0.0264, 0.1966, 0.0224, 0.0491, 0.1263], + device='cuda:2'), in_proj_covar=tensor([0.0184, 0.0221, 0.0147, 0.0143, 0.0209, 0.0182, 0.0207, 0.0188], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 01:55:44,834 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6929, 2.9478, 2.8491, 2.9613, 3.7249, 3.6046, 3.2982, 3.1025], + device='cuda:2'), covar=tensor([0.0192, 0.0293, 0.0528, 0.0299, 0.0192, 0.0148, 0.0295, 0.0317], + device='cuda:2'), in_proj_covar=tensor([0.0116, 0.0104, 0.0151, 0.0135, 0.0101, 0.0088, 0.0135, 0.0128], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 01:55:48,196 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37356.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 01:55:52,751 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37360.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:55:57,529 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37364.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:56:15,348 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5062, 1.9793, 2.6949, 2.7556, 3.4481, 5.1513, 4.6566, 4.0539], + device='cuda:2'), covar=tensor([0.1214, 0.1950, 0.2248, 0.1326, 0.1675, 0.0095, 0.0344, 0.0421], + device='cuda:2'), in_proj_covar=tensor([0.0238, 0.0296, 0.0309, 0.0245, 0.0359, 0.0188, 0.0259, 0.0201], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 01:56:21,714 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-09 01:56:29,972 INFO [train.py:898] (2/4) Epoch 11, batch 1050, loss[loss=0.1442, simple_loss=0.2287, pruned_loss=0.02986, over 18379.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2747, pruned_loss=0.05297, over 3570655.34 frames. ], batch size: 42, lr: 1.05e-02, grad_scale: 4.0 +2023-03-09 01:56:44,662 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37404.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 01:57:04,148 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3251, 2.8381, 2.3798, 2.6690, 3.4385, 3.2904, 2.9224, 2.8495], + device='cuda:2'), covar=tensor([0.0260, 0.0292, 0.0663, 0.0320, 0.0231, 0.0145, 0.0406, 0.0351], + device='cuda:2'), in_proj_covar=tensor([0.0117, 0.0104, 0.0151, 0.0134, 0.0101, 0.0087, 0.0135, 0.0128], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 01:57:09,679 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37425.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 01:57:28,534 INFO [train.py:898] (2/4) Epoch 11, batch 1100, loss[loss=0.1837, simple_loss=0.2786, pruned_loss=0.04438, over 18491.00 frames. 
], tot_loss[loss=0.1902, simple_loss=0.2746, pruned_loss=0.05289, over 3586778.13 frames. ], batch size: 53, lr: 1.05e-02, grad_scale: 4.0 +2023-03-09 01:57:34,020 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.186e+02 3.484e+02 3.927e+02 4.853e+02 9.182e+02, threshold=7.853e+02, percent-clipped=3.0 +2023-03-09 01:57:51,290 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37461.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 01:57:56,311 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 01:58:27,575 INFO [train.py:898] (2/4) Epoch 11, batch 1150, loss[loss=0.1817, simple_loss=0.2653, pruned_loss=0.0491, over 18496.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2748, pruned_loss=0.05276, over 3584356.92 frames. ], batch size: 47, lr: 1.04e-02, grad_scale: 4.0 +2023-03-09 01:58:32,789 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.19 vs. limit=5.0 +2023-03-09 01:58:47,417 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2562, 5.5376, 2.7153, 5.3754, 5.2600, 5.6211, 5.4186, 2.9603], + device='cuda:2'), covar=tensor([0.0146, 0.0057, 0.0686, 0.0056, 0.0053, 0.0048, 0.0069, 0.0754], + device='cuda:2'), in_proj_covar=tensor([0.0076, 0.0066, 0.0088, 0.0080, 0.0075, 0.0065, 0.0076, 0.0090], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 01:58:48,401 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37509.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 01:59:26,243 INFO [train.py:898] (2/4) Epoch 11, batch 1200, loss[loss=0.204, simple_loss=0.2968, pruned_loss=0.0556, over 18626.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2754, pruned_loss=0.05274, over 3589744.78 frames. ], batch size: 52, lr: 1.04e-02, grad_scale: 8.0 +2023-03-09 01:59:31,795 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.211e+02 3.034e+02 3.620e+02 4.493e+02 1.296e+03, threshold=7.239e+02, percent-clipped=3.0 +2023-03-09 01:59:55,924 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37567.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:00:24,871 INFO [train.py:898] (2/4) Epoch 11, batch 1250, loss[loss=0.1942, simple_loss=0.2734, pruned_loss=0.05755, over 18550.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2755, pruned_loss=0.05295, over 3594041.28 frames. 
], batch size: 49, lr: 1.04e-02, grad_scale: 8.0 +2023-03-09 02:00:28,268 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6464, 2.3153, 2.3226, 2.6171, 3.0399, 3.9408, 3.6808, 3.4182], + device='cuda:2'), covar=tensor([0.0996, 0.1704, 0.2297, 0.1263, 0.1554, 0.0210, 0.0443, 0.0388], + device='cuda:2'), in_proj_covar=tensor([0.0240, 0.0298, 0.0312, 0.0247, 0.0360, 0.0186, 0.0259, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 02:00:52,491 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37614.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:00:53,590 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37615.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:01:04,059 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37624.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:01:12,353 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0334, 3.4067, 2.6075, 3.4600, 4.0182, 2.6197, 3.3160, 3.4044], + device='cuda:2'), covar=tensor([0.0129, 0.1025, 0.1292, 0.0525, 0.0090, 0.1125, 0.0647, 0.0644], + device='cuda:2'), in_proj_covar=tensor([0.0112, 0.0224, 0.0187, 0.0185, 0.0087, 0.0170, 0.0197, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 02:01:24,579 INFO [train.py:898] (2/4) Epoch 11, batch 1300, loss[loss=0.1767, simple_loss=0.2615, pruned_loss=0.04591, over 18511.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2748, pruned_loss=0.05293, over 3582605.04 frames. ], batch size: 47, lr: 1.04e-02, grad_scale: 8.0 +2023-03-09 02:01:31,764 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.066e+02 3.446e+02 4.010e+02 4.726e+02 9.288e+02, threshold=8.020e+02, percent-clipped=3.0 +2023-03-09 02:01:46,658 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37660.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:01:48,851 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37662.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:02:23,187 INFO [train.py:898] (2/4) Epoch 11, batch 1350, loss[loss=0.152, simple_loss=0.2366, pruned_loss=0.03376, over 18256.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2731, pruned_loss=0.05237, over 3585224.65 frames. ], batch size: 45, lr: 1.04e-02, grad_scale: 4.0 +2023-03-09 02:02:43,281 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37708.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:02:56,750 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37720.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:03:21,773 INFO [train.py:898] (2/4) Epoch 11, batch 1400, loss[loss=0.1685, simple_loss=0.2542, pruned_loss=0.04137, over 18536.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2727, pruned_loss=0.05199, over 3582441.10 frames. 
], batch size: 49, lr: 1.04e-02, grad_scale: 4.0 +2023-03-09 02:03:27,338 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3408, 2.9225, 2.3702, 2.7617, 3.4993, 3.3477, 2.9417, 2.9861], + device='cuda:2'), covar=tensor([0.0225, 0.0220, 0.0659, 0.0297, 0.0161, 0.0192, 0.0313, 0.0240], + device='cuda:2'), in_proj_covar=tensor([0.0116, 0.0103, 0.0146, 0.0134, 0.0101, 0.0087, 0.0132, 0.0125], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 02:03:29,178 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.922e+02 3.017e+02 3.586e+02 4.269e+02 9.001e+02, threshold=7.171e+02, percent-clipped=1.0 +2023-03-09 02:04:20,146 INFO [train.py:898] (2/4) Epoch 11, batch 1450, loss[loss=0.1697, simple_loss=0.2467, pruned_loss=0.0464, over 17331.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2719, pruned_loss=0.05181, over 3579998.12 frames. ], batch size: 38, lr: 1.04e-02, grad_scale: 4.0 +2023-03-09 02:05:19,788 INFO [train.py:898] (2/4) Epoch 11, batch 1500, loss[loss=0.1701, simple_loss=0.2518, pruned_loss=0.04427, over 18240.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2718, pruned_loss=0.05163, over 3573739.77 frames. ], batch size: 45, lr: 1.04e-02, grad_scale: 4.0 +2023-03-09 02:05:27,761 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.812e+02 3.086e+02 3.620e+02 4.300e+02 1.406e+03, threshold=7.239e+02, percent-clipped=4.0 +2023-03-09 02:06:16,692 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2043, 4.1481, 5.2426, 3.2174, 4.6129, 2.8102, 3.2054, 2.1080], + device='cuda:2'), covar=tensor([0.0760, 0.0702, 0.0077, 0.0631, 0.0482, 0.2204, 0.2429, 0.1695], + device='cuda:2'), in_proj_covar=tensor([0.0196, 0.0214, 0.0118, 0.0168, 0.0226, 0.0246, 0.0278, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 02:06:18,396 INFO [train.py:898] (2/4) Epoch 11, batch 1550, loss[loss=0.2083, simple_loss=0.2882, pruned_loss=0.06418, over 17821.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2724, pruned_loss=0.05195, over 3584128.30 frames. ], batch size: 70, lr: 1.04e-02, grad_scale: 4.0 +2023-03-09 02:06:58,480 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37924.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:07:17,441 INFO [train.py:898] (2/4) Epoch 11, batch 1600, loss[loss=0.1662, simple_loss=0.2485, pruned_loss=0.04193, over 18497.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2727, pruned_loss=0.052, over 3588747.18 frames. ], batch size: 47, lr: 1.04e-02, grad_scale: 8.0 +2023-03-09 02:07:24,289 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.933e+02 3.097e+02 3.749e+02 4.631e+02 9.709e+02, threshold=7.497e+02, percent-clipped=4.0 +2023-03-09 02:07:42,046 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.45 vs. limit=5.0 +2023-03-09 02:07:54,469 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=37972.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:08:02,499 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37979.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:08:15,886 INFO [train.py:898] (2/4) Epoch 11, batch 1650, loss[loss=0.2213, simple_loss=0.2999, pruned_loss=0.07134, over 16266.00 frames. 
], tot_loss[loss=0.1879, simple_loss=0.2722, pruned_loss=0.05179, over 3579714.14 frames. ], batch size: 94, lr: 1.04e-02, grad_scale: 8.0 +2023-03-09 02:08:18,546 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0910, 5.0610, 5.0566, 4.8630, 4.7546, 4.9485, 5.3106, 5.2646], + device='cuda:2'), covar=tensor([0.0064, 0.0064, 0.0069, 0.0089, 0.0071, 0.0098, 0.0051, 0.0082], + device='cuda:2'), in_proj_covar=tensor([0.0080, 0.0056, 0.0059, 0.0074, 0.0062, 0.0086, 0.0071, 0.0071], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 02:08:55,144 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38020.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:09:18,070 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38040.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:09:18,736 INFO [train.py:898] (2/4) Epoch 11, batch 1700, loss[loss=0.1899, simple_loss=0.2743, pruned_loss=0.05269, over 17733.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2731, pruned_loss=0.05233, over 3579529.54 frames. ], batch size: 70, lr: 1.04e-02, grad_scale: 8.0 +2023-03-09 02:09:25,206 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.262e+02 3.409e+02 3.922e+02 5.492e+02 2.210e+03, threshold=7.843e+02, percent-clipped=9.0 +2023-03-09 02:09:50,078 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=38068.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:10:10,758 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9549, 3.5855, 4.5518, 4.1419, 3.2811, 4.9068, 4.1768, 3.1885], + device='cuda:2'), covar=tensor([0.0397, 0.0908, 0.0193, 0.0278, 0.1209, 0.0172, 0.0402, 0.0838], + device='cuda:2'), in_proj_covar=tensor([0.0188, 0.0221, 0.0150, 0.0143, 0.0214, 0.0185, 0.0207, 0.0189], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 02:10:16,084 INFO [train.py:898] (2/4) Epoch 11, batch 1750, loss[loss=0.1867, simple_loss=0.2743, pruned_loss=0.04956, over 17861.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.272, pruned_loss=0.05187, over 3591605.52 frames. ], batch size: 70, lr: 1.04e-02, grad_scale: 8.0 +2023-03-09 02:10:23,638 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-03-09 02:11:15,128 INFO [train.py:898] (2/4) Epoch 11, batch 1800, loss[loss=0.17, simple_loss=0.2484, pruned_loss=0.04583, over 18269.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2716, pruned_loss=0.05167, over 3591574.54 frames. 
], batch size: 47, lr: 1.04e-02, grad_scale: 8.0 +2023-03-09 02:11:21,535 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.087e+02 3.079e+02 3.713e+02 4.653e+02 8.656e+02, threshold=7.427e+02, percent-clipped=3.0 +2023-03-09 02:11:27,370 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.2653, 3.1777, 4.4906, 3.9968, 2.9546, 2.8669, 3.9366, 4.5979], + device='cuda:2'), covar=tensor([0.0973, 0.1501, 0.0117, 0.0314, 0.0906, 0.1104, 0.0381, 0.0184], + device='cuda:2'), in_proj_covar=tensor([0.0135, 0.0241, 0.0100, 0.0158, 0.0174, 0.0173, 0.0170, 0.0142], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 02:11:41,693 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9673, 2.5530, 2.2252, 2.3589, 3.1724, 2.9380, 2.8294, 2.5899], + device='cuda:2'), covar=tensor([0.0213, 0.0288, 0.0547, 0.0372, 0.0160, 0.0171, 0.0327, 0.0323], + device='cuda:2'), in_proj_covar=tensor([0.0119, 0.0105, 0.0148, 0.0138, 0.0102, 0.0088, 0.0135, 0.0128], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 02:12:01,934 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-03-09 02:12:12,796 INFO [train.py:898] (2/4) Epoch 11, batch 1850, loss[loss=0.2006, simple_loss=0.2923, pruned_loss=0.05442, over 16902.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2727, pruned_loss=0.05223, over 3581092.88 frames. ], batch size: 78, lr: 1.04e-02, grad_scale: 8.0 +2023-03-09 02:13:06,022 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8986, 4.0943, 4.1214, 4.1435, 3.8329, 4.0447, 3.7009, 4.0125], + device='cuda:2'), covar=tensor([0.0228, 0.0354, 0.0227, 0.0407, 0.0324, 0.0228, 0.0919, 0.0324], + device='cuda:2'), in_proj_covar=tensor([0.0174, 0.0219, 0.0206, 0.0249, 0.0222, 0.0225, 0.0282, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-09 02:13:12,754 INFO [train.py:898] (2/4) Epoch 11, batch 1900, loss[loss=0.1816, simple_loss=0.2723, pruned_loss=0.04542, over 18558.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.273, pruned_loss=0.05211, over 3583193.62 frames. ], batch size: 49, lr: 1.03e-02, grad_scale: 8.0 +2023-03-09 02:13:19,697 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.086e+02 3.362e+02 3.965e+02 4.724e+02 1.180e+03, threshold=7.931e+02, percent-clipped=5.0 +2023-03-09 02:13:26,726 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2026, 4.2477, 2.3215, 4.2172, 5.2534, 2.5253, 3.7728, 3.9550], + device='cuda:2'), covar=tensor([0.0085, 0.1011, 0.1673, 0.0562, 0.0043, 0.1348, 0.0706, 0.0724], + device='cuda:2'), in_proj_covar=tensor([0.0114, 0.0230, 0.0190, 0.0187, 0.0090, 0.0172, 0.0199, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 02:14:11,577 INFO [train.py:898] (2/4) Epoch 11, batch 1950, loss[loss=0.222, simple_loss=0.3059, pruned_loss=0.06907, over 18031.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2734, pruned_loss=0.05232, over 3576356.20 frames. 
], batch size: 65, lr: 1.03e-02, grad_scale: 8.0 +2023-03-09 02:15:04,238 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38335.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:15:10,799 INFO [train.py:898] (2/4) Epoch 11, batch 2000, loss[loss=0.1799, simple_loss=0.2556, pruned_loss=0.05213, over 18414.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.273, pruned_loss=0.05202, over 3585885.73 frames. ], batch size: 43, lr: 1.03e-02, grad_scale: 8.0 +2023-03-09 02:15:17,655 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.188e+02 3.163e+02 3.706e+02 4.529e+02 9.366e+02, threshold=7.411e+02, percent-clipped=1.0 +2023-03-09 02:15:32,760 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38360.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:15:35,854 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8658, 2.8422, 4.3110, 4.1564, 2.6324, 4.7018, 4.1044, 3.0058], + device='cuda:2'), covar=tensor([0.0300, 0.1316, 0.0216, 0.0217, 0.1326, 0.0166, 0.0315, 0.0894], + device='cuda:2'), in_proj_covar=tensor([0.0184, 0.0216, 0.0147, 0.0140, 0.0207, 0.0179, 0.0203, 0.0184], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 02:16:08,898 INFO [train.py:898] (2/4) Epoch 11, batch 2050, loss[loss=0.1778, simple_loss=0.2697, pruned_loss=0.043, over 18618.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2734, pruned_loss=0.05221, over 3587130.53 frames. ], batch size: 52, lr: 1.03e-02, grad_scale: 8.0 +2023-03-09 02:16:44,619 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38421.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:16:53,524 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.66 vs. limit=5.0 +2023-03-09 02:17:01,316 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2240, 4.2253, 2.5208, 4.2401, 5.2008, 2.6507, 3.7616, 3.7397], + device='cuda:2'), covar=tensor([0.0078, 0.1121, 0.1508, 0.0530, 0.0053, 0.1216, 0.0706, 0.0822], + device='cuda:2'), in_proj_covar=tensor([0.0113, 0.0227, 0.0186, 0.0183, 0.0090, 0.0171, 0.0197, 0.0201], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 02:17:08,278 INFO [train.py:898] (2/4) Epoch 11, batch 2100, loss[loss=0.1576, simple_loss=0.2478, pruned_loss=0.03373, over 18411.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2731, pruned_loss=0.05246, over 3569807.87 frames. ], batch size: 48, lr: 1.03e-02, grad_scale: 4.0 +2023-03-09 02:17:15,906 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.369e+02 3.250e+02 4.019e+02 4.989e+02 1.105e+03, threshold=8.037e+02, percent-clipped=2.0 +2023-03-09 02:17:21,171 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.36 vs. 
limit=5.0 +2023-03-09 02:17:42,196 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6427, 2.4890, 4.3148, 4.2622, 2.1553, 4.6605, 3.9176, 2.7166], + device='cuda:2'), covar=tensor([0.0385, 0.1950, 0.0277, 0.0192, 0.2088, 0.0185, 0.0430, 0.1359], + device='cuda:2'), in_proj_covar=tensor([0.0184, 0.0216, 0.0146, 0.0139, 0.0208, 0.0178, 0.0203, 0.0185], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 02:18:06,737 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-09 02:18:07,058 INFO [train.py:898] (2/4) Epoch 11, batch 2150, loss[loss=0.1596, simple_loss=0.2443, pruned_loss=0.03743, over 18357.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2737, pruned_loss=0.0525, over 3568562.65 frames. ], batch size: 46, lr: 1.03e-02, grad_scale: 4.0 +2023-03-09 02:19:05,990 INFO [train.py:898] (2/4) Epoch 11, batch 2200, loss[loss=0.184, simple_loss=0.271, pruned_loss=0.04853, over 18549.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2742, pruned_loss=0.05263, over 3568220.40 frames. ], batch size: 49, lr: 1.03e-02, grad_scale: 4.0 +2023-03-09 02:19:13,831 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.988e+02 3.259e+02 3.995e+02 5.001e+02 1.029e+03, threshold=7.990e+02, percent-clipped=4.0 +2023-03-09 02:19:48,337 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9184, 3.0029, 4.3729, 4.3400, 2.6984, 4.7959, 4.1496, 2.8296], + device='cuda:2'), covar=tensor([0.0310, 0.1222, 0.0183, 0.0182, 0.1406, 0.0134, 0.0346, 0.1052], + device='cuda:2'), in_proj_covar=tensor([0.0185, 0.0215, 0.0147, 0.0139, 0.0208, 0.0178, 0.0203, 0.0187], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 02:20:04,918 INFO [train.py:898] (2/4) Epoch 11, batch 2250, loss[loss=0.1891, simple_loss=0.2812, pruned_loss=0.04849, over 15938.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2729, pruned_loss=0.05186, over 3574476.97 frames. ], batch size: 94, lr: 1.03e-02, grad_scale: 4.0 +2023-03-09 02:20:31,166 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4478, 5.9939, 5.3793, 5.7494, 5.5125, 5.4921, 6.0075, 5.9784], + device='cuda:2'), covar=tensor([0.0934, 0.0603, 0.0498, 0.0600, 0.1221, 0.0610, 0.0515, 0.0563], + device='cuda:2'), in_proj_covar=tensor([0.0515, 0.0427, 0.0328, 0.0466, 0.0634, 0.0468, 0.0610, 0.0449], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 02:20:57,237 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38635.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:21:04,262 INFO [train.py:898] (2/4) Epoch 11, batch 2300, loss[loss=0.1897, simple_loss=0.2744, pruned_loss=0.05248, over 18252.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2738, pruned_loss=0.05183, over 3580378.63 frames. ], batch size: 47, lr: 1.03e-02, grad_scale: 4.0 +2023-03-09 02:21:12,478 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.254e+02 3.045e+02 3.798e+02 4.329e+02 8.065e+02, threshold=7.597e+02, percent-clipped=1.0 +2023-03-09 02:21:13,341 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. 
limit=2.0 +2023-03-09 02:21:35,889 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0522, 5.0300, 4.6097, 5.0707, 5.0195, 4.2941, 4.8591, 4.5684], + device='cuda:2'), covar=tensor([0.0495, 0.0482, 0.1595, 0.0700, 0.0559, 0.0554, 0.0532, 0.1157], + device='cuda:2'), in_proj_covar=tensor([0.0411, 0.0464, 0.0616, 0.0364, 0.0351, 0.0425, 0.0455, 0.0586], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 02:21:42,168 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.83 vs. limit=2.0 +2023-03-09 02:21:51,494 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6097, 3.9059, 5.2837, 4.3703, 3.2673, 2.9601, 4.7424, 5.3919], + device='cuda:2'), covar=tensor([0.0843, 0.1467, 0.0101, 0.0377, 0.0937, 0.1132, 0.0288, 0.0155], + device='cuda:2'), in_proj_covar=tensor([0.0139, 0.0246, 0.0105, 0.0162, 0.0181, 0.0178, 0.0174, 0.0147], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 02:21:54,108 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=38683.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:22:03,148 INFO [train.py:898] (2/4) Epoch 11, batch 2350, loss[loss=0.2318, simple_loss=0.3165, pruned_loss=0.07354, over 18005.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2744, pruned_loss=0.0522, over 3587254.18 frames. ], batch size: 65, lr: 1.03e-02, grad_scale: 4.0 +2023-03-09 02:22:32,484 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38716.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:23:01,652 INFO [train.py:898] (2/4) Epoch 11, batch 2400, loss[loss=0.1959, simple_loss=0.2887, pruned_loss=0.05153, over 18359.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2737, pruned_loss=0.05219, over 3578233.99 frames. ], batch size: 55, lr: 1.03e-02, grad_scale: 8.0 +2023-03-09 02:23:10,147 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.250e+02 3.115e+02 4.067e+02 4.907e+02 9.173e+02, threshold=8.134e+02, percent-clipped=4.0 +2023-03-09 02:24:00,847 INFO [train.py:898] (2/4) Epoch 11, batch 2450, loss[loss=0.1846, simple_loss=0.2719, pruned_loss=0.04862, over 18113.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.273, pruned_loss=0.05187, over 3590307.36 frames. ], batch size: 62, lr: 1.03e-02, grad_scale: 8.0 +2023-03-09 02:24:38,805 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38823.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:24:59,957 INFO [train.py:898] (2/4) Epoch 11, batch 2500, loss[loss=0.2092, simple_loss=0.3031, pruned_loss=0.05771, over 18618.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2731, pruned_loss=0.05188, over 3586811.98 frames. ], batch size: 52, lr: 1.03e-02, grad_scale: 8.0 +2023-03-09 02:25:08,484 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.178e+02 3.118e+02 3.887e+02 4.654e+02 1.248e+03, threshold=7.775e+02, percent-clipped=2.0 +2023-03-09 02:25:50,683 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38884.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:25:58,814 INFO [train.py:898] (2/4) Epoch 11, batch 2550, loss[loss=0.1966, simple_loss=0.2874, pruned_loss=0.05287, over 18455.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2731, pruned_loss=0.05185, over 3591278.81 frames. 
], batch size: 59, lr: 1.03e-02, grad_scale: 8.0 +2023-03-09 02:26:43,203 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2152, 5.4291, 2.6177, 5.2947, 5.1493, 5.4547, 5.3314, 2.6848], + device='cuda:2'), covar=tensor([0.0152, 0.0050, 0.0810, 0.0064, 0.0061, 0.0055, 0.0070, 0.0973], + device='cuda:2'), in_proj_covar=tensor([0.0076, 0.0065, 0.0086, 0.0080, 0.0075, 0.0065, 0.0074, 0.0090], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 02:26:57,533 INFO [train.py:898] (2/4) Epoch 11, batch 2600, loss[loss=0.1988, simple_loss=0.2898, pruned_loss=0.05384, over 18573.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2733, pruned_loss=0.05168, over 3597705.17 frames. ], batch size: 54, lr: 1.03e-02, grad_scale: 8.0 +2023-03-09 02:27:06,468 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.982e+02 2.998e+02 3.498e+02 4.234e+02 9.480e+02, threshold=6.995e+02, percent-clipped=2.0 +2023-03-09 02:27:56,936 INFO [train.py:898] (2/4) Epoch 11, batch 2650, loss[loss=0.1844, simple_loss=0.2768, pruned_loss=0.04603, over 18370.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2731, pruned_loss=0.05142, over 3605285.21 frames. ], batch size: 55, lr: 1.02e-02, grad_scale: 8.0 +2023-03-09 02:28:21,873 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4410, 3.9503, 5.2356, 3.0244, 4.6168, 2.6863, 3.1969, 2.1374], + device='cuda:2'), covar=tensor([0.0687, 0.0719, 0.0072, 0.0673, 0.0470, 0.2274, 0.2422, 0.1690], + device='cuda:2'), in_proj_covar=tensor([0.0194, 0.0215, 0.0117, 0.0169, 0.0228, 0.0246, 0.0281, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 02:28:27,572 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39016.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:28:50,834 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5458, 6.0376, 5.5112, 5.8168, 5.6060, 5.5196, 6.0980, 6.0538], + device='cuda:2'), covar=tensor([0.1045, 0.0735, 0.0403, 0.0615, 0.1566, 0.0707, 0.0534, 0.0608], + device='cuda:2'), in_proj_covar=tensor([0.0518, 0.0431, 0.0328, 0.0465, 0.0643, 0.0469, 0.0610, 0.0455], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 02:28:56,251 INFO [train.py:898] (2/4) Epoch 11, batch 2700, loss[loss=0.1508, simple_loss=0.2364, pruned_loss=0.03257, over 18178.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2721, pruned_loss=0.05142, over 3603663.67 frames. 
], batch size: 44, lr: 1.02e-02, grad_scale: 8.0 +2023-03-09 02:29:04,927 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.335e+02 3.292e+02 3.968e+02 4.768e+02 1.831e+03, threshold=7.936e+02, percent-clipped=8.0 +2023-03-09 02:29:05,480 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6818, 2.9791, 4.3653, 4.0500, 2.7481, 4.7126, 4.0458, 2.8546], + device='cuda:2'), covar=tensor([0.0418, 0.1189, 0.0171, 0.0256, 0.1225, 0.0137, 0.0418, 0.0907], + device='cuda:2'), in_proj_covar=tensor([0.0187, 0.0219, 0.0148, 0.0140, 0.0207, 0.0182, 0.0207, 0.0189], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 02:29:24,686 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39064.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:29:55,873 INFO [train.py:898] (2/4) Epoch 11, batch 2750, loss[loss=0.1784, simple_loss=0.2611, pruned_loss=0.04788, over 18273.00 frames. ], tot_loss[loss=0.1886, simple_loss=0.2735, pruned_loss=0.05184, over 3603766.28 frames. ], batch size: 45, lr: 1.02e-02, grad_scale: 8.0 +2023-03-09 02:30:55,601 INFO [train.py:898] (2/4) Epoch 11, batch 2800, loss[loss=0.2258, simple_loss=0.3063, pruned_loss=0.07269, over 18085.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2746, pruned_loss=0.0526, over 3573413.01 frames. ], batch size: 62, lr: 1.02e-02, grad_scale: 8.0 +2023-03-09 02:31:04,046 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.053e+02 3.386e+02 4.032e+02 4.876e+02 1.472e+03, threshold=8.064e+02, percent-clipped=5.0 +2023-03-09 02:31:42,086 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39179.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:31:49,015 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39185.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:31:55,188 INFO [train.py:898] (2/4) Epoch 11, batch 2850, loss[loss=0.2114, simple_loss=0.2955, pruned_loss=0.06361, over 18375.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2743, pruned_loss=0.05256, over 3577651.04 frames. ], batch size: 56, lr: 1.02e-02, grad_scale: 8.0 +2023-03-09 02:32:44,415 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3751, 2.6297, 2.4706, 2.6124, 3.4250, 3.2440, 2.9979, 2.7341], + device='cuda:2'), covar=tensor([0.0146, 0.0221, 0.0436, 0.0277, 0.0126, 0.0118, 0.0300, 0.0295], + device='cuda:2'), in_proj_covar=tensor([0.0119, 0.0107, 0.0147, 0.0136, 0.0104, 0.0091, 0.0134, 0.0130], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 02:32:54,403 INFO [train.py:898] (2/4) Epoch 11, batch 2900, loss[loss=0.1885, simple_loss=0.2745, pruned_loss=0.05123, over 17855.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.274, pruned_loss=0.05236, over 3575937.95 frames. 
], batch size: 70, lr: 1.02e-02, grad_scale: 8.0 +2023-03-09 02:33:00,516 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39246.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:33:02,135 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.400e+02 3.149e+02 3.663e+02 4.555e+02 1.238e+03, threshold=7.326e+02, percent-clipped=2.0 +2023-03-09 02:33:36,259 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1197, 4.0353, 5.0828, 2.8326, 4.4717, 2.5946, 2.9078, 1.9144], + device='cuda:2'), covar=tensor([0.0814, 0.0610, 0.0092, 0.0709, 0.0485, 0.2273, 0.2553, 0.1694], + device='cuda:2'), in_proj_covar=tensor([0.0193, 0.0212, 0.0117, 0.0167, 0.0225, 0.0244, 0.0277, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 02:33:45,303 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39284.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:33:53,615 INFO [train.py:898] (2/4) Epoch 11, batch 2950, loss[loss=0.1869, simple_loss=0.2818, pruned_loss=0.04593, over 18299.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2738, pruned_loss=0.05234, over 3573701.16 frames. ], batch size: 54, lr: 1.02e-02, grad_scale: 8.0 +2023-03-09 02:34:00,978 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4620, 4.5164, 2.6003, 4.4687, 5.3861, 2.8151, 4.0322, 4.1535], + device='cuda:2'), covar=tensor([0.0068, 0.1048, 0.1480, 0.0475, 0.0047, 0.1173, 0.0554, 0.0692], + device='cuda:2'), in_proj_covar=tensor([0.0117, 0.0232, 0.0190, 0.0185, 0.0089, 0.0175, 0.0200, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 02:34:53,167 INFO [train.py:898] (2/4) Epoch 11, batch 3000, loss[loss=0.2237, simple_loss=0.3055, pruned_loss=0.07091, over 18208.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2744, pruned_loss=0.05268, over 3564329.25 frames. ], batch size: 60, lr: 1.02e-02, grad_scale: 8.0 +2023-03-09 02:34:53,167 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 02:35:05,600 INFO [train.py:932] (2/4) Epoch 11, validation: loss=0.1587, simple_loss=0.2603, pruned_loss=0.02852, over 944034.00 frames. +2023-03-09 02:35:05,600 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 02:35:10,835 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39345.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:35:13,862 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.978e+02 3.242e+02 3.927e+02 4.658e+02 9.416e+02, threshold=7.854e+02, percent-clipped=4.0 +2023-03-09 02:36:04,358 INFO [train.py:898] (2/4) Epoch 11, batch 3050, loss[loss=0.1923, simple_loss=0.2773, pruned_loss=0.05371, over 18482.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2739, pruned_loss=0.05228, over 3568660.26 frames. 
], batch size: 51, lr: 1.02e-02, grad_scale: 8.0 +2023-03-09 02:36:18,641 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8200, 3.0544, 4.3504, 3.8708, 2.8370, 4.6581, 4.0803, 3.0879], + device='cuda:2'), covar=tensor([0.0418, 0.1315, 0.0211, 0.0343, 0.1384, 0.0170, 0.0462, 0.0926], + device='cuda:2'), in_proj_covar=tensor([0.0183, 0.0217, 0.0149, 0.0139, 0.0205, 0.0180, 0.0204, 0.0186], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 02:37:04,060 INFO [train.py:898] (2/4) Epoch 11, batch 3100, loss[loss=0.1726, simple_loss=0.2541, pruned_loss=0.04553, over 17696.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2724, pruned_loss=0.0513, over 3582748.18 frames. ], batch size: 39, lr: 1.02e-02, grad_scale: 8.0 +2023-03-09 02:37:04,361 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7940, 5.2507, 5.2578, 5.3128, 4.8496, 5.1453, 4.3560, 5.1778], + device='cuda:2'), covar=tensor([0.0228, 0.0341, 0.0208, 0.0278, 0.0325, 0.0233, 0.1466, 0.0262], + device='cuda:2'), in_proj_covar=tensor([0.0178, 0.0222, 0.0209, 0.0254, 0.0224, 0.0226, 0.0287, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-09 02:37:12,129 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.117e+02 3.287e+02 3.708e+02 4.469e+02 1.141e+03, threshold=7.415e+02, percent-clipped=2.0 +2023-03-09 02:37:30,774 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39463.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:37:34,239 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9651, 4.5087, 4.6811, 3.2780, 3.8233, 3.7734, 2.7163, 2.4079], + device='cuda:2'), covar=tensor([0.0209, 0.0184, 0.0079, 0.0297, 0.0343, 0.0150, 0.0693, 0.0917], + device='cuda:2'), in_proj_covar=tensor([0.0059, 0.0049, 0.0047, 0.0060, 0.0080, 0.0056, 0.0072, 0.0078], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 02:37:49,504 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39479.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:38:02,611 INFO [train.py:898] (2/4) Epoch 11, batch 3150, loss[loss=0.2328, simple_loss=0.3106, pruned_loss=0.0775, over 18241.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2732, pruned_loss=0.0519, over 3573405.08 frames. ], batch size: 60, lr: 1.02e-02, grad_scale: 8.0 +2023-03-09 02:38:42,064 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39524.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:38:45,257 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39527.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:39:01,994 INFO [train.py:898] (2/4) Epoch 11, batch 3200, loss[loss=0.1802, simple_loss=0.2626, pruned_loss=0.0489, over 18379.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2731, pruned_loss=0.05174, over 3589697.51 frames. 
], batch size: 50, lr: 1.02e-02, grad_scale: 8.0 +2023-03-09 02:39:02,169 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39541.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:39:09,603 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.046e+02 3.172e+02 3.769e+02 4.644e+02 9.591e+02, threshold=7.537e+02, percent-clipped=4.0 +2023-03-09 02:40:01,201 INFO [train.py:898] (2/4) Epoch 11, batch 3250, loss[loss=0.2129, simple_loss=0.3052, pruned_loss=0.06035, over 18301.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2721, pruned_loss=0.05147, over 3597009.62 frames. ], batch size: 57, lr: 1.02e-02, grad_scale: 8.0 +2023-03-09 02:40:43,126 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.81 vs. limit=2.0 +2023-03-09 02:40:58,375 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39639.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 02:40:59,365 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39640.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:41:00,286 INFO [train.py:898] (2/4) Epoch 11, batch 3300, loss[loss=0.1913, simple_loss=0.28, pruned_loss=0.05129, over 18343.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2721, pruned_loss=0.05137, over 3595022.42 frames. ], batch size: 56, lr: 1.02e-02, grad_scale: 8.0 +2023-03-09 02:41:08,712 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.189e+02 3.111e+02 3.675e+02 4.353e+02 7.934e+02, threshold=7.351e+02, percent-clipped=2.0 +2023-03-09 02:41:59,386 INFO [train.py:898] (2/4) Epoch 11, batch 3350, loss[loss=0.2174, simple_loss=0.304, pruned_loss=0.06534, over 17644.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.272, pruned_loss=0.05155, over 3579578.41 frames. ], batch size: 70, lr: 1.02e-02, grad_scale: 8.0 +2023-03-09 02:42:10,608 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39700.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 02:42:58,348 INFO [train.py:898] (2/4) Epoch 11, batch 3400, loss[loss=0.1664, simple_loss=0.2527, pruned_loss=0.04004, over 18260.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2722, pruned_loss=0.05174, over 3571393.99 frames. ], batch size: 47, lr: 1.02e-02, grad_scale: 8.0 +2023-03-09 02:43:06,473 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.021e+02 3.201e+02 3.760e+02 4.727e+02 8.419e+02, threshold=7.521e+02, percent-clipped=1.0 +2023-03-09 02:43:22,144 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9279, 3.1035, 4.4114, 4.0553, 2.6785, 4.6851, 4.2184, 3.0988], + device='cuda:2'), covar=tensor([0.0422, 0.1289, 0.0183, 0.0336, 0.1544, 0.0188, 0.0369, 0.0900], + device='cuda:2'), in_proj_covar=tensor([0.0191, 0.0224, 0.0154, 0.0146, 0.0214, 0.0187, 0.0210, 0.0193], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 02:43:29,999 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39767.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:43:57,995 INFO [train.py:898] (2/4) Epoch 11, batch 3450, loss[loss=0.1947, simple_loss=0.2852, pruned_loss=0.05208, over 17989.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.272, pruned_loss=0.05134, over 3577365.91 frames. 
], batch size: 65, lr: 1.01e-02, grad_scale: 8.0 +2023-03-09 02:44:19,727 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5016, 1.9778, 2.3830, 2.5163, 2.9603, 4.5811, 4.2544, 3.5796], + device='cuda:2'), covar=tensor([0.1476, 0.2517, 0.2974, 0.1635, 0.2485, 0.0165, 0.0427, 0.0548], + device='cuda:2'), in_proj_covar=tensor([0.0244, 0.0299, 0.0315, 0.0246, 0.0359, 0.0187, 0.0260, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 02:44:26,894 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39815.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:44:31,377 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39819.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:44:42,911 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39828.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:44:57,101 INFO [train.py:898] (2/4) Epoch 11, batch 3500, loss[loss=0.1942, simple_loss=0.2758, pruned_loss=0.05627, over 18280.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2728, pruned_loss=0.05181, over 3570027.16 frames. ], batch size: 49, lr: 1.01e-02, grad_scale: 8.0 +2023-03-09 02:44:57,456 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39841.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:45:05,097 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.228e+02 3.240e+02 3.890e+02 4.468e+02 8.251e+02, threshold=7.780e+02, percent-clipped=2.0 +2023-03-09 02:45:37,766 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39876.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:45:52,232 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39889.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:45:54,246 INFO [train.py:898] (2/4) Epoch 11, batch 3550, loss[loss=0.1816, simple_loss=0.2604, pruned_loss=0.05146, over 18284.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2733, pruned_loss=0.0518, over 3582487.29 frames. ], batch size: 45, lr: 1.01e-02, grad_scale: 8.0 +2023-03-09 02:46:35,940 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2399, 4.3179, 2.4295, 4.3132, 5.2491, 2.4551, 3.8274, 4.0773], + device='cuda:2'), covar=tensor([0.0091, 0.0833, 0.1541, 0.0445, 0.0060, 0.1322, 0.0672, 0.0608], + device='cuda:2'), in_proj_covar=tensor([0.0119, 0.0234, 0.0191, 0.0186, 0.0090, 0.0175, 0.0201, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 02:46:48,045 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39940.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:46:48,865 INFO [train.py:898] (2/4) Epoch 11, batch 3600, loss[loss=0.2228, simple_loss=0.309, pruned_loss=0.06831, over 18272.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.272, pruned_loss=0.05117, over 3597176.61 frames. ], batch size: 57, lr: 1.01e-02, grad_scale: 8.0 +2023-03-09 02:46:55,870 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.247e+02 3.210e+02 3.696e+02 4.808e+02 8.251e+02, threshold=7.392e+02, percent-clipped=2.0 +2023-03-09 02:47:04,134 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. 
limit=2.0 +2023-03-09 02:47:53,918 INFO [train.py:898] (2/4) Epoch 12, batch 0, loss[loss=0.1784, simple_loss=0.2655, pruned_loss=0.04568, over 18414.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2655, pruned_loss=0.04568, over 18414.00 frames. ], batch size: 50, lr: 9.70e-03, grad_scale: 8.0 +2023-03-09 02:47:53,919 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 02:48:01,007 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9095, 4.7364, 2.8766, 4.7572, 4.4254, 4.7704, 4.6240, 2.6545], + device='cuda:2'), covar=tensor([0.0174, 0.0089, 0.0706, 0.0072, 0.0101, 0.0089, 0.0107, 0.0956], + device='cuda:2'), in_proj_covar=tensor([0.0079, 0.0067, 0.0088, 0.0082, 0.0077, 0.0066, 0.0076, 0.0092], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0004, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 02:48:04,690 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3472, 5.7647, 5.3689, 5.6521, 5.3915, 5.3967, 5.8320, 5.7504], + device='cuda:2'), covar=tensor([0.0962, 0.0541, 0.0278, 0.0452, 0.1172, 0.0597, 0.0431, 0.0503], + device='cuda:2'), in_proj_covar=tensor([0.0517, 0.0429, 0.0324, 0.0455, 0.0630, 0.0461, 0.0605, 0.0452], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 02:48:05,852 INFO [train.py:932] (2/4) Epoch 12, validation: loss=0.1577, simple_loss=0.2601, pruned_loss=0.02771, over 944034.00 frames. +2023-03-09 02:48:05,853 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 02:48:21,417 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=39988.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:48:29,610 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39995.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 02:49:09,592 INFO [train.py:898] (2/4) Epoch 12, batch 50, loss[loss=0.1823, simple_loss=0.2689, pruned_loss=0.04782, over 18281.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2753, pruned_loss=0.05291, over 799657.04 frames. ], batch size: 47, lr: 9.69e-03, grad_scale: 8.0 +2023-03-09 02:49:35,919 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.813e+02 3.496e+02 4.076e+02 5.396e+02 1.029e+03, threshold=8.152e+02, percent-clipped=4.0 +2023-03-09 02:50:08,397 INFO [train.py:898] (2/4) Epoch 12, batch 100, loss[loss=0.1797, simple_loss=0.2628, pruned_loss=0.04826, over 18559.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2732, pruned_loss=0.05191, over 1405473.67 frames. ], batch size: 49, lr: 9.69e-03, grad_scale: 4.0 +2023-03-09 02:51:00,709 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40119.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:51:05,140 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40123.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:51:07,307 INFO [train.py:898] (2/4) Epoch 12, batch 150, loss[loss=0.1586, simple_loss=0.2342, pruned_loss=0.04151, over 18432.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2711, pruned_loss=0.05114, over 1896371.96 frames. 
], batch size: 43, lr: 9.68e-03, grad_scale: 4.0 +2023-03-09 02:51:08,897 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9475, 2.9963, 4.2554, 3.9291, 2.8758, 4.7646, 4.1498, 2.8290], + device='cuda:2'), covar=tensor([0.0366, 0.1327, 0.0285, 0.0309, 0.1374, 0.0155, 0.0390, 0.1083], + device='cuda:2'), in_proj_covar=tensor([0.0191, 0.0226, 0.0155, 0.0148, 0.0215, 0.0188, 0.0211, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 02:51:31,324 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-09 02:51:36,219 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.055e+02 3.093e+02 3.775e+02 4.453e+02 9.107e+02, threshold=7.551e+02, percent-clipped=1.0 +2023-03-09 02:51:39,178 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-09 02:51:46,085 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-09 02:51:54,616 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-03-09 02:51:57,414 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40167.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:52:01,979 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40171.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:52:06,341 INFO [train.py:898] (2/4) Epoch 12, batch 200, loss[loss=0.1937, simple_loss=0.2824, pruned_loss=0.05246, over 18476.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2722, pruned_loss=0.05149, over 2269801.32 frames. ], batch size: 53, lr: 9.68e-03, grad_scale: 4.0 +2023-03-09 02:52:21,021 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.6776, 5.5632, 5.1908, 5.6050, 5.5440, 4.9630, 5.4747, 5.2142], + device='cuda:2'), covar=tensor([0.0345, 0.0381, 0.1350, 0.0673, 0.0518, 0.0383, 0.0399, 0.0914], + device='cuda:2'), in_proj_covar=tensor([0.0399, 0.0463, 0.0614, 0.0363, 0.0348, 0.0420, 0.0450, 0.0578], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 02:52:29,875 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40195.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:52:58,288 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40218.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:53:05,985 INFO [train.py:898] (2/4) Epoch 12, batch 250, loss[loss=0.1831, simple_loss=0.2731, pruned_loss=0.04651, over 18495.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2727, pruned_loss=0.05125, over 2558005.58 frames. ], batch size: 51, lr: 9.67e-03, grad_scale: 4.0 +2023-03-09 02:53:33,645 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.098e+02 3.173e+02 3.973e+02 4.861e+02 1.364e+03, threshold=7.946e+02, percent-clipped=3.0 +2023-03-09 02:53:42,365 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40256.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:54:01,823 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40272.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:54:05,003 INFO [train.py:898] (2/4) Epoch 12, batch 300, loss[loss=0.1902, simple_loss=0.2754, pruned_loss=0.05251, over 18488.00 frames. 
], tot_loss[loss=0.1864, simple_loss=0.2716, pruned_loss=0.05061, over 2794776.87 frames. ], batch size: 53, lr: 9.66e-03, grad_scale: 4.0 +2023-03-09 02:54:09,959 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40279.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:54:28,432 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40295.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 02:55:04,167 INFO [train.py:898] (2/4) Epoch 12, batch 350, loss[loss=0.1929, simple_loss=0.2802, pruned_loss=0.05286, over 17928.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2722, pruned_loss=0.05069, over 2964802.96 frames. ], batch size: 65, lr: 9.66e-03, grad_scale: 4.0 +2023-03-09 02:55:10,081 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40330.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:55:13,540 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40333.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:55:24,773 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40343.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 02:55:31,750 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.235e+02 3.021e+02 3.795e+02 4.582e+02 1.168e+03, threshold=7.590e+02, percent-clipped=5.0 +2023-03-09 02:56:02,389 INFO [train.py:898] (2/4) Epoch 12, batch 400, loss[loss=0.1983, simple_loss=0.2894, pruned_loss=0.05364, over 18483.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2713, pruned_loss=0.05023, over 3105383.76 frames. ], batch size: 59, lr: 9.65e-03, grad_scale: 8.0 +2023-03-09 02:56:11,353 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6592, 2.6361, 3.9626, 3.5990, 2.5634, 4.3088, 3.8513, 2.7362], + device='cuda:2'), covar=tensor([0.0361, 0.1402, 0.0227, 0.0331, 0.1459, 0.0196, 0.0413, 0.0968], + device='cuda:2'), in_proj_covar=tensor([0.0189, 0.0224, 0.0155, 0.0146, 0.0214, 0.0188, 0.0212, 0.0193], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 02:56:21,628 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40391.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:56:59,562 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40423.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:57:01,662 INFO [train.py:898] (2/4) Epoch 12, batch 450, loss[loss=0.1899, simple_loss=0.278, pruned_loss=0.05089, over 17056.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2715, pruned_loss=0.04992, over 3221292.44 frames. ], batch size: 78, lr: 9.65e-03, grad_scale: 8.0 +2023-03-09 02:57:02,100 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7687, 3.6923, 3.4109, 3.1315, 3.4155, 2.6707, 2.7409, 3.6573], + device='cuda:2'), covar=tensor([0.0033, 0.0071, 0.0059, 0.0108, 0.0075, 0.0159, 0.0167, 0.0050], + device='cuda:2'), in_proj_covar=tensor([0.0099, 0.0122, 0.0106, 0.0155, 0.0108, 0.0152, 0.0157, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 02:57:17,330 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.43 vs. 
limit=5.0 +2023-03-09 02:57:29,711 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.841e+02 3.105e+02 3.603e+02 4.102e+02 7.155e+02, threshold=7.205e+02, percent-clipped=0.0 +2023-03-09 02:57:55,816 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40471.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:57:55,874 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40471.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:58:00,091 INFO [train.py:898] (2/4) Epoch 12, batch 500, loss[loss=0.1863, simple_loss=0.2657, pruned_loss=0.05347, over 18181.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2709, pruned_loss=0.04983, over 3302340.06 frames. ], batch size: 44, lr: 9.64e-03, grad_scale: 8.0 +2023-03-09 02:58:03,996 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1999, 4.2363, 2.4423, 4.1439, 5.2846, 2.8101, 3.6211, 3.6410], + device='cuda:2'), covar=tensor([0.0103, 0.1299, 0.1651, 0.0626, 0.0054, 0.1230, 0.0858, 0.1016], + device='cuda:2'), in_proj_covar=tensor([0.0120, 0.0237, 0.0191, 0.0190, 0.0092, 0.0179, 0.0203, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 02:58:20,971 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40492.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:58:52,459 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40519.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:58:59,227 INFO [train.py:898] (2/4) Epoch 12, batch 550, loss[loss=0.1821, simple_loss=0.2701, pruned_loss=0.04706, over 18406.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2705, pruned_loss=0.04968, over 3376438.69 frames. ], batch size: 52, lr: 9.63e-03, grad_scale: 8.0 +2023-03-09 02:59:18,786 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.46 vs. limit=5.0 +2023-03-09 02:59:19,400 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3984, 5.1852, 5.5650, 5.4477, 5.3535, 6.1390, 5.8171, 5.5439], + device='cuda:2'), covar=tensor([0.0927, 0.0655, 0.0690, 0.0661, 0.1518, 0.0697, 0.0517, 0.1515], + device='cuda:2'), in_proj_covar=tensor([0.0311, 0.0242, 0.0253, 0.0256, 0.0295, 0.0360, 0.0240, 0.0352], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0003], + device='cuda:2') +2023-03-09 02:59:22,070 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. 
limit=2.0 +2023-03-09 02:59:26,873 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.116e+02 3.269e+02 4.036e+02 4.725e+02 9.923e+02, threshold=8.073e+02, percent-clipped=3.0 +2023-03-09 02:59:29,413 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40551.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:59:31,840 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40553.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:59:52,054 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0201, 3.8480, 5.2184, 4.6505, 3.0545, 3.0558, 4.5169, 5.3760], + device='cuda:2'), covar=tensor([0.0694, 0.1602, 0.0115, 0.0278, 0.0931, 0.1022, 0.0347, 0.0168], + device='cuda:2'), in_proj_covar=tensor([0.0138, 0.0247, 0.0106, 0.0162, 0.0178, 0.0176, 0.0174, 0.0151], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 02:59:57,021 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40574.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 02:59:57,879 INFO [train.py:898] (2/4) Epoch 12, batch 600, loss[loss=0.1825, simple_loss=0.2675, pruned_loss=0.04879, over 18409.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2709, pruned_loss=0.05028, over 3422101.89 frames. ], batch size: 50, lr: 9.63e-03, grad_scale: 4.0 +2023-03-09 03:00:42,395 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.44 vs. limit=5.0 +2023-03-09 03:00:56,592 INFO [train.py:898] (2/4) Epoch 12, batch 650, loss[loss=0.2045, simple_loss=0.2926, pruned_loss=0.05821, over 18360.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2705, pruned_loss=0.05017, over 3460586.16 frames. ], batch size: 56, lr: 9.62e-03, grad_scale: 4.0 +2023-03-09 03:00:59,786 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3242, 5.8393, 5.3100, 5.5727, 5.4096, 5.2932, 5.8392, 5.8017], + device='cuda:2'), covar=tensor([0.1196, 0.0626, 0.0496, 0.0671, 0.1323, 0.0712, 0.0522, 0.0628], + device='cuda:2'), in_proj_covar=tensor([0.0526, 0.0437, 0.0331, 0.0464, 0.0640, 0.0473, 0.0614, 0.0457], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 03:01:00,852 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40628.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:01:26,540 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.880e+02 2.745e+02 3.552e+02 4.267e+02 1.309e+03, threshold=7.104e+02, percent-clipped=1.0 +2023-03-09 03:01:28,127 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40651.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:01:55,471 INFO [train.py:898] (2/4) Epoch 12, batch 700, loss[loss=0.1812, simple_loss=0.2671, pruned_loss=0.04768, over 18534.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.271, pruned_loss=0.05006, over 3488623.30 frames. 
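On the optim.py:369 lines: each prints five quantiles (min, 25%, median, 75%, max) of recent gradient norms, then the clipping threshold and the percentage of recent batches that were clipped. In every such line here the threshold is about twice the logged median (e.g. 8.073e+02 vs. a median of 4.036e+02 just above), consistent with Clipping_scale=2.0 meaning threshold = 2.0 * median. A sketch of that bookkeeping; the class name, window size and exact quantile source are assumptions, not the real optim.py:

from collections import deque

import torch

class MedianGradClipper:
    def __init__(self, params, clipping_scale: float = 2.0, window: int = 128):
        self.params = list(params)
        self.clipping_scale = clipping_scale
        self.norms = deque(maxlen=window)  # recent total grad norms

    def clip_gradients_(self) -> float:
        grads = [p.grad for p in self.params if p.grad is not None]
        total = torch.norm(torch.stack([g.norm() for g in grads]))
        self.norms.append(float(total))
        hist = torch.tensor(list(self.norms))
        # min / 25% / median / 75% / max, as printed in the log
        q = torch.quantile(hist, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = self.clipping_scale * float(q[2])  # 2.0 * median
        if float(total) > threshold:  # counts toward percent-clipped
            for g in grads:
                g.mul_(threshold / float(total))
        return threshold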
], batch size: 49, lr: 9.62e-03, grad_scale: 4.0 +2023-03-09 03:02:07,916 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0320, 3.8447, 5.0776, 3.0688, 4.3615, 2.7100, 3.0863, 1.8355], + device='cuda:2'), covar=tensor([0.0875, 0.0690, 0.0076, 0.0650, 0.0515, 0.2106, 0.2296, 0.1819], + device='cuda:2'), in_proj_covar=tensor([0.0200, 0.0219, 0.0123, 0.0173, 0.0233, 0.0250, 0.0290, 0.0213], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 03:02:08,793 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40686.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:02:23,426 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6018, 3.5433, 3.3712, 2.9764, 3.4249, 2.6974, 2.5643, 3.6770], + device='cuda:2'), covar=tensor([0.0040, 0.0062, 0.0067, 0.0114, 0.0067, 0.0167, 0.0177, 0.0047], + device='cuda:2'), in_proj_covar=tensor([0.0098, 0.0122, 0.0105, 0.0154, 0.0107, 0.0151, 0.0156, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 03:02:39,440 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40712.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:02:54,261 INFO [train.py:898] (2/4) Epoch 12, batch 750, loss[loss=0.1776, simple_loss=0.2519, pruned_loss=0.05164, over 17718.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2716, pruned_loss=0.05061, over 3493927.34 frames. ], batch size: 39, lr: 9.61e-03, grad_scale: 4.0 +2023-03-09 03:03:13,459 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-03-09 03:03:25,291 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.144e+02 3.250e+02 3.776e+02 4.415e+02 7.567e+02, threshold=7.551e+02, percent-clipped=1.0 +2023-03-09 03:03:33,619 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0677, 5.1679, 2.5516, 5.0149, 4.8976, 5.1864, 4.9574, 2.2988], + device='cuda:2'), covar=tensor([0.0172, 0.0115, 0.0954, 0.0138, 0.0101, 0.0118, 0.0143, 0.1538], + device='cuda:2'), in_proj_covar=tensor([0.0078, 0.0067, 0.0087, 0.0081, 0.0075, 0.0066, 0.0076, 0.0091], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 03:03:52,300 INFO [train.py:898] (2/4) Epoch 12, batch 800, loss[loss=0.1601, simple_loss=0.2425, pruned_loss=0.03891, over 18422.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2713, pruned_loss=0.05062, over 3515432.88 frames. ], batch size: 43, lr: 9.61e-03, grad_scale: 4.0 +2023-03-09 03:04:31,235 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4310, 5.9694, 5.4235, 5.7932, 5.5947, 5.4597, 6.0858, 5.9974], + device='cuda:2'), covar=tensor([0.1211, 0.0744, 0.0430, 0.0677, 0.1237, 0.0737, 0.0496, 0.0659], + device='cuda:2'), in_proj_covar=tensor([0.0535, 0.0445, 0.0336, 0.0471, 0.0648, 0.0479, 0.0626, 0.0465], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 03:04:39,350 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-09 03:04:51,188 INFO [train.py:898] (2/4) Epoch 12, batch 850, loss[loss=0.2189, simple_loss=0.3176, pruned_loss=0.06012, over 18350.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2715, pruned_loss=0.05052, over 3539438.05 frames. 
], batch size: 56, lr: 9.60e-03, grad_scale: 4.0 +2023-03-09 03:05:17,933 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40848.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:05:21,106 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.184e+02 3.104e+02 3.641e+02 4.538e+02 1.053e+03, threshold=7.281e+02, percent-clipped=3.0 +2023-03-09 03:05:21,357 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40851.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:05:38,616 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40865.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:05:48,785 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40874.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:05:49,610 INFO [train.py:898] (2/4) Epoch 12, batch 900, loss[loss=0.1896, simple_loss=0.2863, pruned_loss=0.04649, over 16070.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2721, pruned_loss=0.05085, over 3547790.69 frames. ], batch size: 95, lr: 9.59e-03, grad_scale: 4.0 +2023-03-09 03:05:52,517 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-09 03:06:18,246 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40899.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:06:19,562 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40900.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:06:45,187 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40922.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:06:47,666 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7278, 4.6471, 4.7892, 4.5702, 4.5087, 4.6595, 4.9118, 4.9733], + device='cuda:2'), covar=tensor([0.0060, 0.0085, 0.0069, 0.0098, 0.0070, 0.0100, 0.0100, 0.0089], + device='cuda:2'), in_proj_covar=tensor([0.0081, 0.0057, 0.0061, 0.0076, 0.0063, 0.0088, 0.0073, 0.0073], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 03:06:48,427 INFO [train.py:898] (2/4) Epoch 12, batch 950, loss[loss=0.2037, simple_loss=0.2936, pruned_loss=0.05688, over 18132.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2718, pruned_loss=0.05072, over 3546675.24 frames. 
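The zipformer.py:1455 dumps record per-head entropies of the post-softmax attention weights, a cheap diagnostic for heads that collapse to near-one-hot attention (entropy near 0) versus heads that stay diffuse (entropy near the log of the number of source positions; log(100) is about 4.6, the ballpark of several values above). A minimal sketch of the statistic, assuming attn_weights is the post-softmax tensor and that the mean is taken over batch and positions:

import torch

def attn_weights_entropy(attn_weights: torch.Tensor,
                         eps: float = 1e-20) -> torch.Tensor:
    # attn_weights: (num_heads, batch, tgt_len, src_len), rows sum to 1.
    # Returns one mean entropy per head, like the tensors logged above.
    p = attn_weights.clamp(min=eps)
    ent = -(p * p.log()).sum(dim=-1)   # (num_heads, batch, tgt_len)
    return ent.mean(dim=(1, 2))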
], batch size: 62, lr: 9.59e-03, grad_scale: 4.0 +2023-03-09 03:06:49,949 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40926.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:06:52,112 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40928.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:07:18,899 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.424e+02 3.344e+02 4.191e+02 5.324e+02 1.167e+03, threshold=8.382e+02, percent-clipped=6.0 +2023-03-09 03:07:31,626 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40961.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 03:07:43,818 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5048, 3.5476, 3.2923, 2.9523, 3.2281, 2.6122, 2.4850, 3.4545], + device='cuda:2'), covar=tensor([0.0054, 0.0070, 0.0065, 0.0124, 0.0087, 0.0168, 0.0192, 0.0073], + device='cuda:2'), in_proj_covar=tensor([0.0099, 0.0123, 0.0107, 0.0157, 0.0109, 0.0153, 0.0158, 0.0090], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 03:07:46,839 INFO [train.py:898] (2/4) Epoch 12, batch 1000, loss[loss=0.1997, simple_loss=0.29, pruned_loss=0.05466, over 18406.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2716, pruned_loss=0.05051, over 3549703.96 frames. ], batch size: 52, lr: 9.58e-03, grad_scale: 4.0 +2023-03-09 03:07:48,011 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=40976.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:07:59,325 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40986.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:08:11,868 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40997.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:08:24,375 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41007.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:08:40,038 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0973, 5.4618, 2.8838, 5.2737, 5.1436, 5.5025, 5.3324, 2.6944], + device='cuda:2'), covar=tensor([0.0173, 0.0045, 0.0763, 0.0067, 0.0071, 0.0055, 0.0072, 0.0946], + device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0067, 0.0087, 0.0082, 0.0075, 0.0066, 0.0077, 0.0091], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0004, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 03:08:45,140 INFO [train.py:898] (2/4) Epoch 12, batch 1050, loss[loss=0.2099, simple_loss=0.2875, pruned_loss=0.06616, over 18098.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.271, pruned_loss=0.05036, over 3565574.91 frames. 
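Note how the lr field moves only in the third digit over hundreds of batches (9.59e-03 drifting toward 9.5e-03): the schedule decays smoothly in both the batch index and the fractional epoch rather than stepping. Assuming an Eden-style rule, a fourth-root decay in each variable (an assumption; this log never shows the scheduler code, and the defaults below are illustrative):

def eden_lr(base_lr: float, batch: int, epoch: float,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    # Smooth fourth-root decay in both batch count and fractional epoch.
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# eden_lr(0.05, batch=41000, epoch=12.0) is roughly 9.2e-03, the right
# ballpark for the lr values logged in this stretch of epoch 12.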
], batch size: 62, lr: 9.58e-03, grad_scale: 4.0 +2023-03-09 03:08:55,721 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41034.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:08:58,074 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4830, 5.4018, 4.9876, 5.3922, 5.4134, 4.7984, 5.2774, 5.0447], + device='cuda:2'), covar=tensor([0.0416, 0.0404, 0.1498, 0.0711, 0.0462, 0.0428, 0.0418, 0.0914], + device='cuda:2'), in_proj_covar=tensor([0.0410, 0.0471, 0.0630, 0.0371, 0.0353, 0.0427, 0.0463, 0.0590], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 03:09:08,594 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41045.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:09:14,993 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.197e+02 3.189e+02 3.617e+02 4.210e+02 9.012e+02, threshold=7.234e+02, percent-clipped=1.0 +2023-03-09 03:09:24,028 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41058.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:09:43,918 INFO [train.py:898] (2/4) Epoch 12, batch 1100, loss[loss=0.1581, simple_loss=0.2409, pruned_loss=0.03767, over 18432.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2711, pruned_loss=0.05051, over 3565748.49 frames. ], batch size: 43, lr: 9.57e-03, grad_scale: 4.0 +2023-03-09 03:10:15,167 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41102.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:10:19,922 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41106.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:10:41,771 INFO [train.py:898] (2/4) Epoch 12, batch 1150, loss[loss=0.1918, simple_loss=0.288, pruned_loss=0.0478, over 18358.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2709, pruned_loss=0.05041, over 3566683.32 frames. ], batch size: 55, lr: 9.56e-03, grad_scale: 4.0 +2023-03-09 03:11:07,688 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41148.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:11:11,014 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.262e+02 3.020e+02 3.568e+02 4.440e+02 1.142e+03, threshold=7.137e+02, percent-clipped=4.0 +2023-03-09 03:11:25,792 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41163.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:11:39,956 INFO [train.py:898] (2/4) Epoch 12, batch 1200, loss[loss=0.1608, simple_loss=0.2417, pruned_loss=0.03993, over 18436.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2716, pruned_loss=0.05063, over 3565252.14 frames. ], batch size: 42, lr: 9.56e-03, grad_scale: 8.0 +2023-03-09 03:11:52,641 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-09 03:12:03,586 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41196.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:12:25,918 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.45 vs. limit=5.0 +2023-03-09 03:12:34,350 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41221.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:12:38,717 INFO [train.py:898] (2/4) Epoch 12, batch 1250, loss[loss=0.1599, simple_loss=0.2411, pruned_loss=0.03937, over 18168.00 frames. 
], tot_loss[loss=0.1854, simple_loss=0.2706, pruned_loss=0.05009, over 3583445.90 frames. ], batch size: 44, lr: 9.55e-03, grad_scale: 8.0 +2023-03-09 03:13:08,569 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.122e+02 3.147e+02 3.624e+02 4.404e+02 8.408e+02, threshold=7.247e+02, percent-clipped=2.0 +2023-03-09 03:13:14,412 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41256.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 03:13:37,215 INFO [train.py:898] (2/4) Epoch 12, batch 1300, loss[loss=0.2094, simple_loss=0.2885, pruned_loss=0.06512, over 18369.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2704, pruned_loss=0.04978, over 3598200.45 frames. ], batch size: 46, lr: 9.55e-03, grad_scale: 8.0 +2023-03-09 03:13:40,947 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2472, 5.4980, 2.8179, 5.3419, 5.1419, 5.4127, 5.2541, 2.6470], + device='cuda:2'), covar=tensor([0.0152, 0.0099, 0.0879, 0.0076, 0.0107, 0.0163, 0.0146, 0.1358], + device='cuda:2'), in_proj_covar=tensor([0.0078, 0.0067, 0.0088, 0.0083, 0.0076, 0.0066, 0.0076, 0.0091], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 03:13:46,998 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-09 03:14:14,031 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41307.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:14:16,476 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6938, 3.0342, 4.4135, 3.8575, 2.3866, 4.6443, 4.0639, 2.8393], + device='cuda:2'), covar=tensor([0.0428, 0.1269, 0.0150, 0.0333, 0.1668, 0.0145, 0.0372, 0.0898], + device='cuda:2'), in_proj_covar=tensor([0.0191, 0.0227, 0.0155, 0.0146, 0.0213, 0.0186, 0.0211, 0.0192], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 03:14:35,259 INFO [train.py:898] (2/4) Epoch 12, batch 1350, loss[loss=0.1574, simple_loss=0.2335, pruned_loss=0.04065, over 17684.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2693, pruned_loss=0.04945, over 3586206.94 frames. ], batch size: 39, lr: 9.54e-03, grad_scale: 8.0 +2023-03-09 03:15:05,736 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.782e+02 3.011e+02 3.906e+02 4.787e+02 1.227e+03, threshold=7.812e+02, percent-clipped=6.0 +2023-03-09 03:15:08,206 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41353.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:15:10,508 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41355.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:15:34,028 INFO [train.py:898] (2/4) Epoch 12, batch 1400, loss[loss=0.159, simple_loss=0.2437, pruned_loss=0.03715, over 18380.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2697, pruned_loss=0.04973, over 3573521.51 frames. ], batch size: 46, lr: 9.54e-03, grad_scale: 8.0 +2023-03-09 03:16:04,795 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41401.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:16:32,785 INFO [train.py:898] (2/4) Epoch 12, batch 1450, loss[loss=0.189, simple_loss=0.2607, pruned_loss=0.05865, over 18497.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2696, pruned_loss=0.04971, over 3572661.40 frames. 
], batch size: 44, lr: 9.53e-03, grad_scale: 8.0 +2023-03-09 03:16:52,881 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-09 03:17:03,085 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.84 vs. limit=5.0 +2023-03-09 03:17:03,287 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.105e+02 2.980e+02 3.633e+02 4.455e+02 8.287e+02, threshold=7.266e+02, percent-clipped=1.0 +2023-03-09 03:17:11,474 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41458.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:17:28,777 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4765, 2.0791, 2.5114, 2.5854, 3.1500, 4.8310, 4.4152, 3.6864], + device='cuda:2'), covar=tensor([0.1362, 0.2156, 0.2552, 0.1454, 0.1972, 0.0131, 0.0389, 0.0570], + device='cuda:2'), in_proj_covar=tensor([0.0250, 0.0307, 0.0327, 0.0252, 0.0368, 0.0191, 0.0265, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 03:17:31,242 INFO [train.py:898] (2/4) Epoch 12, batch 1500, loss[loss=0.1831, simple_loss=0.2723, pruned_loss=0.04699, over 18255.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2688, pruned_loss=0.04931, over 3586058.76 frames. ], batch size: 47, lr: 9.52e-03, grad_scale: 8.0 +2023-03-09 03:18:09,786 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5735, 3.7687, 5.2005, 4.6417, 3.4680, 3.2435, 4.6482, 5.4150], + device='cuda:2'), covar=tensor([0.0860, 0.1747, 0.0115, 0.0282, 0.0771, 0.0958, 0.0287, 0.0232], + device='cuda:2'), in_proj_covar=tensor([0.0138, 0.0249, 0.0109, 0.0164, 0.0180, 0.0179, 0.0177, 0.0154], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 03:18:24,533 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41521.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:18:28,654 INFO [train.py:898] (2/4) Epoch 12, batch 1550, loss[loss=0.1774, simple_loss=0.2691, pruned_loss=0.04282, over 18400.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2697, pruned_loss=0.04973, over 3583167.92 frames. ], batch size: 48, lr: 9.52e-03, grad_scale: 8.0 +2023-03-09 03:18:43,461 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0667, 5.3354, 2.7142, 5.1493, 5.0649, 5.3624, 5.2141, 2.6567], + device='cuda:2'), covar=tensor([0.0170, 0.0062, 0.0826, 0.0082, 0.0075, 0.0082, 0.0075, 0.1013], + device='cuda:2'), in_proj_covar=tensor([0.0079, 0.0069, 0.0088, 0.0084, 0.0077, 0.0067, 0.0077, 0.0092], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 03:19:00,402 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.006e+02 3.193e+02 3.688e+02 4.708e+02 1.427e+03, threshold=7.376e+02, percent-clipped=3.0 +2023-03-09 03:19:06,262 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41556.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 03:19:20,755 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41569.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:19:27,333 INFO [train.py:898] (2/4) Epoch 12, batch 1600, loss[loss=0.1825, simple_loss=0.2702, pruned_loss=0.04739, over 18306.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2689, pruned_loss=0.04951, over 3591680.71 frames. 
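The scaling.py:679 lines compare a whitening metric per module against a fixed limit (2.0 for the 8-group modules, 5.0 for the single-group 384-channel ones above); presumably a penalty activates only when the metric exceeds the limit. A plausible form for the metric, hedged because scaling.py itself is not shown, is the ratio of the mean squared eigenvalue of the channel covariance to its squared mean, which is exactly 1.0 when the covariance is a multiple of the identity:

import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    # x: (num_frames, num_channels); channels split contiguously into
    # num_groups groups, metric averaged over groups.  Sketch only,
    # assumed rather than lifted from scaling.py.
    n, c = x.shape
    assert c % num_groups == 0
    xg = x.reshape(n, num_groups, c // num_groups).permute(1, 2, 0)  # (g, ch, n)
    cov = xg @ xg.transpose(1, 2) / n                                # (g, ch, ch)
    eigs = torch.linalg.eigvalsh(cov)                                # (g, ch)
    return ((eigs ** 2).mean(dim=1) / eigs.mean(dim=1) ** 2).mean()

# whitening_metric(torch.randn(4000, 384), num_groups=1) comes out just
# above 1.0; a logged "metric=4.84 vs. limit=5.0" would leave the
# penalty inactive.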
], batch size: 54, lr: 9.51e-03, grad_scale: 8.0 +2023-03-09 03:19:48,308 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-03-09 03:20:03,010 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41604.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:20:20,257 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7604, 3.4243, 4.4647, 3.0437, 3.8536, 2.6046, 2.8930, 1.9949], + device='cuda:2'), covar=tensor([0.0892, 0.0764, 0.0125, 0.0612, 0.0600, 0.1980, 0.2023, 0.1631], + device='cuda:2'), in_proj_covar=tensor([0.0198, 0.0221, 0.0123, 0.0175, 0.0232, 0.0248, 0.0291, 0.0212], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 03:20:26,593 INFO [train.py:898] (2/4) Epoch 12, batch 1650, loss[loss=0.1683, simple_loss=0.2528, pruned_loss=0.04194, over 18568.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2694, pruned_loss=0.0497, over 3603470.62 frames. ], batch size: 49, lr: 9.51e-03, grad_scale: 8.0 +2023-03-09 03:20:58,272 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.922e+02 3.190e+02 3.689e+02 4.449e+02 1.002e+03, threshold=7.378e+02, percent-clipped=1.0 +2023-03-09 03:21:00,836 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41653.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:21:01,987 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41654.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:21:25,757 INFO [train.py:898] (2/4) Epoch 12, batch 1700, loss[loss=0.2422, simple_loss=0.3086, pruned_loss=0.08793, over 12280.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.27, pruned_loss=0.04974, over 3594894.63 frames. ], batch size: 129, lr: 9.50e-03, grad_scale: 8.0 +2023-03-09 03:21:57,768 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41701.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:21:57,860 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41701.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:22:09,211 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8324, 3.0443, 4.4586, 3.9297, 2.8021, 4.5984, 3.9940, 2.9225], + device='cuda:2'), covar=tensor([0.0386, 0.1204, 0.0152, 0.0286, 0.1340, 0.0176, 0.0431, 0.0850], + device='cuda:2'), in_proj_covar=tensor([0.0189, 0.0225, 0.0158, 0.0144, 0.0210, 0.0186, 0.0211, 0.0191], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 03:22:13,849 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41715.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 03:22:24,611 INFO [train.py:898] (2/4) Epoch 12, batch 1750, loss[loss=0.158, simple_loss=0.2353, pruned_loss=0.04036, over 17643.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2694, pruned_loss=0.04932, over 3602431.64 frames. 
], batch size: 39, lr: 9.50e-03, grad_scale: 8.0 +2023-03-09 03:22:36,884 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2395, 5.1491, 5.4142, 5.3138, 5.0515, 5.9582, 5.6297, 5.2156], + device='cuda:2'), covar=tensor([0.0922, 0.0579, 0.0679, 0.0581, 0.1424, 0.0750, 0.0603, 0.1638], + device='cuda:2'), in_proj_covar=tensor([0.0311, 0.0237, 0.0255, 0.0258, 0.0296, 0.0363, 0.0241, 0.0352], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0003], + device='cuda:2') +2023-03-09 03:22:54,140 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41749.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:22:56,200 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.910e+02 3.102e+02 3.582e+02 4.165e+02 6.416e+02, threshold=7.165e+02, percent-clipped=1.0 +2023-03-09 03:23:04,325 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41758.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:23:22,838 INFO [train.py:898] (2/4) Epoch 12, batch 1800, loss[loss=0.1855, simple_loss=0.2759, pruned_loss=0.04749, over 18317.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2701, pruned_loss=0.04947, over 3596969.69 frames. ], batch size: 54, lr: 9.49e-03, grad_scale: 8.0 +2023-03-09 03:23:59,584 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=41806.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:24:18,934 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5804, 3.5254, 5.0064, 4.1953, 3.3269, 2.8803, 4.3754, 5.0523], + device='cuda:2'), covar=tensor([0.0811, 0.1620, 0.0133, 0.0373, 0.0795, 0.1111, 0.0354, 0.0238], + device='cuda:2'), in_proj_covar=tensor([0.0137, 0.0248, 0.0110, 0.0163, 0.0178, 0.0178, 0.0175, 0.0154], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 03:24:20,775 INFO [train.py:898] (2/4) Epoch 12, batch 1850, loss[loss=0.1914, simple_loss=0.2714, pruned_loss=0.05567, over 18416.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2713, pruned_loss=0.05028, over 3584309.51 frames. ], batch size: 48, lr: 9.49e-03, grad_scale: 8.0 +2023-03-09 03:24:51,662 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.068e+02 3.165e+02 3.584e+02 4.367e+02 1.390e+03, threshold=7.168e+02, percent-clipped=5.0 +2023-03-09 03:25:19,163 INFO [train.py:898] (2/4) Epoch 12, batch 1900, loss[loss=0.1969, simple_loss=0.2854, pruned_loss=0.05421, over 18617.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2716, pruned_loss=0.0505, over 3591096.51 frames. ], batch size: 52, lr: 9.48e-03, grad_scale: 8.0 +2023-03-09 03:25:22,006 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.17 vs. 
limit=5.0 +2023-03-09 03:25:56,902 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41906.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:26:10,463 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0622, 4.1765, 2.2511, 4.1964, 5.1592, 2.3750, 3.5332, 3.7058], + device='cuda:2'), covar=tensor([0.0127, 0.1089, 0.1847, 0.0528, 0.0064, 0.1482, 0.0825, 0.0879], + device='cuda:2'), in_proj_covar=tensor([0.0121, 0.0235, 0.0190, 0.0189, 0.0093, 0.0177, 0.0203, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 03:26:17,859 INFO [train.py:898] (2/4) Epoch 12, batch 1950, loss[loss=0.1923, simple_loss=0.2711, pruned_loss=0.05677, over 18434.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2701, pruned_loss=0.0501, over 3580476.61 frames. ], batch size: 48, lr: 9.47e-03, grad_scale: 8.0 +2023-03-09 03:26:44,574 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41948.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 03:26:47,526 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.082e+02 3.043e+02 3.852e+02 4.648e+02 1.107e+03, threshold=7.704e+02, percent-clipped=2.0 +2023-03-09 03:27:07,339 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41967.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:27:10,933 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7272, 3.6491, 4.9768, 3.0149, 4.4025, 2.6081, 3.0527, 1.8455], + device='cuda:2'), covar=tensor([0.0992, 0.0770, 0.0095, 0.0687, 0.0479, 0.2198, 0.2368, 0.1810], + device='cuda:2'), in_proj_covar=tensor([0.0197, 0.0219, 0.0121, 0.0172, 0.0229, 0.0244, 0.0288, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 03:27:16,275 INFO [train.py:898] (2/4) Epoch 12, batch 2000, loss[loss=0.182, simple_loss=0.2765, pruned_loss=0.04371, over 18353.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2704, pruned_loss=0.05007, over 3582184.97 frames. ], batch size: 55, lr: 9.47e-03, grad_scale: 8.0 +2023-03-09 03:28:01,245 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.32 vs. limit=5.0 +2023-03-09 03:28:02,195 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42009.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 03:28:03,090 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42010.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 03:28:20,368 INFO [train.py:898] (2/4) Epoch 12, batch 2050, loss[loss=0.1647, simple_loss=0.2407, pruned_loss=0.04434, over 18431.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2704, pruned_loss=0.04999, over 3590266.68 frames. ], batch size: 43, lr: 9.46e-03, grad_scale: 8.0 +2023-03-09 03:28:50,575 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.053e+02 3.014e+02 3.595e+02 4.305e+02 8.922e+02, threshold=7.191e+02, percent-clipped=2.0 +2023-03-09 03:29:19,680 INFO [train.py:898] (2/4) Epoch 12, batch 2100, loss[loss=0.1611, simple_loss=0.2424, pruned_loss=0.03995, over 18418.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2694, pruned_loss=0.0494, over 3595909.87 frames. 
], batch size: 42, lr: 9.46e-03, grad_scale: 4.0 +2023-03-09 03:29:20,189 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6943, 3.4431, 4.8026, 2.6384, 4.1113, 2.5021, 2.9518, 1.8610], + device='cuda:2'), covar=tensor([0.0991, 0.0856, 0.0124, 0.0789, 0.0568, 0.2246, 0.2350, 0.1808], + device='cuda:2'), in_proj_covar=tensor([0.0197, 0.0219, 0.0123, 0.0174, 0.0231, 0.0246, 0.0290, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 03:30:18,694 INFO [train.py:898] (2/4) Epoch 12, batch 2150, loss[loss=0.1849, simple_loss=0.2738, pruned_loss=0.04799, over 18488.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2692, pruned_loss=0.04906, over 3591405.80 frames. ], batch size: 51, lr: 9.45e-03, grad_scale: 4.0 +2023-03-09 03:30:19,032 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.4026, 4.8722, 4.8306, 4.8926, 4.4221, 4.7328, 4.2624, 4.7406], + device='cuda:2'), covar=tensor([0.0267, 0.0319, 0.0239, 0.0336, 0.0362, 0.0245, 0.1107, 0.0300], + device='cuda:2'), in_proj_covar=tensor([0.0182, 0.0228, 0.0218, 0.0263, 0.0230, 0.0232, 0.0292, 0.0219], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-09 03:30:45,402 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8718, 2.9170, 4.2743, 4.0310, 2.4817, 4.5849, 3.8933, 2.7138], + device='cuda:2'), covar=tensor([0.0369, 0.1321, 0.0215, 0.0255, 0.1575, 0.0162, 0.0409, 0.1002], + device='cuda:2'), in_proj_covar=tensor([0.0189, 0.0223, 0.0157, 0.0142, 0.0209, 0.0183, 0.0208, 0.0188], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 03:30:49,400 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.910e+02 3.158e+02 3.739e+02 4.529e+02 7.533e+02, threshold=7.477e+02, percent-clipped=1.0 +2023-03-09 03:31:17,038 INFO [train.py:898] (2/4) Epoch 12, batch 2200, loss[loss=0.2026, simple_loss=0.2865, pruned_loss=0.05935, over 18563.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2698, pruned_loss=0.04964, over 3575350.52 frames. ], batch size: 54, lr: 9.45e-03, grad_scale: 4.0 +2023-03-09 03:32:15,656 INFO [train.py:898] (2/4) Epoch 12, batch 2250, loss[loss=0.1992, simple_loss=0.2824, pruned_loss=0.05804, over 16351.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2701, pruned_loss=0.04985, over 3572245.64 frames. ], batch size: 94, lr: 9.44e-03, grad_scale: 4.0 +2023-03-09 03:32:18,803 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.20 vs. limit=5.0 +2023-03-09 03:32:46,342 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.218e+02 3.002e+02 3.427e+02 4.398e+02 7.070e+02, threshold=6.854e+02, percent-clipped=0.0 +2023-03-09 03:32:58,282 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42262.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:33:14,288 INFO [train.py:898] (2/4) Epoch 12, batch 2300, loss[loss=0.1626, simple_loss=0.2507, pruned_loss=0.03719, over 18273.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2702, pruned_loss=0.04974, over 3574804.03 frames. 
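The grad_scale field is the loss scale of fp16 mixed-precision training: it halves whenever a scaled gradient overflows (here it has just dropped from 8.0 to 4.0) and grows back after a long run of clean steps. The standard torch.cuda.amp pattern that produces this halving/doubling behaviour, shown generically rather than as train.py's actual loop (init_scale and growth_interval below are illustrative):

import torch

scaler = torch.cuda.amp.GradScaler(init_scale=2.0, growth_interval=2000)

def train_step(model, optimizer, criterion, features, targets):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = criterion(model(features), targets)
    scaler.scale(loss).backward()  # backward on the scaled loss
    scaler.step(optimizer)         # skips the update if grads overflowed
    scaler.update()                # halves scale on overflow, else grows
    return loss.detach(), scaler.get_scale()  # get_scale() -> e.g. 4.0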
], batch size: 49, lr: 9.44e-03, grad_scale: 4.0 +2023-03-09 03:33:47,385 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42304.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 03:33:54,901 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42310.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 03:34:10,342 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-03-09 03:34:13,010 INFO [train.py:898] (2/4) Epoch 12, batch 2350, loss[loss=0.182, simple_loss=0.2732, pruned_loss=0.04536, over 18326.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2703, pruned_loss=0.04957, over 3575139.50 frames. ], batch size: 54, lr: 9.43e-03, grad_scale: 4.0 +2023-03-09 03:34:29,348 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-03-09 03:34:43,296 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.195e+02 3.349e+02 3.901e+02 4.944e+02 8.097e+02, threshold=7.803e+02, percent-clipped=6.0 +2023-03-09 03:34:50,327 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42358.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:35:08,476 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4990, 5.3289, 4.9836, 5.4247, 5.3620, 4.7050, 5.2770, 5.0244], + device='cuda:2'), covar=tensor([0.0388, 0.0482, 0.1439, 0.0739, 0.0585, 0.0485, 0.0477, 0.1027], + device='cuda:2'), in_proj_covar=tensor([0.0413, 0.0480, 0.0635, 0.0372, 0.0360, 0.0431, 0.0464, 0.0596], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 03:35:10,382 INFO [train.py:898] (2/4) Epoch 12, batch 2400, loss[loss=0.1848, simple_loss=0.2755, pruned_loss=0.04704, over 18189.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2708, pruned_loss=0.04987, over 3583146.56 frames. ], batch size: 60, lr: 9.42e-03, grad_scale: 8.0 +2023-03-09 03:35:16,002 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42379.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:36:05,755 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.29 vs. limit=5.0 +2023-03-09 03:36:08,416 INFO [train.py:898] (2/4) Epoch 12, batch 2450, loss[loss=0.1572, simple_loss=0.2352, pruned_loss=0.03958, over 18186.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2698, pruned_loss=0.04928, over 3594037.90 frames. ], batch size: 44, lr: 9.42e-03, grad_scale: 8.0 +2023-03-09 03:36:27,164 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42440.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:36:40,613 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.166e+02 3.219e+02 3.774e+02 4.354e+02 8.177e+02, threshold=7.548e+02, percent-clipped=1.0 +2023-03-09 03:37:07,530 INFO [train.py:898] (2/4) Epoch 12, batch 2500, loss[loss=0.2119, simple_loss=0.2906, pruned_loss=0.06656, over 18116.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2693, pruned_loss=0.049, over 3602496.75 frames. ], batch size: 62, lr: 9.41e-03, grad_scale: 8.0 +2023-03-09 03:37:35,957 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.62 vs. 
limit=2.0 +2023-03-09 03:37:44,347 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42506.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:38:06,074 INFO [train.py:898] (2/4) Epoch 12, batch 2550, loss[loss=0.186, simple_loss=0.2759, pruned_loss=0.04801, over 18450.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2697, pruned_loss=0.04921, over 3597091.68 frames. ], batch size: 59, lr: 9.41e-03, grad_scale: 8.0 +2023-03-09 03:38:31,132 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8122, 2.8812, 4.3165, 3.9702, 2.3713, 4.5696, 4.0818, 2.6273], + device='cuda:2'), covar=tensor([0.0386, 0.1468, 0.0239, 0.0264, 0.1715, 0.0199, 0.0380, 0.1193], + device='cuda:2'), in_proj_covar=tensor([0.0194, 0.0227, 0.0162, 0.0146, 0.0213, 0.0188, 0.0216, 0.0193], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 03:38:34,521 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1048, 2.4884, 2.2915, 2.4885, 3.2060, 3.1139, 2.7731, 2.6072], + device='cuda:2'), covar=tensor([0.0266, 0.0284, 0.0555, 0.0427, 0.0198, 0.0151, 0.0401, 0.0359], + device='cuda:2'), in_proj_covar=tensor([0.0124, 0.0113, 0.0156, 0.0141, 0.0109, 0.0094, 0.0139, 0.0134], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 03:38:37,551 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.199e+02 3.182e+02 3.754e+02 4.508e+02 9.914e+02, threshold=7.507e+02, percent-clipped=4.0 +2023-03-09 03:38:49,218 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42562.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:38:54,969 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42567.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:38:57,262 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4715, 6.0203, 5.4232, 5.7427, 5.5079, 5.4719, 5.9962, 6.0198], + device='cuda:2'), covar=tensor([0.0902, 0.0591, 0.0477, 0.0680, 0.1368, 0.0671, 0.0545, 0.0640], + device='cuda:2'), in_proj_covar=tensor([0.0532, 0.0436, 0.0339, 0.0470, 0.0651, 0.0479, 0.0623, 0.0467], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 03:39:04,475 INFO [train.py:898] (2/4) Epoch 12, batch 2600, loss[loss=0.188, simple_loss=0.2781, pruned_loss=0.04891, over 18620.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2708, pruned_loss=0.04986, over 3576653.86 frames. ], batch size: 52, lr: 9.40e-03, grad_scale: 8.0 +2023-03-09 03:39:39,400 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42604.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 03:39:46,072 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42610.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:40:03,350 INFO [train.py:898] (2/4) Epoch 12, batch 2650, loss[loss=0.1967, simple_loss=0.2819, pruned_loss=0.05579, over 18344.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2711, pruned_loss=0.04989, over 3589030.33 frames. 
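Each batch line pairs the current batch's losses with tot_loss aggregates whose frame count hovers around 3.6e6 instead of growing without bound, which suggests an exponentially decayed, frame-weighted running sum (old batches geometrically down-weighted) rather than a plain epoch average. A sketch of that bookkeeping; the class name and reset_interval value are assumptions chosen to reproduce the ~3.6e6-frame steady state (roughly 200 batches of ~18k frames):

class DecayedLossTracker:
    def __init__(self, reset_interval: int = 200):
        self.decay = 1.0 - 1.0 / reset_interval
        self.sums = {}      # decayed sums of (per-frame loss * frames)
        self.frames = 0.0   # decayed frame count, ~3.6e6 at steady state

    def update(self, losses: dict, num_frames: float):
        self.frames = self.frames * self.decay + num_frames
        for k, v in losses.items():
            self.sums[k] = self.sums.get(k, 0.0) * self.decay + v * num_frames

    def averages(self) -> dict:
        # the numbers printed as tot_loss[...] in the log lines above
        return {k: s / self.frames for k, s in self.sums.items()}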
], batch size: 55, lr: 9.40e-03, grad_scale: 8.0 +2023-03-09 03:40:12,787 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9494, 4.9119, 4.4940, 4.8794, 4.8852, 4.3005, 4.7997, 4.5781], + device='cuda:2'), covar=tensor([0.0465, 0.0503, 0.1571, 0.0754, 0.0601, 0.0464, 0.0462, 0.0982], + device='cuda:2'), in_proj_covar=tensor([0.0416, 0.0486, 0.0635, 0.0377, 0.0364, 0.0437, 0.0466, 0.0604], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 03:40:14,194 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.20 vs. limit=5.0 +2023-03-09 03:40:31,676 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42649.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:40:34,771 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.962e+02 3.110e+02 3.787e+02 4.423e+02 7.417e+02, threshold=7.574e+02, percent-clipped=0.0 +2023-03-09 03:40:35,006 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=42652.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 03:41:00,847 INFO [train.py:898] (2/4) Epoch 12, batch 2700, loss[loss=0.1923, simple_loss=0.2618, pruned_loss=0.06145, over 18263.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2701, pruned_loss=0.04972, over 3589637.52 frames. ], batch size: 45, lr: 9.39e-03, grad_scale: 8.0 +2023-03-09 03:41:42,263 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42710.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:41:59,010 INFO [train.py:898] (2/4) Epoch 12, batch 2750, loss[loss=0.18, simple_loss=0.2664, pruned_loss=0.04675, over 16946.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2697, pruned_loss=0.04952, over 3587441.68 frames. ], batch size: 78, lr: 9.39e-03, grad_scale: 8.0 +2023-03-09 03:42:09,165 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5667, 5.0999, 5.1232, 5.0882, 4.5821, 4.9436, 4.3801, 4.9852], + device='cuda:2'), covar=tensor([0.0237, 0.0273, 0.0172, 0.0378, 0.0356, 0.0232, 0.1132, 0.0301], + device='cuda:2'), in_proj_covar=tensor([0.0180, 0.0227, 0.0213, 0.0260, 0.0227, 0.0229, 0.0285, 0.0217], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-09 03:42:11,337 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42735.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:42:32,192 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.860e+02 3.123e+02 3.590e+02 4.311e+02 1.461e+03, threshold=7.180e+02, percent-clipped=1.0 +2023-03-09 03:42:58,260 INFO [train.py:898] (2/4) Epoch 12, batch 2800, loss[loss=0.1731, simple_loss=0.2521, pruned_loss=0.04699, over 18359.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2703, pruned_loss=0.0494, over 3599474.69 frames. 
], batch size: 46, lr: 9.38e-03, grad_scale: 8.0 +2023-03-09 03:43:34,705 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3804, 5.2763, 5.5247, 5.5067, 5.2964, 6.0880, 5.7207, 5.4521], + device='cuda:2'), covar=tensor([0.1022, 0.0589, 0.0722, 0.0645, 0.1463, 0.0757, 0.0588, 0.1481], + device='cuda:2'), in_proj_covar=tensor([0.0312, 0.0235, 0.0254, 0.0254, 0.0295, 0.0357, 0.0241, 0.0348], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-09 03:43:51,835 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2173, 5.1653, 5.3273, 5.1580, 5.1395, 5.8786, 5.4606, 5.2253], + device='cuda:2'), covar=tensor([0.0886, 0.0552, 0.0636, 0.0649, 0.1278, 0.0705, 0.0599, 0.1474], + device='cuda:2'), in_proj_covar=tensor([0.0312, 0.0234, 0.0254, 0.0254, 0.0296, 0.0357, 0.0241, 0.0349], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:2') +2023-03-09 03:43:57,204 INFO [train.py:898] (2/4) Epoch 12, batch 2850, loss[loss=0.1838, simple_loss=0.2775, pruned_loss=0.04498, over 18355.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2699, pruned_loss=0.04949, over 3602652.74 frames. ], batch size: 55, lr: 9.38e-03, grad_scale: 8.0 +2023-03-09 03:44:27,912 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.023e+02 3.014e+02 3.707e+02 4.689e+02 1.677e+03, threshold=7.413e+02, percent-clipped=4.0 +2023-03-09 03:44:34,056 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0012, 4.1610, 2.3597, 4.1339, 5.1559, 2.4226, 3.7614, 3.8068], + device='cuda:2'), covar=tensor([0.0136, 0.0941, 0.1629, 0.0457, 0.0050, 0.1323, 0.0633, 0.0753], + device='cuda:2'), in_proj_covar=tensor([0.0123, 0.0235, 0.0192, 0.0188, 0.0093, 0.0175, 0.0203, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 03:44:37,111 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42859.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:44:40,388 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42862.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:44:54,936 INFO [train.py:898] (2/4) Epoch 12, batch 2900, loss[loss=0.1654, simple_loss=0.2433, pruned_loss=0.04374, over 18505.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2695, pruned_loss=0.04952, over 3607808.66 frames. ], batch size: 44, lr: 9.37e-03, grad_scale: 8.0 +2023-03-09 03:44:59,701 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42879.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:45:33,858 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. 
limit=2.0 +2023-03-09 03:45:41,030 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1216, 4.2959, 2.2586, 4.2591, 5.2977, 2.4251, 3.8099, 3.8936], + device='cuda:2'), covar=tensor([0.0125, 0.1076, 0.1965, 0.0564, 0.0055, 0.1665, 0.0731, 0.0741], + device='cuda:2'), in_proj_covar=tensor([0.0123, 0.0236, 0.0193, 0.0189, 0.0093, 0.0175, 0.0204, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 03:45:47,656 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42920.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:45:53,048 INFO [train.py:898] (2/4) Epoch 12, batch 2950, loss[loss=0.1471, simple_loss=0.2344, pruned_loss=0.02996, over 18232.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2685, pruned_loss=0.04893, over 3610888.42 frames. ], batch size: 45, lr: 9.36e-03, grad_scale: 8.0 +2023-03-09 03:46:11,086 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42940.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:46:24,423 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.040e+02 2.922e+02 3.740e+02 4.612e+02 1.225e+03, threshold=7.481e+02, percent-clipped=6.0 +2023-03-09 03:46:41,266 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.2357, 3.6046, 4.9767, 4.0944, 3.1720, 2.8685, 4.1675, 5.1145], + device='cuda:2'), covar=tensor([0.0968, 0.1412, 0.0128, 0.0396, 0.0928, 0.1095, 0.0400, 0.0166], + device='cuda:2'), in_proj_covar=tensor([0.0137, 0.0248, 0.0109, 0.0164, 0.0178, 0.0175, 0.0176, 0.0154], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 03:46:51,978 INFO [train.py:898] (2/4) Epoch 12, batch 3000, loss[loss=0.1657, simple_loss=0.2576, pruned_loss=0.03694, over 18488.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2689, pruned_loss=0.04934, over 3580740.40 frames. ], batch size: 47, lr: 9.36e-03, grad_scale: 8.0 +2023-03-09 03:46:51,979 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 03:47:04,090 INFO [train.py:932] (2/4) Epoch 12, validation: loss=0.1557, simple_loss=0.2578, pruned_loss=0.02677, over 944034.00 frames. +2023-03-09 03:47:04,091 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 03:47:15,097 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.83 vs. limit=5.0 +2023-03-09 03:47:40,283 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43005.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:48:03,058 INFO [train.py:898] (2/4) Epoch 12, batch 3050, loss[loss=0.1796, simple_loss=0.2616, pruned_loss=0.04879, over 17755.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2683, pruned_loss=0.04935, over 3581850.97 frames. 
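The validation block above interrupts training, evaluates the same losses over the fixed dev set (hence the constant 944034.00-frame count on every validation line), and then reports the CUDA high-water mark, steady at 19987MB throughout epoch 12. A compact sketch, where compute_loss is an assumed helper returning a frame-weighted loss for one batch:

import torch

@torch.no_grad()
def validate(model, dev_loader, compute_loss, device) -> tuple:
    model.eval()
    tot, frames = 0.0, 0.0
    for batch in dev_loader:
        loss, num_frames = compute_loss(model, batch)  # assumed helper
        tot += float(loss) * num_frames
        frames += num_frames
    model.train()
    max_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    return tot / frames, max_mb  # e.g. (0.1557, 19987) as logged above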
], batch size: 70, lr: 9.35e-03, grad_scale: 8.0 +2023-03-09 03:48:14,870 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43035.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:48:35,553 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.270e+02 3.222e+02 3.719e+02 4.518e+02 9.955e+02, threshold=7.438e+02, percent-clipped=1.0 +2023-03-09 03:48:53,784 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1434, 2.4257, 3.2353, 3.2137, 2.3015, 3.4697, 3.3368, 2.4545], + device='cuda:2'), covar=tensor([0.0404, 0.1291, 0.0337, 0.0270, 0.1485, 0.0239, 0.0471, 0.0905], + device='cuda:2'), in_proj_covar=tensor([0.0191, 0.0222, 0.0162, 0.0146, 0.0214, 0.0186, 0.0210, 0.0190], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 03:48:54,349 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-09 03:49:01,447 INFO [train.py:898] (2/4) Epoch 12, batch 3100, loss[loss=0.1612, simple_loss=0.2425, pruned_loss=0.03991, over 17691.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2691, pruned_loss=0.04963, over 3578396.12 frames. ], batch size: 39, lr: 9.35e-03, grad_scale: 8.0 +2023-03-09 03:49:11,122 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43083.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:49:30,674 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9660, 4.4090, 4.6262, 3.3287, 3.6067, 3.5266, 2.5555, 2.3239], + device='cuda:2'), covar=tensor([0.0179, 0.0157, 0.0077, 0.0302, 0.0377, 0.0229, 0.0784, 0.0900], + device='cuda:2'), in_proj_covar=tensor([0.0061, 0.0048, 0.0050, 0.0061, 0.0083, 0.0060, 0.0073, 0.0079], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0006, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 03:50:00,207 INFO [train.py:898] (2/4) Epoch 12, batch 3150, loss[loss=0.1938, simple_loss=0.2835, pruned_loss=0.05202, over 18412.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2697, pruned_loss=0.04963, over 3574615.84 frames. ], batch size: 52, lr: 9.34e-03, grad_scale: 8.0 +2023-03-09 03:50:31,049 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.182e+02 3.274e+02 3.785e+02 4.716e+02 1.243e+03, threshold=7.571e+02, percent-clipped=5.0 +2023-03-09 03:50:43,688 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43162.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:50:58,065 INFO [train.py:898] (2/4) Epoch 12, batch 3200, loss[loss=0.1907, simple_loss=0.2865, pruned_loss=0.04744, over 18385.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2706, pruned_loss=0.04999, over 3569117.44 frames. ], batch size: 52, lr: 9.34e-03, grad_scale: 8.0 +2023-03-09 03:51:39,585 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43210.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:51:45,929 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43215.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:51:57,218 INFO [train.py:898] (2/4) Epoch 12, batch 3250, loss[loss=0.1625, simple_loss=0.248, pruned_loss=0.03851, over 18012.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2697, pruned_loss=0.04981, over 3564844.39 frames. 
], batch size: 40, lr: 9.33e-03, grad_scale: 8.0 +2023-03-09 03:52:08,896 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43235.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:52:09,259 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-03-09 03:52:28,374 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.179e+02 3.099e+02 3.708e+02 4.167e+02 9.547e+02, threshold=7.415e+02, percent-clipped=3.0 +2023-03-09 03:52:45,035 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.03 vs. limit=5.0 +2023-03-09 03:52:55,946 INFO [train.py:898] (2/4) Epoch 12, batch 3300, loss[loss=0.1587, simple_loss=0.2412, pruned_loss=0.03806, over 18252.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2684, pruned_loss=0.04911, over 3578515.07 frames. ], batch size: 45, lr: 9.33e-03, grad_scale: 8.0 +2023-03-09 03:53:30,997 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43305.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:53:54,818 INFO [train.py:898] (2/4) Epoch 12, batch 3350, loss[loss=0.1591, simple_loss=0.2417, pruned_loss=0.03827, over 18497.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2681, pruned_loss=0.04888, over 3585629.50 frames. ], batch size: 47, lr: 9.32e-03, grad_scale: 8.0 +2023-03-09 03:54:25,612 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.041e+02 3.064e+02 3.610e+02 4.297e+02 1.025e+03, threshold=7.219e+02, percent-clipped=3.0 +2023-03-09 03:54:26,960 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43353.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:54:53,263 INFO [train.py:898] (2/4) Epoch 12, batch 3400, loss[loss=0.1957, simple_loss=0.2903, pruned_loss=0.05059, over 18299.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2689, pruned_loss=0.04898, over 3579690.96 frames. ], batch size: 54, lr: 9.32e-03, grad_scale: 8.0 +2023-03-09 03:55:12,474 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8960, 3.8896, 4.0012, 3.8341, 3.8051, 3.8904, 4.0318, 3.9995], + device='cuda:2'), covar=tensor([0.0075, 0.0067, 0.0059, 0.0087, 0.0067, 0.0090, 0.0068, 0.0088], + device='cuda:2'), in_proj_covar=tensor([0.0081, 0.0057, 0.0060, 0.0077, 0.0064, 0.0088, 0.0074, 0.0074], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 03:55:51,949 INFO [train.py:898] (2/4) Epoch 12, batch 3450, loss[loss=0.1892, simple_loss=0.2732, pruned_loss=0.05259, over 18298.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2695, pruned_loss=0.04918, over 3576602.37 frames. ], batch size: 49, lr: 9.31e-03, grad_scale: 8.0 +2023-03-09 03:56:23,235 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.156e+02 3.064e+02 3.567e+02 4.230e+02 7.375e+02, threshold=7.135e+02, percent-clipped=1.0 +2023-03-09 03:56:51,194 INFO [train.py:898] (2/4) Epoch 12, batch 3500, loss[loss=0.1839, simple_loss=0.2694, pruned_loss=0.04922, over 17029.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2693, pruned_loss=0.04883, over 3574587.99 frames. ], batch size: 78, lr: 9.31e-03, grad_scale: 8.0 +2023-03-09 03:57:20,003 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. 
limit=2.0 +2023-03-09 03:57:27,061 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9426, 3.4273, 4.6241, 4.0314, 2.9794, 4.7932, 4.1740, 3.3957], + device='cuda:2'), covar=tensor([0.0404, 0.1115, 0.0167, 0.0335, 0.1366, 0.0140, 0.0461, 0.0780], + device='cuda:2'), in_proj_covar=tensor([0.0193, 0.0223, 0.0162, 0.0147, 0.0215, 0.0189, 0.0211, 0.0193], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 03:57:35,107 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43514.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:57:36,025 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43515.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:57:46,261 INFO [train.py:898] (2/4) Epoch 12, batch 3550, loss[loss=0.1724, simple_loss=0.2511, pruned_loss=0.04685, over 18546.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2684, pruned_loss=0.04894, over 3587859.79 frames. ], batch size: 45, lr: 9.30e-03, grad_scale: 8.0 +2023-03-09 03:57:57,034 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43535.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:58:15,320 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.095e+02 3.008e+02 3.651e+02 4.331e+02 1.115e+03, threshold=7.301e+02, percent-clipped=5.0 +2023-03-09 03:58:27,331 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43563.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:58:34,912 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5755, 2.2720, 2.4898, 2.6201, 3.1964, 4.4627, 4.0887, 3.5864], + device='cuda:2'), covar=tensor([0.1370, 0.2032, 0.2387, 0.1511, 0.1748, 0.0197, 0.0455, 0.0547], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0310, 0.0330, 0.0252, 0.0367, 0.0199, 0.0267, 0.0214], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 03:58:40,413 INFO [train.py:898] (2/4) Epoch 12, batch 3600, loss[loss=0.1791, simple_loss=0.2658, pruned_loss=0.04617, over 18283.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2691, pruned_loss=0.04924, over 3579143.13 frames. ], batch size: 49, lr: 9.30e-03, grad_scale: 8.0 +2023-03-09 03:58:40,793 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43575.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:58:49,371 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=43583.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 03:58:53,772 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-09 03:59:46,563 INFO [train.py:898] (2/4) Epoch 13, batch 0, loss[loss=0.1863, simple_loss=0.2696, pruned_loss=0.05157, over 18256.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2696, pruned_loss=0.05157, over 18256.00 frames. ], batch size: 47, lr: 8.93e-03, grad_scale: 8.0 +2023-03-09 03:59:46,563 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 03:59:58,386 INFO [train.py:932] (2/4) Epoch 13, validation: loss=0.1568, simple_loss=0.2587, pruned_loss=0.02742, over 944034.00 frames. 
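Note: the recurring "[optim.py:369] Clipping_scale=2.0, grad-norm quartiles ... threshold=..., percent-clipped=..." records in this log summarize the distribution of recent total gradient norms and how often clipping fired since the last report. Below is a minimal Python sketch of how such a diagnostic could be produced, assuming the threshold is a fixed multiple (clipping_scale) of the median recent norm; the GradNormTracker class and its method names are invented for illustration and are not icefall's actual optim.py implementation, whose details differ.

import logging

import torch


class GradNormTracker:
    """Tracks recent total gradient norms and periodically reports their
    quartiles, a clipping threshold, and the fraction of batches clipped.
    A sketch only: icefall's real clipping logic in optim.py differs."""

    def __init__(self, clipping_scale: float = 2.0, window: int = 128):
        self.clipping_scale = clipping_scale  # multiple of median norm (assumed rule)
        self.window = window                  # how many recent norms to keep
        self.norms = []
        self.num_clipped = 0
        self.num_seen = 0

    def update(self, model: torch.nn.Module) -> float:
        """Record this batch's total grad norm; return the scale (<= 1.0)
        that would clip the gradients to the current threshold."""
        # Total gradient norm over all parameters that received a gradient.
        norm = torch.norm(
            torch.stack([p.grad.norm() for p in model.parameters()
                         if p.grad is not None])).item()
        self.norms.append(norm)
        self.norms = self.norms[-self.window:]
        median = torch.tensor(self.norms).median().item()
        threshold = self.clipping_scale * median
        self.num_seen += 1
        if norm > threshold:
            self.num_clipped += 1
        return min(1.0, threshold / (norm + 1e-20))

    def log_quartiles(self) -> None:
        """Emit a record shaped like the 'grad-norm quartiles' lines above."""
        qs = torch.tensor(self.norms).quantile(
            torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0])).tolist()
        pct = 100.0 * self.num_clipped / max(1, self.num_seen)
        logging.info(
            "Clipping_scale=%.1f, grad-norm quartiles "
            "%.3e %.3e %.3e %.3e %.3e, threshold=%.3e, percent-clipped=%.1f",
            self.clipping_scale, *qs, self.clipping_scale * qs[2], pct)
        self.num_clipped = 0
        self.num_seen = 0

Under these assumptions, calling update() after each backward pass and log_quartiles() at the logging interval would emit records of the same shape as the ones in this log; the actual norm window, threshold rule, and reset behaviour in icefall may differ.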
+2023-03-09 03:59:58,387 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 04:00:01,941 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1291, 5.0919, 5.3377, 5.2552, 5.1492, 5.8989, 5.4897, 5.3274], + device='cuda:2'), covar=tensor([0.1049, 0.0618, 0.0686, 0.0832, 0.1273, 0.0756, 0.0708, 0.1419], + device='cuda:2'), in_proj_covar=tensor([0.0315, 0.0243, 0.0258, 0.0257, 0.0302, 0.0365, 0.0245, 0.0355], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0003], + device='cuda:2') +2023-03-09 04:00:07,023 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.3813, 5.2892, 5.0135, 5.0754, 4.6444, 5.1105, 5.3703, 5.2332], + device='cuda:2'), covar=tensor([0.2525, 0.1142, 0.1019, 0.1329, 0.2500, 0.1142, 0.1156, 0.1196], + device='cuda:2'), in_proj_covar=tensor([0.0532, 0.0444, 0.0337, 0.0475, 0.0645, 0.0477, 0.0626, 0.0469], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 04:00:49,453 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.977e+02 3.410e+02 4.106e+02 5.043e+02 1.786e+03, threshold=8.212e+02, percent-clipped=7.0 +2023-03-09 04:00:57,357 INFO [train.py:898] (2/4) Epoch 13, batch 50, loss[loss=0.1992, simple_loss=0.2865, pruned_loss=0.056, over 18285.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.274, pruned_loss=0.0497, over 801709.56 frames. ], batch size: 57, lr: 8.92e-03, grad_scale: 8.0 +2023-03-09 04:01:53,372 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6527, 3.2598, 3.9572, 2.8486, 3.5872, 2.6167, 2.7205, 2.2357], + device='cuda:2'), covar=tensor([0.0847, 0.0725, 0.0164, 0.0567, 0.0606, 0.1875, 0.1932, 0.1390], + device='cuda:2'), in_proj_covar=tensor([0.0203, 0.0221, 0.0125, 0.0176, 0.0235, 0.0254, 0.0294, 0.0214], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 04:01:56,183 INFO [train.py:898] (2/4) Epoch 13, batch 100, loss[loss=0.1944, simple_loss=0.2863, pruned_loss=0.05121, over 17875.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2723, pruned_loss=0.04998, over 1405404.76 frames. ], batch size: 65, lr: 8.92e-03, grad_scale: 8.0 +2023-03-09 04:02:47,116 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.935e+02 2.941e+02 3.301e+02 3.834e+02 7.601e+02, threshold=6.602e+02, percent-clipped=0.0 +2023-03-09 04:02:55,246 INFO [train.py:898] (2/4) Epoch 13, batch 150, loss[loss=0.1919, simple_loss=0.2779, pruned_loss=0.05291, over 17766.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2691, pruned_loss=0.04887, over 1881812.29 frames. ], batch size: 70, lr: 8.91e-03, grad_scale: 8.0 +2023-03-09 04:03:28,268 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43787.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:03:47,883 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-09 04:03:53,901 INFO [train.py:898] (2/4) Epoch 13, batch 200, loss[loss=0.1839, simple_loss=0.2606, pruned_loss=0.05362, over 17705.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2689, pruned_loss=0.04905, over 2275300.49 frames. ], batch size: 39, lr: 8.91e-03, grad_scale: 8.0 +2023-03-09 04:04:28,964 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. 
limit=2.0 +2023-03-09 04:04:40,668 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43848.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:04:44,849 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.130e+02 3.045e+02 3.524e+02 4.456e+02 7.991e+02, threshold=7.049e+02, percent-clipped=5.0 +2023-03-09 04:04:53,630 INFO [train.py:898] (2/4) Epoch 13, batch 250, loss[loss=0.1523, simple_loss=0.2321, pruned_loss=0.03624, over 18464.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2691, pruned_loss=0.04879, over 2575414.67 frames. ], batch size: 43, lr: 8.90e-03, grad_scale: 8.0 +2023-03-09 04:05:06,470 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43870.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:05:52,784 INFO [train.py:898] (2/4) Epoch 13, batch 300, loss[loss=0.1677, simple_loss=0.2521, pruned_loss=0.04165, over 18400.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2687, pruned_loss=0.04825, over 2801206.96 frames. ], batch size: 48, lr: 8.90e-03, grad_scale: 8.0 +2023-03-09 04:05:53,174 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43909.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:06:43,701 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.829e+02 2.907e+02 3.635e+02 4.242e+02 7.161e+02, threshold=7.270e+02, percent-clipped=1.0 +2023-03-09 04:06:48,406 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.21 vs. limit=5.0 +2023-03-09 04:06:52,916 INFO [train.py:898] (2/4) Epoch 13, batch 350, loss[loss=0.1606, simple_loss=0.2391, pruned_loss=0.04103, over 18427.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2675, pruned_loss=0.04774, over 2970513.31 frames. ], batch size: 42, lr: 8.89e-03, grad_scale: 8.0 +2023-03-09 04:06:53,311 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43959.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:07:06,122 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43970.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:07:25,354 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-09 04:07:56,733 INFO [train.py:898] (2/4) Epoch 13, batch 400, loss[loss=0.181, simple_loss=0.262, pruned_loss=0.05004, over 18495.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2669, pruned_loss=0.04765, over 3117402.24 frames. ], batch size: 47, lr: 8.89e-03, grad_scale: 8.0 +2023-03-09 04:08:00,943 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3555, 5.9571, 5.4604, 5.6172, 5.4245, 5.3531, 5.9136, 5.9149], + device='cuda:2'), covar=tensor([0.1073, 0.0531, 0.0455, 0.0666, 0.1420, 0.0668, 0.0564, 0.0557], + device='cuda:2'), in_proj_covar=tensor([0.0537, 0.0447, 0.0340, 0.0480, 0.0659, 0.0483, 0.0630, 0.0472], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 04:08:10,362 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44020.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:08:28,999 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. 
limit=2.0 +2023-03-09 04:08:47,393 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.657e+02 2.874e+02 3.424e+02 4.227e+02 8.593e+02, threshold=6.849e+02, percent-clipped=4.0 +2023-03-09 04:08:55,689 INFO [train.py:898] (2/4) Epoch 13, batch 450, loss[loss=0.196, simple_loss=0.2824, pruned_loss=0.05484, over 18358.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2668, pruned_loss=0.0476, over 3226410.11 frames. ], batch size: 55, lr: 8.88e-03, grad_scale: 16.0 +2023-03-09 04:09:04,932 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.92 vs. limit=2.0 +2023-03-09 04:09:54,523 INFO [train.py:898] (2/4) Epoch 13, batch 500, loss[loss=0.1818, simple_loss=0.2697, pruned_loss=0.04698, over 18388.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2665, pruned_loss=0.04771, over 3302147.11 frames. ], batch size: 50, lr: 8.88e-03, grad_scale: 16.0 +2023-03-09 04:10:34,487 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44143.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:10:44,863 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.103e+02 3.159e+02 3.633e+02 4.577e+02 9.760e+02, threshold=7.265e+02, percent-clipped=1.0 +2023-03-09 04:10:53,409 INFO [train.py:898] (2/4) Epoch 13, batch 550, loss[loss=0.1947, simple_loss=0.2786, pruned_loss=0.05539, over 18293.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2662, pruned_loss=0.0476, over 3362953.77 frames. ], batch size: 54, lr: 8.87e-03, grad_scale: 16.0 +2023-03-09 04:11:01,553 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-03-09 04:11:07,555 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44170.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:11:53,536 INFO [train.py:898] (2/4) Epoch 13, batch 600, loss[loss=0.183, simple_loss=0.2775, pruned_loss=0.04426, over 17247.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.266, pruned_loss=0.04749, over 3418143.99 frames. ], batch size: 78, lr: 8.87e-03, grad_scale: 16.0 +2023-03-09 04:12:04,401 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44218.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:12:10,270 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6006, 3.4472, 2.1414, 4.3825, 3.1242, 4.3786, 2.4923, 3.9606], + device='cuda:2'), covar=tensor([0.0538, 0.0760, 0.1408, 0.0451, 0.0794, 0.0293, 0.1068, 0.0367], + device='cuda:2'), in_proj_covar=tensor([0.0198, 0.0213, 0.0181, 0.0248, 0.0179, 0.0247, 0.0191, 0.0190], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 04:12:42,716 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.069e+02 3.135e+02 3.711e+02 4.524e+02 8.017e+02, threshold=7.422e+02, percent-clipped=1.0 +2023-03-09 04:12:51,315 INFO [train.py:898] (2/4) Epoch 13, batch 650, loss[loss=0.1798, simple_loss=0.2679, pruned_loss=0.04585, over 18476.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2674, pruned_loss=0.04819, over 3457554.75 frames. 
], batch size: 53, lr: 8.86e-03, grad_scale: 16.0 +2023-03-09 04:12:58,027 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8677, 2.8815, 2.1334, 3.3405, 2.5362, 3.1464, 2.2472, 2.8869], + device='cuda:2'), covar=tensor([0.0467, 0.0638, 0.0920, 0.0480, 0.0609, 0.0280, 0.0845, 0.0343], + device='cuda:2'), in_proj_covar=tensor([0.0196, 0.0212, 0.0180, 0.0247, 0.0177, 0.0245, 0.0190, 0.0189], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 04:12:58,912 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44265.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:13:10,991 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4002, 3.2292, 2.0982, 4.2293, 2.8245, 4.0788, 2.3593, 3.7946], + device='cuda:2'), covar=tensor([0.0534, 0.0797, 0.1295, 0.0398, 0.0811, 0.0265, 0.1053, 0.0314], + device='cuda:2'), in_proj_covar=tensor([0.0197, 0.0213, 0.0181, 0.0248, 0.0177, 0.0247, 0.0190, 0.0189], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 04:13:48,029 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.86 vs. limit=5.0 +2023-03-09 04:13:49,368 INFO [train.py:898] (2/4) Epoch 13, batch 700, loss[loss=0.1696, simple_loss=0.2539, pruned_loss=0.04268, over 18280.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2684, pruned_loss=0.04861, over 3486193.44 frames. ], batch size: 49, lr: 8.86e-03, grad_scale: 8.0 +2023-03-09 04:13:57,550 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44315.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:14:18,531 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7119, 3.6605, 5.1182, 4.5740, 3.3897, 3.1471, 4.5419, 5.3360], + device='cuda:2'), covar=tensor([0.0810, 0.1553, 0.0133, 0.0281, 0.0800, 0.1021, 0.0325, 0.0150], + device='cuda:2'), in_proj_covar=tensor([0.0138, 0.0251, 0.0112, 0.0166, 0.0181, 0.0177, 0.0177, 0.0157], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 04:14:41,970 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.372e+02 3.099e+02 3.698e+02 4.790e+02 1.213e+03, threshold=7.395e+02, percent-clipped=5.0 +2023-03-09 04:14:48,763 INFO [train.py:898] (2/4) Epoch 13, batch 750, loss[loss=0.1966, simple_loss=0.2898, pruned_loss=0.05166, over 18402.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2686, pruned_loss=0.04828, over 3511730.56 frames. 
], batch size: 52, lr: 8.85e-03, grad_scale: 8.0 +2023-03-09 04:14:55,300 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3083, 5.2004, 5.3731, 5.4294, 5.2096, 6.0105, 5.7020, 5.4973], + device='cuda:2'), covar=tensor([0.1025, 0.0613, 0.0795, 0.0687, 0.1628, 0.0769, 0.0706, 0.1608], + device='cuda:2'), in_proj_covar=tensor([0.0319, 0.0247, 0.0262, 0.0261, 0.0304, 0.0369, 0.0244, 0.0362], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0003], + device='cuda:2') +2023-03-09 04:14:55,456 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1556, 5.3282, 2.6684, 5.1190, 5.0031, 5.3533, 5.1054, 2.4607], + device='cuda:2'), covar=tensor([0.0160, 0.0057, 0.0789, 0.0072, 0.0067, 0.0052, 0.0094, 0.1095], + device='cuda:2'), in_proj_covar=tensor([0.0078, 0.0070, 0.0089, 0.0085, 0.0077, 0.0067, 0.0078, 0.0092], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 04:15:47,835 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0601, 5.0286, 5.1083, 4.8596, 4.7800, 4.9875, 5.2769, 5.2828], + device='cuda:2'), covar=tensor([0.0064, 0.0067, 0.0061, 0.0088, 0.0059, 0.0113, 0.0068, 0.0070], + device='cuda:2'), in_proj_covar=tensor([0.0082, 0.0058, 0.0061, 0.0077, 0.0065, 0.0088, 0.0075, 0.0075], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 04:15:48,583 INFO [train.py:898] (2/4) Epoch 13, batch 800, loss[loss=0.1648, simple_loss=0.2439, pruned_loss=0.04279, over 18453.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2679, pruned_loss=0.04832, over 3519742.10 frames. ], batch size: 43, lr: 8.85e-03, grad_scale: 8.0 +2023-03-09 04:16:29,527 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44443.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:16:40,513 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.201e+02 3.366e+02 4.098e+02 5.276e+02 1.273e+03, threshold=8.196e+02, percent-clipped=10.0 +2023-03-09 04:16:47,513 INFO [train.py:898] (2/4) Epoch 13, batch 850, loss[loss=0.1766, simple_loss=0.2655, pruned_loss=0.04388, over 18622.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2686, pruned_loss=0.04863, over 3538230.45 frames. ], batch size: 52, lr: 8.84e-03, grad_scale: 8.0 +2023-03-09 04:16:50,271 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5222, 2.0650, 2.4277, 2.5141, 2.8614, 5.0528, 4.5808, 3.9100], + device='cuda:2'), covar=tensor([0.1631, 0.2749, 0.3216, 0.1795, 0.3078, 0.0243, 0.0390, 0.0563], + device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0309, 0.0328, 0.0252, 0.0366, 0.0195, 0.0267, 0.0213], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 04:17:26,153 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44491.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:17:45,931 INFO [train.py:898] (2/4) Epoch 13, batch 900, loss[loss=0.1634, simple_loss=0.2478, pruned_loss=0.03946, over 18281.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2692, pruned_loss=0.04868, over 3551438.07 frames. 
], batch size: 47, lr: 8.84e-03, grad_scale: 8.0 +2023-03-09 04:18:07,278 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44527.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:18:34,379 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44550.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:18:37,465 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.936e+02 2.996e+02 3.521e+02 4.536e+02 7.966e+02, threshold=7.042e+02, percent-clipped=0.0 +2023-03-09 04:18:44,387 INFO [train.py:898] (2/4) Epoch 13, batch 950, loss[loss=0.1835, simple_loss=0.2732, pruned_loss=0.04689, over 18486.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2673, pruned_loss=0.04783, over 3564412.96 frames. ], batch size: 51, lr: 8.84e-03, grad_scale: 8.0 +2023-03-09 04:18:51,580 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44565.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:19:05,171 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9840, 4.9907, 5.0588, 4.7886, 4.7588, 4.8266, 5.1958, 5.2262], + device='cuda:2'), covar=tensor([0.0067, 0.0067, 0.0064, 0.0092, 0.0070, 0.0118, 0.0079, 0.0075], + device='cuda:2'), in_proj_covar=tensor([0.0082, 0.0058, 0.0061, 0.0077, 0.0065, 0.0088, 0.0075, 0.0075], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 04:19:18,571 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44588.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:19:24,164 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-09 04:19:37,094 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8785, 3.6543, 4.7632, 4.1066, 2.8713, 2.6218, 4.1963, 5.0339], + device='cuda:2'), covar=tensor([0.0707, 0.1362, 0.0152, 0.0408, 0.1094, 0.1280, 0.0456, 0.0179], + device='cuda:2'), in_proj_covar=tensor([0.0138, 0.0249, 0.0113, 0.0165, 0.0179, 0.0177, 0.0175, 0.0156], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 04:19:40,436 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.3899, 3.0429, 3.7668, 3.5922, 2.8514, 2.8568, 3.6242, 3.9409], + device='cuda:2'), covar=tensor([0.0762, 0.1260, 0.0206, 0.0347, 0.0775, 0.0865, 0.0381, 0.0336], + device='cuda:2'), in_proj_covar=tensor([0.0138, 0.0248, 0.0113, 0.0165, 0.0179, 0.0177, 0.0175, 0.0156], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 04:19:43,369 INFO [train.py:898] (2/4) Epoch 13, batch 1000, loss[loss=0.1898, simple_loss=0.2734, pruned_loss=0.05305, over 18352.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2677, pruned_loss=0.04814, over 3557217.02 frames. 
], batch size: 56, lr: 8.83e-03, grad_scale: 8.0 +2023-03-09 04:19:46,159 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44611.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:19:48,240 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44613.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:19:50,670 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44615.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:20:36,783 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.889e+02 3.069e+02 3.543e+02 4.304e+02 1.212e+03, threshold=7.086e+02, percent-clipped=7.0 +2023-03-09 04:20:42,653 INFO [train.py:898] (2/4) Epoch 13, batch 1050, loss[loss=0.184, simple_loss=0.2661, pruned_loss=0.05096, over 18507.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2679, pruned_loss=0.04812, over 3558194.19 frames. ], batch size: 47, lr: 8.83e-03, grad_scale: 4.0 +2023-03-09 04:20:47,379 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=44663.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:21:41,983 INFO [train.py:898] (2/4) Epoch 13, batch 1100, loss[loss=0.1817, simple_loss=0.2678, pruned_loss=0.04777, over 18260.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2681, pruned_loss=0.04781, over 3567361.09 frames. ], batch size: 47, lr: 8.82e-03, grad_scale: 4.0 +2023-03-09 04:22:26,095 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1275, 5.0906, 5.3911, 5.3344, 5.1060, 5.9530, 5.5276, 5.2830], + device='cuda:2'), covar=tensor([0.1038, 0.0672, 0.0646, 0.0643, 0.1324, 0.0735, 0.0605, 0.1659], + device='cuda:2'), in_proj_covar=tensor([0.0325, 0.0251, 0.0263, 0.0265, 0.0307, 0.0371, 0.0247, 0.0369], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0003], + device='cuda:2') +2023-03-09 04:22:35,539 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.712e+02 2.982e+02 3.567e+02 4.035e+02 7.842e+02, threshold=7.134e+02, percent-clipped=1.0 +2023-03-09 04:22:41,042 INFO [train.py:898] (2/4) Epoch 13, batch 1150, loss[loss=0.1858, simple_loss=0.2745, pruned_loss=0.04853, over 17728.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2684, pruned_loss=0.04793, over 3569944.34 frames. ], batch size: 70, lr: 8.82e-03, grad_scale: 4.0 +2023-03-09 04:22:55,347 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5990, 2.2068, 2.5925, 2.7208, 3.3338, 5.1401, 4.7684, 3.8218], + device='cuda:2'), covar=tensor([0.1346, 0.2253, 0.2656, 0.1501, 0.1986, 0.0124, 0.0319, 0.0598], + device='cuda:2'), in_proj_covar=tensor([0.0251, 0.0309, 0.0328, 0.0251, 0.0362, 0.0196, 0.0265, 0.0213], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 04:23:40,892 INFO [train.py:898] (2/4) Epoch 13, batch 1200, loss[loss=0.2049, simple_loss=0.2914, pruned_loss=0.05921, over 18231.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2674, pruned_loss=0.04763, over 3577890.97 frames. ], batch size: 60, lr: 8.81e-03, grad_scale: 8.0 +2023-03-09 04:24:33,794 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.745e+02 2.967e+02 3.663e+02 4.375e+02 1.162e+03, threshold=7.327e+02, percent-clipped=1.0 +2023-03-09 04:24:40,025 INFO [train.py:898] (2/4) Epoch 13, batch 1250, loss[loss=0.1738, simple_loss=0.2647, pruned_loss=0.04148, over 18390.00 frames. 
], tot_loss[loss=0.1816, simple_loss=0.2677, pruned_loss=0.04773, over 3580107.42 frames. ], batch size: 50, lr: 8.81e-03, grad_scale: 8.0 +2023-03-09 04:25:07,188 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44883.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:25:35,354 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44906.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:25:38,456 INFO [train.py:898] (2/4) Epoch 13, batch 1300, loss[loss=0.2162, simple_loss=0.295, pruned_loss=0.06873, over 16049.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2677, pruned_loss=0.04783, over 3584878.62 frames. ], batch size: 94, lr: 8.80e-03, grad_scale: 8.0 +2023-03-09 04:25:55,228 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0027, 4.7201, 4.9483, 4.7691, 4.6803, 4.8909, 5.2005, 5.0487], + device='cuda:2'), covar=tensor([0.0084, 0.0141, 0.0102, 0.0111, 0.0098, 0.0131, 0.0098, 0.0154], + device='cuda:2'), in_proj_covar=tensor([0.0081, 0.0057, 0.0060, 0.0076, 0.0064, 0.0088, 0.0074, 0.0074], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 04:26:31,153 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.969e+02 2.987e+02 3.642e+02 4.868e+02 9.952e+02, threshold=7.283e+02, percent-clipped=3.0 +2023-03-09 04:26:36,906 INFO [train.py:898] (2/4) Epoch 13, batch 1350, loss[loss=0.1828, simple_loss=0.2736, pruned_loss=0.04602, over 18384.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2667, pruned_loss=0.04763, over 3588707.46 frames. ], batch size: 56, lr: 8.80e-03, grad_scale: 8.0 +2023-03-09 04:26:42,521 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9711, 4.9842, 4.9861, 4.7507, 4.8677, 4.8201, 5.1827, 5.2122], + device='cuda:2'), covar=tensor([0.0060, 0.0066, 0.0056, 0.0091, 0.0053, 0.0105, 0.0055, 0.0065], + device='cuda:2'), in_proj_covar=tensor([0.0080, 0.0057, 0.0060, 0.0076, 0.0064, 0.0087, 0.0074, 0.0073], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 04:26:47,449 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-09 04:27:36,006 INFO [train.py:898] (2/4) Epoch 13, batch 1400, loss[loss=0.1932, simple_loss=0.2808, pruned_loss=0.05279, over 17952.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2676, pruned_loss=0.04781, over 3581103.17 frames. ], batch size: 65, lr: 8.79e-03, grad_scale: 8.0 +2023-03-09 04:28:28,833 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.199e+02 3.129e+02 3.917e+02 4.727e+02 1.364e+03, threshold=7.834e+02, percent-clipped=4.0 +2023-03-09 04:28:32,159 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45056.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:28:35,350 INFO [train.py:898] (2/4) Epoch 13, batch 1450, loss[loss=0.2335, simple_loss=0.305, pruned_loss=0.08097, over 11872.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2682, pruned_loss=0.04781, over 3585699.91 frames. 
], batch size: 130, lr: 8.79e-03, grad_scale: 8.0 +2023-03-09 04:29:01,660 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45081.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:29:32,274 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5690, 3.4866, 2.1819, 4.3798, 3.0014, 4.3476, 2.2910, 4.0559], + device='cuda:2'), covar=tensor([0.0576, 0.0743, 0.1415, 0.0495, 0.0865, 0.0281, 0.1230, 0.0317], + device='cuda:2'), in_proj_covar=tensor([0.0201, 0.0217, 0.0186, 0.0253, 0.0185, 0.0251, 0.0196, 0.0192], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 04:29:33,502 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6361, 2.3528, 2.4984, 2.8284, 3.0719, 5.2234, 4.7358, 3.8363], + device='cuda:2'), covar=tensor([0.1700, 0.2722, 0.3679, 0.1656, 0.3129, 0.0151, 0.0427, 0.0681], + device='cuda:2'), in_proj_covar=tensor([0.0254, 0.0312, 0.0331, 0.0253, 0.0366, 0.0198, 0.0268, 0.0216], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 04:29:34,132 INFO [train.py:898] (2/4) Epoch 13, batch 1500, loss[loss=0.1602, simple_loss=0.2379, pruned_loss=0.0413, over 17591.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2678, pruned_loss=0.04798, over 3571772.44 frames. ], batch size: 39, lr: 8.78e-03, grad_scale: 8.0 +2023-03-09 04:29:44,343 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45117.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 04:30:13,338 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45142.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:30:22,528 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9858, 5.4089, 5.5256, 5.4635, 5.0709, 5.4283, 4.6285, 5.3235], + device='cuda:2'), covar=tensor([0.0194, 0.0353, 0.0168, 0.0271, 0.0291, 0.0195, 0.1272, 0.0282], + device='cuda:2'), in_proj_covar=tensor([0.0184, 0.0231, 0.0219, 0.0263, 0.0233, 0.0231, 0.0290, 0.0223], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-09 04:30:22,819 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 04:30:26,576 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.379e+02 3.129e+02 3.724e+02 4.200e+02 9.663e+02, threshold=7.448e+02, percent-clipped=2.0 +2023-03-09 04:30:33,406 INFO [train.py:898] (2/4) Epoch 13, batch 1550, loss[loss=0.2113, simple_loss=0.2951, pruned_loss=0.06378, over 18204.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2678, pruned_loss=0.0482, over 3570066.09 frames. ], batch size: 60, lr: 8.78e-03, grad_scale: 8.0 +2023-03-09 04:31:01,971 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45183.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:31:28,242 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45206.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:31:32,592 INFO [train.py:898] (2/4) Epoch 13, batch 1600, loss[loss=0.1644, simple_loss=0.246, pruned_loss=0.0414, over 17714.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2671, pruned_loss=0.04793, over 3577154.47 frames. 
], batch size: 39, lr: 8.77e-03, grad_scale: 8.0 +2023-03-09 04:31:57,590 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8202, 4.4154, 4.6000, 3.3235, 3.6433, 3.5800, 2.6721, 2.2759], + device='cuda:2'), covar=tensor([0.0222, 0.0187, 0.0073, 0.0329, 0.0366, 0.0206, 0.0726, 0.1042], + device='cuda:2'), in_proj_covar=tensor([0.0062, 0.0048, 0.0051, 0.0061, 0.0083, 0.0059, 0.0072, 0.0079], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0006, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 04:31:58,523 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45231.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:32:24,701 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.718e+02 3.180e+02 3.814e+02 4.660e+02 1.002e+03, threshold=7.628e+02, percent-clipped=5.0 +2023-03-09 04:32:24,934 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45254.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:32:30,336 INFO [train.py:898] (2/4) Epoch 13, batch 1650, loss[loss=0.1861, simple_loss=0.2691, pruned_loss=0.05157, over 18392.00 frames. ], tot_loss[loss=0.181, simple_loss=0.267, pruned_loss=0.04751, over 3588729.06 frames. ], batch size: 50, lr: 8.77e-03, grad_scale: 8.0 +2023-03-09 04:32:55,279 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-09 04:33:19,362 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7961, 4.4429, 4.6175, 3.1725, 3.6939, 3.6858, 2.5108, 2.3912], + device='cuda:2'), covar=tensor([0.0234, 0.0162, 0.0064, 0.0347, 0.0339, 0.0177, 0.0761, 0.0935], + device='cuda:2'), in_proj_covar=tensor([0.0062, 0.0049, 0.0051, 0.0061, 0.0082, 0.0060, 0.0072, 0.0079], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0006, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 04:33:20,433 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0257, 5.5139, 5.5732, 5.4925, 5.0464, 5.3849, 4.7934, 5.4170], + device='cuda:2'), covar=tensor([0.0227, 0.0284, 0.0142, 0.0350, 0.0311, 0.0210, 0.1101, 0.0267], + device='cuda:2'), in_proj_covar=tensor([0.0185, 0.0235, 0.0221, 0.0269, 0.0234, 0.0233, 0.0292, 0.0223], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-09 04:33:29,083 INFO [train.py:898] (2/4) Epoch 13, batch 1700, loss[loss=0.1844, simple_loss=0.2765, pruned_loss=0.04615, over 18120.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2662, pruned_loss=0.04687, over 3601456.79 frames. ], batch size: 62, lr: 8.76e-03, grad_scale: 8.0 +2023-03-09 04:33:41,770 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. 
limit=2.0 +2023-03-09 04:33:53,009 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5170, 2.6772, 2.5351, 2.7048, 3.6142, 3.4353, 2.9198, 2.8438], + device='cuda:2'), covar=tensor([0.0181, 0.0330, 0.0532, 0.0335, 0.0167, 0.0151, 0.0392, 0.0371], + device='cuda:2'), in_proj_covar=tensor([0.0122, 0.0113, 0.0151, 0.0142, 0.0109, 0.0094, 0.0137, 0.0134], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 04:34:15,875 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45348.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 04:34:16,092 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 04:34:22,329 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.903e+02 2.834e+02 3.670e+02 4.509e+02 1.027e+03, threshold=7.340e+02, percent-clipped=3.0 +2023-03-09 04:34:28,115 INFO [train.py:898] (2/4) Epoch 13, batch 1750, loss[loss=0.1557, simple_loss=0.2434, pruned_loss=0.03398, over 18416.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2658, pruned_loss=0.04653, over 3612410.03 frames. ], batch size: 48, lr: 8.76e-03, grad_scale: 8.0 +2023-03-09 04:35:27,717 INFO [train.py:898] (2/4) Epoch 13, batch 1800, loss[loss=0.2154, simple_loss=0.2924, pruned_loss=0.06918, over 12629.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2664, pruned_loss=0.04687, over 3602043.10 frames. ], batch size: 129, lr: 8.75e-03, grad_scale: 8.0 +2023-03-09 04:35:28,230 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45409.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 04:35:31,494 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45412.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 04:36:01,071 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45437.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:36:21,059 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.197e+02 3.089e+02 3.615e+02 4.391e+02 8.521e+02, threshold=7.230e+02, percent-clipped=5.0 +2023-03-09 04:36:22,526 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9141, 5.0555, 4.9981, 4.8292, 4.7646, 4.8206, 5.1608, 5.2329], + device='cuda:2'), covar=tensor([0.0079, 0.0059, 0.0067, 0.0102, 0.0068, 0.0135, 0.0077, 0.0093], + device='cuda:2'), in_proj_covar=tensor([0.0083, 0.0058, 0.0062, 0.0078, 0.0066, 0.0090, 0.0075, 0.0076], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 04:36:26,743 INFO [train.py:898] (2/4) Epoch 13, batch 1850, loss[loss=0.1722, simple_loss=0.2505, pruned_loss=0.04699, over 18418.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2664, pruned_loss=0.04692, over 3600077.42 frames. ], batch size: 43, lr: 8.75e-03, grad_scale: 8.0 +2023-03-09 04:37:25,647 INFO [train.py:898] (2/4) Epoch 13, batch 1900, loss[loss=0.2067, simple_loss=0.2923, pruned_loss=0.06059, over 17761.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2667, pruned_loss=0.04717, over 3590952.13 frames. 
], batch size: 70, lr: 8.74e-03, grad_scale: 8.0 +2023-03-09 04:38:18,702 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.016e+02 2.946e+02 3.593e+02 4.543e+02 1.018e+03, threshold=7.187e+02, percent-clipped=4.0 +2023-03-09 04:38:24,502 INFO [train.py:898] (2/4) Epoch 13, batch 1950, loss[loss=0.2099, simple_loss=0.3032, pruned_loss=0.05827, over 18159.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.267, pruned_loss=0.04758, over 3585974.07 frames. ], batch size: 62, lr: 8.74e-03, grad_scale: 8.0 +2023-03-09 04:38:41,055 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9892, 5.0825, 2.7058, 4.9060, 4.8187, 5.0644, 4.8797, 2.8255], + device='cuda:2'), covar=tensor([0.0171, 0.0055, 0.0759, 0.0085, 0.0068, 0.0065, 0.0089, 0.0864], + device='cuda:2'), in_proj_covar=tensor([0.0079, 0.0070, 0.0090, 0.0086, 0.0079, 0.0067, 0.0078, 0.0092], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 04:39:20,931 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5759, 2.4130, 2.8212, 2.9331, 3.4609, 5.2062, 4.8187, 4.1150], + device='cuda:2'), covar=tensor([0.1469, 0.2136, 0.2474, 0.1452, 0.1923, 0.0116, 0.0341, 0.0563], + device='cuda:2'), in_proj_covar=tensor([0.0258, 0.0313, 0.0333, 0.0255, 0.0369, 0.0201, 0.0270, 0.0218], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 04:39:23,847 INFO [train.py:898] (2/4) Epoch 13, batch 2000, loss[loss=0.1843, simple_loss=0.2828, pruned_loss=0.04292, over 18473.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2666, pruned_loss=0.0472, over 3587180.43 frames. ], batch size: 53, lr: 8.73e-03, grad_scale: 8.0 +2023-03-09 04:39:46,716 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. limit=5.0 +2023-03-09 04:40:17,736 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.026e+02 2.791e+02 3.256e+02 3.955e+02 1.111e+03, threshold=6.512e+02, percent-clipped=4.0 +2023-03-09 04:40:23,327 INFO [train.py:898] (2/4) Epoch 13, batch 2050, loss[loss=0.2046, simple_loss=0.2882, pruned_loss=0.06045, over 15920.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2668, pruned_loss=0.04759, over 3570131.89 frames. ], batch size: 94, lr: 8.73e-03, grad_scale: 8.0 +2023-03-09 04:40:56,152 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-09 04:41:14,588 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45702.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:41:17,260 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45704.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 04:41:22,662 INFO [train.py:898] (2/4) Epoch 13, batch 2100, loss[loss=0.2248, simple_loss=0.2996, pruned_loss=0.07499, over 12666.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2673, pruned_loss=0.0477, over 3570395.91 frames. 
], batch size: 131, lr: 8.72e-03, grad_scale: 8.0 +2023-03-09 04:41:26,375 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45712.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 04:41:35,396 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4741, 6.0986, 5.5476, 5.8670, 5.6292, 5.5465, 6.1242, 6.1054], + device='cuda:2'), covar=tensor([0.1189, 0.0614, 0.0414, 0.0627, 0.1481, 0.0760, 0.0588, 0.0603], + device='cuda:2'), in_proj_covar=tensor([0.0552, 0.0460, 0.0347, 0.0495, 0.0676, 0.0500, 0.0651, 0.0484], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 04:41:48,476 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8076, 3.6849, 4.9769, 4.3030, 3.4134, 2.8204, 4.3379, 5.2061], + device='cuda:2'), covar=tensor([0.0736, 0.1633, 0.0162, 0.0354, 0.0810, 0.1199, 0.0344, 0.0181], + device='cuda:2'), in_proj_covar=tensor([0.0139, 0.0254, 0.0115, 0.0167, 0.0182, 0.0180, 0.0178, 0.0162], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 04:41:51,863 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0051, 4.1546, 2.3377, 4.0982, 5.1121, 2.3447, 3.6266, 3.8564], + device='cuda:2'), covar=tensor([0.0102, 0.1137, 0.1765, 0.0608, 0.0055, 0.1515, 0.0723, 0.0744], + device='cuda:2'), in_proj_covar=tensor([0.0130, 0.0244, 0.0197, 0.0193, 0.0096, 0.0177, 0.0206, 0.0212], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 04:41:55,645 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45737.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:42:14,742 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.079e+02 3.037e+02 3.571e+02 4.287e+02 1.143e+03, threshold=7.142e+02, percent-clipped=3.0 +2023-03-09 04:42:21,821 INFO [train.py:898] (2/4) Epoch 13, batch 2150, loss[loss=0.1941, simple_loss=0.2804, pruned_loss=0.05388, over 17750.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2684, pruned_loss=0.04819, over 3576419.91 frames. 
], batch size: 70, lr: 8.72e-03, grad_scale: 8.0 +2023-03-09 04:42:23,182 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45760.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:42:26,734 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45763.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:42:38,903 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45774.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:42:43,209 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8947, 4.9306, 4.9862, 4.6775, 4.7114, 4.7475, 5.0348, 4.9858], + device='cuda:2'), covar=tensor([0.0058, 0.0060, 0.0049, 0.0096, 0.0054, 0.0116, 0.0089, 0.0116], + device='cuda:2'), in_proj_covar=tensor([0.0081, 0.0058, 0.0060, 0.0076, 0.0064, 0.0089, 0.0074, 0.0074], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 04:42:47,667 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9012, 5.3494, 5.3257, 5.3544, 4.8539, 5.1916, 4.6834, 5.2077], + device='cuda:2'), covar=tensor([0.0194, 0.0236, 0.0167, 0.0281, 0.0340, 0.0202, 0.0950, 0.0251], + device='cuda:2'), in_proj_covar=tensor([0.0187, 0.0234, 0.0225, 0.0269, 0.0238, 0.0235, 0.0293, 0.0226], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005], + device='cuda:2') +2023-03-09 04:42:51,017 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=45785.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:43:20,404 INFO [train.py:898] (2/4) Epoch 13, batch 2200, loss[loss=0.1867, simple_loss=0.2808, pruned_loss=0.0463, over 18214.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2683, pruned_loss=0.04835, over 3573078.49 frames. ], batch size: 60, lr: 8.72e-03, grad_scale: 8.0 +2023-03-09 04:43:50,843 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45835.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:44:12,834 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.979e+02 3.106e+02 3.775e+02 4.323e+02 7.775e+02, threshold=7.551e+02, percent-clipped=4.0 +2023-03-09 04:44:18,535 INFO [train.py:898] (2/4) Epoch 13, batch 2250, loss[loss=0.1645, simple_loss=0.2489, pruned_loss=0.04007, over 18274.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2683, pruned_loss=0.04809, over 3577236.38 frames. ], batch size: 49, lr: 8.71e-03, grad_scale: 8.0 +2023-03-09 04:44:18,861 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45859.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:45:15,296 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6257, 2.8091, 2.5949, 2.7164, 3.6312, 3.5754, 3.0330, 3.0016], + device='cuda:2'), covar=tensor([0.0207, 0.0303, 0.0539, 0.0422, 0.0191, 0.0131, 0.0373, 0.0323], + device='cuda:2'), in_proj_covar=tensor([0.0122, 0.0114, 0.0153, 0.0145, 0.0108, 0.0095, 0.0139, 0.0136], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 04:45:17,116 INFO [train.py:898] (2/4) Epoch 13, batch 2300, loss[loss=0.1904, simple_loss=0.2825, pruned_loss=0.04918, over 18482.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2679, pruned_loss=0.04778, over 3586709.72 frames. 
], batch size: 53, lr: 8.71e-03, grad_scale: 8.0 +2023-03-09 04:45:31,364 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45920.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:46:10,652 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.145e+02 3.100e+02 3.461e+02 4.036e+02 9.492e+02, threshold=6.921e+02, percent-clipped=1.0 +2023-03-09 04:46:15,580 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0839, 3.9309, 5.1242, 2.9720, 4.4994, 2.5799, 3.1563, 1.9511], + device='cuda:2'), covar=tensor([0.0963, 0.0748, 0.0133, 0.0775, 0.0559, 0.2420, 0.2416, 0.1913], + device='cuda:2'), in_proj_covar=tensor([0.0204, 0.0224, 0.0130, 0.0177, 0.0237, 0.0254, 0.0296, 0.0214], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 04:46:16,237 INFO [train.py:898] (2/4) Epoch 13, batch 2350, loss[loss=0.1682, simple_loss=0.2578, pruned_loss=0.03928, over 18249.00 frames. ], tot_loss[loss=0.181, simple_loss=0.267, pruned_loss=0.04748, over 3583339.11 frames. ], batch size: 47, lr: 8.70e-03, grad_scale: 8.0 +2023-03-09 04:46:54,528 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9277, 4.5111, 4.6501, 3.4348, 3.6889, 3.7048, 2.9167, 2.6214], + device='cuda:2'), covar=tensor([0.0203, 0.0144, 0.0061, 0.0262, 0.0308, 0.0162, 0.0597, 0.0758], + device='cuda:2'), in_proj_covar=tensor([0.0062, 0.0050, 0.0052, 0.0061, 0.0083, 0.0060, 0.0072, 0.0079], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0006, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 04:47:14,933 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46004.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 04:47:20,470 INFO [train.py:898] (2/4) Epoch 13, batch 2400, loss[loss=0.1911, simple_loss=0.2818, pruned_loss=0.05022, over 17826.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2675, pruned_loss=0.04749, over 3575806.33 frames. ], batch size: 70, lr: 8.70e-03, grad_scale: 8.0 +2023-03-09 04:48:11,664 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46052.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 04:48:14,683 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.917e+02 3.063e+02 3.864e+02 4.591e+02 1.180e+03, threshold=7.727e+02, percent-clipped=5.0 +2023-03-09 04:48:18,389 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46058.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:48:19,203 INFO [train.py:898] (2/4) Epoch 13, batch 2450, loss[loss=0.1572, simple_loss=0.2362, pruned_loss=0.03913, over 18259.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2675, pruned_loss=0.04745, over 3581260.63 frames. ], batch size: 45, lr: 8.69e-03, grad_scale: 8.0 +2023-03-09 04:49:09,863 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.46 vs. limit=5.0 +2023-03-09 04:49:18,307 INFO [train.py:898] (2/4) Epoch 13, batch 2500, loss[loss=0.193, simple_loss=0.2789, pruned_loss=0.05357, over 16123.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2674, pruned_loss=0.04725, over 3590166.36 frames. 
], batch size: 94, lr: 8.69e-03, grad_scale: 8.0 +2023-03-09 04:49:19,715 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3790, 3.2291, 1.7194, 4.1715, 2.8531, 3.7623, 1.8305, 3.4882], + device='cuda:2'), covar=tensor([0.0625, 0.0896, 0.1829, 0.0534, 0.0967, 0.0381, 0.1584, 0.0524], + device='cuda:2'), in_proj_covar=tensor([0.0202, 0.0216, 0.0183, 0.0252, 0.0182, 0.0252, 0.0195, 0.0190], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 04:49:42,835 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46130.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:50:11,943 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.884e+02 3.026e+02 3.575e+02 4.443e+02 8.459e+02, threshold=7.149e+02, percent-clipped=1.0 +2023-03-09 04:50:16,972 INFO [train.py:898] (2/4) Epoch 13, batch 2550, loss[loss=0.1852, simple_loss=0.2639, pruned_loss=0.05324, over 18248.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.267, pruned_loss=0.047, over 3602155.82 frames. ], batch size: 45, lr: 8.68e-03, grad_scale: 8.0 +2023-03-09 04:50:26,027 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7281, 2.2144, 2.7569, 2.8599, 3.4425, 5.2085, 4.7265, 3.9568], + device='cuda:2'), covar=tensor([0.1352, 0.2146, 0.2529, 0.1423, 0.1821, 0.0170, 0.0366, 0.0578], + device='cuda:2'), in_proj_covar=tensor([0.0259, 0.0315, 0.0336, 0.0254, 0.0368, 0.0202, 0.0272, 0.0220], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 04:51:15,928 INFO [train.py:898] (2/4) Epoch 13, batch 2600, loss[loss=0.1789, simple_loss=0.2672, pruned_loss=0.04526, over 17246.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2664, pruned_loss=0.04686, over 3596677.23 frames. ], batch size: 78, lr: 8.68e-03, grad_scale: 8.0 +2023-03-09 04:51:23,619 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46215.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:52:10,073 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.796e+02 3.019e+02 3.586e+02 4.587e+02 8.864e+02, threshold=7.172e+02, percent-clipped=4.0 +2023-03-09 04:52:14,636 INFO [train.py:898] (2/4) Epoch 13, batch 2650, loss[loss=0.1939, simple_loss=0.2798, pruned_loss=0.05399, over 18142.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2665, pruned_loss=0.04678, over 3587301.08 frames. ], batch size: 62, lr: 8.67e-03, grad_scale: 8.0 +2023-03-09 04:52:35,510 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46276.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:53:14,121 INFO [train.py:898] (2/4) Epoch 13, batch 2700, loss[loss=0.155, simple_loss=0.236, pruned_loss=0.03697, over 18263.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2664, pruned_loss=0.04679, over 3577668.18 frames. 
], batch size: 45, lr: 8.67e-03, grad_scale: 8.0 +2023-03-09 04:53:47,889 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46337.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:54:08,566 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.122e+02 2.828e+02 3.269e+02 4.141e+02 7.295e+02, threshold=6.537e+02, percent-clipped=1.0 +2023-03-09 04:54:12,543 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46358.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:54:13,368 INFO [train.py:898] (2/4) Epoch 13, batch 2750, loss[loss=0.1571, simple_loss=0.2402, pruned_loss=0.03703, over 18492.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2663, pruned_loss=0.04669, over 3578866.85 frames. ], batch size: 44, lr: 8.66e-03, grad_scale: 8.0 +2023-03-09 04:55:07,770 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=5.01 vs. limit=5.0 +2023-03-09 04:55:09,399 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46406.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:55:12,677 INFO [train.py:898] (2/4) Epoch 13, batch 2800, loss[loss=0.1722, simple_loss=0.2575, pruned_loss=0.04351, over 18398.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2662, pruned_loss=0.0469, over 3580788.99 frames. ], batch size: 48, lr: 8.66e-03, grad_scale: 8.0 +2023-03-09 04:55:38,133 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46430.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:55:40,614 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 04:56:07,316 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.145e+02 3.038e+02 3.515e+02 4.163e+02 1.533e+03, threshold=7.029e+02, percent-clipped=2.0 +2023-03-09 04:56:11,758 INFO [train.py:898] (2/4) Epoch 13, batch 2850, loss[loss=0.1775, simple_loss=0.2688, pruned_loss=0.0431, over 18312.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2664, pruned_loss=0.04699, over 3577255.63 frames. ], batch size: 54, lr: 8.65e-03, grad_scale: 8.0 +2023-03-09 04:56:34,784 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46478.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:56:38,376 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5105, 5.9985, 5.4465, 5.6943, 5.5632, 5.4446, 6.0473, 5.9698], + device='cuda:2'), covar=tensor([0.1097, 0.0631, 0.0481, 0.0740, 0.1295, 0.0739, 0.0508, 0.0630], + device='cuda:2'), in_proj_covar=tensor([0.0558, 0.0467, 0.0355, 0.0504, 0.0686, 0.0506, 0.0655, 0.0496], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 04:57:10,806 INFO [train.py:898] (2/4) Epoch 13, batch 2900, loss[loss=0.2202, simple_loss=0.2963, pruned_loss=0.07203, over 12626.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.267, pruned_loss=0.04713, over 3574116.72 frames. 
], batch size: 130, lr: 8.65e-03, grad_scale: 8.0 +2023-03-09 04:57:14,791 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6486, 2.1291, 2.6051, 2.6183, 3.2756, 5.0962, 4.5102, 4.0017], + device='cuda:2'), covar=tensor([0.1411, 0.2213, 0.2718, 0.1580, 0.2055, 0.0122, 0.0417, 0.0524], + device='cuda:2'), in_proj_covar=tensor([0.0259, 0.0313, 0.0336, 0.0254, 0.0367, 0.0200, 0.0271, 0.0219], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 04:57:18,194 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46515.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:57:23,248 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-09 04:58:05,313 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.838e+02 2.971e+02 3.539e+02 4.206e+02 8.689e+02, threshold=7.079e+02, percent-clipped=3.0 +2023-03-09 04:58:09,928 INFO [train.py:898] (2/4) Epoch 13, batch 2950, loss[loss=0.1629, simple_loss=0.2424, pruned_loss=0.04168, over 18266.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2664, pruned_loss=0.04684, over 3574473.60 frames. ], batch size: 47, lr: 8.65e-03, grad_scale: 8.0 +2023-03-09 04:58:14,676 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46563.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:58:52,116 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46595.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:58:56,586 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.92 vs. limit=5.0 +2023-03-09 04:59:01,098 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46602.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 04:59:09,195 INFO [train.py:898] (2/4) Epoch 13, batch 3000, loss[loss=0.2032, simple_loss=0.2932, pruned_loss=0.05658, over 18081.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2662, pruned_loss=0.04675, over 3584760.24 frames. ], batch size: 62, lr: 8.64e-03, grad_scale: 8.0 +2023-03-09 04:59:09,195 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 04:59:21,018 INFO [train.py:932] (2/4) Epoch 13, validation: loss=0.1542, simple_loss=0.256, pruned_loss=0.02615, over 944034.00 frames. +2023-03-09 04:59:21,019 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 04:59:42,511 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5970, 2.9091, 2.5092, 2.8760, 3.6587, 3.6309, 3.0489, 2.9936], + device='cuda:2'), covar=tensor([0.0175, 0.0244, 0.0543, 0.0335, 0.0142, 0.0148, 0.0358, 0.0310], + device='cuda:2'), in_proj_covar=tensor([0.0122, 0.0113, 0.0153, 0.0144, 0.0108, 0.0097, 0.0138, 0.0136], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 04:59:47,427 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. 
limit=2.0 +2023-03-09 04:59:47,885 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46632.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:00:15,395 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.844e+02 2.916e+02 3.524e+02 4.268e+02 9.781e+02, threshold=7.048e+02, percent-clipped=5.0 +2023-03-09 05:00:17,066 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46656.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 05:00:19,947 INFO [train.py:898] (2/4) Epoch 13, batch 3050, loss[loss=0.1859, simple_loss=0.2735, pruned_loss=0.04919, over 17795.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2673, pruned_loss=0.04736, over 3586813.43 frames. ], batch size: 70, lr: 8.64e-03, grad_scale: 8.0 +2023-03-09 05:00:24,808 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46663.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:00:44,884 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8036, 4.4280, 4.5998, 3.5585, 3.7783, 3.3570, 2.5089, 2.3883], + device='cuda:2'), covar=tensor([0.0199, 0.0165, 0.0060, 0.0234, 0.0294, 0.0238, 0.0745, 0.0895], + device='cuda:2'), in_proj_covar=tensor([0.0061, 0.0051, 0.0053, 0.0062, 0.0084, 0.0060, 0.0073, 0.0080], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0006, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 05:01:00,444 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-09 05:01:18,396 INFO [train.py:898] (2/4) Epoch 13, batch 3100, loss[loss=0.1627, simple_loss=0.2449, pruned_loss=0.04026, over 18562.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2669, pruned_loss=0.04677, over 3592033.31 frames. ], batch size: 45, lr: 8.63e-03, grad_scale: 8.0 +2023-03-09 05:01:24,968 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 05:02:00,455 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2846, 5.2846, 4.6842, 5.1932, 5.2491, 4.6886, 5.1138, 4.8395], + device='cuda:2'), covar=tensor([0.0529, 0.0542, 0.1899, 0.0934, 0.0598, 0.0547, 0.0618, 0.1165], + device='cuda:2'), in_proj_covar=tensor([0.0421, 0.0494, 0.0647, 0.0386, 0.0373, 0.0445, 0.0476, 0.0612], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 05:02:12,199 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.124e+02 3.204e+02 3.767e+02 4.418e+02 1.241e+03, threshold=7.535e+02, percent-clipped=5.0 +2023-03-09 05:02:16,839 INFO [train.py:898] (2/4) Epoch 13, batch 3150, loss[loss=0.1855, simple_loss=0.2707, pruned_loss=0.05011, over 18552.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2668, pruned_loss=0.0469, over 3596477.67 frames. 
], batch size: 49, lr: 8.63e-03, grad_scale: 8.0 +2023-03-09 05:02:28,690 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8156, 4.9081, 4.9368, 4.6876, 4.6966, 4.6960, 5.0363, 5.1019], + device='cuda:2'), covar=tensor([0.0077, 0.0069, 0.0063, 0.0098, 0.0068, 0.0147, 0.0089, 0.0094], + device='cuda:2'), in_proj_covar=tensor([0.0085, 0.0061, 0.0063, 0.0081, 0.0068, 0.0093, 0.0078, 0.0078], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003], + device='cuda:2') +2023-03-09 05:03:16,837 INFO [train.py:898] (2/4) Epoch 13, batch 3200, loss[loss=0.1888, simple_loss=0.2755, pruned_loss=0.05103, over 18489.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2667, pruned_loss=0.0469, over 3589931.74 frames. ], batch size: 53, lr: 8.62e-03, grad_scale: 8.0 +2023-03-09 05:03:18,441 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46810.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:03:37,633 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-09 05:04:05,389 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1954, 4.0778, 5.2658, 3.0737, 4.6436, 2.7162, 3.3031, 2.0182], + device='cuda:2'), covar=tensor([0.0857, 0.0721, 0.0080, 0.0742, 0.0472, 0.2218, 0.2171, 0.1815], + device='cuda:2'), in_proj_covar=tensor([0.0206, 0.0225, 0.0133, 0.0179, 0.0239, 0.0256, 0.0299, 0.0217], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 05:04:10,410 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.338e+02 3.187e+02 3.650e+02 4.694e+02 1.027e+03, threshold=7.300e+02, percent-clipped=4.0 +2023-03-09 05:04:15,584 INFO [train.py:898] (2/4) Epoch 13, batch 3250, loss[loss=0.1921, simple_loss=0.2836, pruned_loss=0.05029, over 18616.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2663, pruned_loss=0.04687, over 3584544.48 frames. ], batch size: 52, lr: 8.62e-03, grad_scale: 8.0 +2023-03-09 05:04:21,720 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9302, 3.3830, 2.5129, 3.3726, 4.0549, 2.4637, 3.1706, 3.2988], + device='cuda:2'), covar=tensor([0.0204, 0.1036, 0.1361, 0.0612, 0.0109, 0.1211, 0.0785, 0.0805], + device='cuda:2'), in_proj_covar=tensor([0.0130, 0.0241, 0.0194, 0.0190, 0.0096, 0.0175, 0.0204, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 05:04:29,678 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46871.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:05:08,194 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8773, 4.6969, 4.8207, 4.6439, 4.6365, 4.7907, 5.0706, 4.9530], + device='cuda:2'), covar=tensor([0.0110, 0.0128, 0.0114, 0.0127, 0.0096, 0.0154, 0.0118, 0.0187], + device='cuda:2'), in_proj_covar=tensor([0.0085, 0.0060, 0.0063, 0.0080, 0.0067, 0.0092, 0.0078, 0.0077], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 05:05:14,056 INFO [train.py:898] (2/4) Epoch 13, batch 3300, loss[loss=0.1903, simple_loss=0.2843, pruned_loss=0.04809, over 18208.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2673, pruned_loss=0.04716, over 3576383.13 frames. 
], batch size: 60, lr: 8.61e-03, grad_scale: 8.0 +2023-03-09 05:05:23,415 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4546, 3.3528, 2.2013, 4.1951, 2.8388, 4.1498, 1.9174, 3.7577], + device='cuda:2'), covar=tensor([0.0548, 0.0709, 0.1304, 0.0504, 0.0806, 0.0319, 0.1332, 0.0398], + device='cuda:2'), in_proj_covar=tensor([0.0196, 0.0213, 0.0180, 0.0248, 0.0180, 0.0247, 0.0192, 0.0189], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 05:05:41,186 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46932.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:06:03,426 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46951.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 05:06:07,488 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.79 vs. limit=5.0 +2023-03-09 05:06:07,663 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.054e+02 2.913e+02 3.556e+02 4.553e+02 1.517e+03, threshold=7.113e+02, percent-clipped=7.0 +2023-03-09 05:06:11,435 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46958.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:06:12,393 INFO [train.py:898] (2/4) Epoch 13, batch 3350, loss[loss=0.1736, simple_loss=0.2636, pruned_loss=0.0418, over 18618.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2672, pruned_loss=0.04718, over 3580389.21 frames. ], batch size: 52, lr: 8.61e-03, grad_scale: 8.0 +2023-03-09 05:06:38,286 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=46980.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:07:03,231 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2725, 5.1748, 5.4428, 5.4272, 5.1278, 5.9395, 5.5636, 5.2344], + device='cuda:2'), covar=tensor([0.0876, 0.0666, 0.0685, 0.0682, 0.1372, 0.0764, 0.0729, 0.1548], + device='cuda:2'), in_proj_covar=tensor([0.0316, 0.0250, 0.0267, 0.0265, 0.0300, 0.0374, 0.0244, 0.0366], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0003], + device='cuda:2') +2023-03-09 05:07:12,263 INFO [train.py:898] (2/4) Epoch 13, batch 3400, loss[loss=0.1803, simple_loss=0.2701, pruned_loss=0.04527, over 18285.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2669, pruned_loss=0.04712, over 3587944.75 frames. 
], batch size: 57, lr: 8.60e-03, grad_scale: 8.0 +2023-03-09 05:07:13,893 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6532, 3.5239, 2.6232, 4.4734, 3.0768, 4.4594, 2.5780, 4.0748], + device='cuda:2'), covar=tensor([0.0582, 0.0698, 0.1110, 0.0367, 0.0743, 0.0238, 0.1025, 0.0334], + device='cuda:2'), in_proj_covar=tensor([0.0196, 0.0212, 0.0180, 0.0247, 0.0178, 0.0245, 0.0191, 0.0188], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 05:07:45,466 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5263, 5.4416, 5.0977, 5.4705, 5.3677, 4.8022, 5.3793, 5.1016], + device='cuda:2'), covar=tensor([0.0344, 0.0363, 0.1230, 0.0578, 0.0557, 0.0382, 0.0370, 0.0939], + device='cuda:2'), in_proj_covar=tensor([0.0434, 0.0504, 0.0661, 0.0391, 0.0386, 0.0453, 0.0483, 0.0623], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 05:08:07,265 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.855e+02 2.739e+02 3.192e+02 3.909e+02 7.528e+02, threshold=6.383e+02, percent-clipped=0.0 +2023-03-09 05:08:11,944 INFO [train.py:898] (2/4) Epoch 13, batch 3450, loss[loss=0.1877, simple_loss=0.2741, pruned_loss=0.05068, over 15985.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2666, pruned_loss=0.04682, over 3591401.80 frames. ], batch size: 94, lr: 8.60e-03, grad_scale: 8.0 +2023-03-09 05:08:12,755 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.95 vs. limit=5.0 +2023-03-09 05:08:36,645 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-03-09 05:08:39,107 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-03-09 05:09:01,657 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7884, 5.3388, 5.3053, 5.3764, 4.8856, 5.2654, 4.6590, 5.1862], + device='cuda:2'), covar=tensor([0.0246, 0.0305, 0.0204, 0.0314, 0.0353, 0.0213, 0.1146, 0.0377], + device='cuda:2'), in_proj_covar=tensor([0.0190, 0.0238, 0.0223, 0.0270, 0.0239, 0.0236, 0.0294, 0.0229], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 05:09:11,342 INFO [train.py:898] (2/4) Epoch 13, batch 3500, loss[loss=0.1797, simple_loss=0.2611, pruned_loss=0.04911, over 18369.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2667, pruned_loss=0.04685, over 3587638.85 frames. ], batch size: 46, lr: 8.60e-03, grad_scale: 8.0 +2023-03-09 05:10:03,841 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.269e+02 2.980e+02 3.583e+02 4.065e+02 6.381e+02, threshold=7.167e+02, percent-clipped=1.0 +2023-03-09 05:10:08,148 INFO [train.py:898] (2/4) Epoch 13, batch 3550, loss[loss=0.1894, simple_loss=0.2774, pruned_loss=0.0507, over 18489.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2652, pruned_loss=0.04632, over 3599091.40 frames. ], batch size: 53, lr: 8.59e-03, grad_scale: 8.0 +2023-03-09 05:10:15,865 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47166.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:10:50,869 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.57 vs. 
limit=5.0 +2023-03-09 05:10:56,869 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9604, 5.4450, 5.4140, 5.4936, 4.9663, 5.3156, 4.7738, 5.3571], + device='cuda:2'), covar=tensor([0.0185, 0.0237, 0.0175, 0.0288, 0.0297, 0.0211, 0.0955, 0.0247], + device='cuda:2'), in_proj_covar=tensor([0.0189, 0.0238, 0.0223, 0.0271, 0.0241, 0.0237, 0.0295, 0.0228], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 05:11:02,841 INFO [train.py:898] (2/4) Epoch 13, batch 3600, loss[loss=0.1796, simple_loss=0.2761, pruned_loss=0.0416, over 18351.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.266, pruned_loss=0.0466, over 3601317.04 frames. ], batch size: 55, lr: 8.59e-03, grad_scale: 8.0 +2023-03-09 05:11:36,474 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47241.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:12:09,409 INFO [train.py:898] (2/4) Epoch 14, batch 0, loss[loss=0.1806, simple_loss=0.2637, pruned_loss=0.04875, over 18255.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2637, pruned_loss=0.04875, over 18255.00 frames. ], batch size: 47, lr: 8.27e-03, grad_scale: 8.0 +2023-03-09 05:12:09,409 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 05:12:21,329 INFO [train.py:932] (2/4) Epoch 14, validation: loss=0.155, simple_loss=0.2569, pruned_loss=0.0266, over 944034.00 frames. +2023-03-09 05:12:21,329 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 05:12:31,199 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47251.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:12:35,444 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.980e+02 3.395e+02 4.114e+02 5.070e+02 1.381e+03, threshold=8.228e+02, percent-clipped=11.0 +2023-03-09 05:12:39,273 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47258.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:12:48,131 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9012, 4.7638, 4.8794, 4.7599, 4.6623, 4.7630, 5.1006, 5.1157], + device='cuda:2'), covar=tensor([0.0074, 0.0097, 0.0074, 0.0098, 0.0082, 0.0128, 0.0090, 0.0103], + device='cuda:2'), in_proj_covar=tensor([0.0082, 0.0058, 0.0062, 0.0078, 0.0065, 0.0089, 0.0075, 0.0076], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 05:13:19,480 INFO [train.py:898] (2/4) Epoch 14, batch 50, loss[loss=0.1637, simple_loss=0.2587, pruned_loss=0.03441, over 18498.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2649, pruned_loss=0.04534, over 825432.06 frames. ], batch size: 51, lr: 8.27e-03, grad_scale: 8.0 +2023-03-09 05:13:26,880 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47299.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:13:30,390 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47302.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:13:35,435 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47306.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:14:18,549 INFO [train.py:898] (2/4) Epoch 14, batch 100, loss[loss=0.1456, simple_loss=0.2282, pruned_loss=0.03156, over 17676.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2648, pruned_loss=0.04493, over 1445118.24 frames. 
], batch size: 39, lr: 8.26e-03, grad_scale: 8.0 +2023-03-09 05:14:29,906 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7181, 3.6701, 5.1499, 4.6307, 3.3815, 3.1348, 4.5593, 5.3687], + device='cuda:2'), covar=tensor([0.0824, 0.1524, 0.0145, 0.0308, 0.0864, 0.1090, 0.0342, 0.0155], + device='cuda:2'), in_proj_covar=tensor([0.0136, 0.0250, 0.0115, 0.0163, 0.0178, 0.0176, 0.0176, 0.0159], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 05:14:32,861 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.774e+02 2.933e+02 3.364e+02 4.152e+02 6.819e+02, threshold=6.727e+02, percent-clipped=0.0 +2023-03-09 05:15:02,534 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47379.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:15:16,211 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9406, 4.6358, 2.6299, 4.5269, 4.4244, 4.6290, 4.4364, 2.4466], + device='cuda:2'), covar=tensor([0.0166, 0.0085, 0.0834, 0.0122, 0.0097, 0.0106, 0.0136, 0.1221], + device='cuda:2'), in_proj_covar=tensor([0.0078, 0.0071, 0.0089, 0.0086, 0.0078, 0.0067, 0.0078, 0.0092], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 05:15:18,136 INFO [train.py:898] (2/4) Epoch 14, batch 150, loss[loss=0.1477, simple_loss=0.231, pruned_loss=0.03221, over 17259.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2667, pruned_loss=0.04607, over 1901173.52 frames. ], batch size: 38, lr: 8.26e-03, grad_scale: 8.0 +2023-03-09 05:16:15,654 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47440.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:16:18,709 INFO [train.py:898] (2/4) Epoch 14, batch 200, loss[loss=0.1528, simple_loss=0.238, pruned_loss=0.03376, over 18579.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2661, pruned_loss=0.0459, over 2283282.82 frames. ], batch size: 45, lr: 8.25e-03, grad_scale: 8.0 +2023-03-09 05:16:25,385 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-09 05:16:32,554 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.856e+02 2.798e+02 3.533e+02 4.067e+02 1.064e+03, threshold=7.066e+02, percent-clipped=5.0 +2023-03-09 05:16:36,987 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7122, 4.6912, 4.7992, 4.5600, 4.5347, 4.6770, 4.9460, 4.8901], + device='cuda:2'), covar=tensor([0.0082, 0.0091, 0.0076, 0.0117, 0.0069, 0.0127, 0.0090, 0.0128], + device='cuda:2'), in_proj_covar=tensor([0.0084, 0.0060, 0.0063, 0.0080, 0.0067, 0.0091, 0.0077, 0.0077], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 05:16:46,988 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47466.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:17:18,341 INFO [train.py:898] (2/4) Epoch 14, batch 250, loss[loss=0.1705, simple_loss=0.2534, pruned_loss=0.04377, over 18270.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2656, pruned_loss=0.04603, over 2579406.62 frames. 
], batch size: 49, lr: 8.25e-03, grad_scale: 8.0 +2023-03-09 05:17:43,445 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47514.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:18:16,892 INFO [train.py:898] (2/4) Epoch 14, batch 300, loss[loss=0.1895, simple_loss=0.2702, pruned_loss=0.05439, over 18278.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2667, pruned_loss=0.04666, over 2802730.96 frames. ], batch size: 47, lr: 8.24e-03, grad_scale: 8.0 +2023-03-09 05:18:30,803 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.041e+02 3.040e+02 3.610e+02 4.364e+02 8.752e+02, threshold=7.221e+02, percent-clipped=2.0 +2023-03-09 05:19:16,498 INFO [train.py:898] (2/4) Epoch 14, batch 350, loss[loss=0.1606, simple_loss=0.2487, pruned_loss=0.03628, over 18543.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2654, pruned_loss=0.04614, over 2983815.84 frames. ], batch size: 49, lr: 8.24e-03, grad_scale: 8.0 +2023-03-09 05:19:21,105 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47597.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:20:07,635 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8374, 2.6279, 4.4564, 4.2691, 2.4503, 4.8361, 4.0198, 2.8983], + device='cuda:2'), covar=tensor([0.0363, 0.1901, 0.0206, 0.0196, 0.2013, 0.0186, 0.0469, 0.1208], + device='cuda:2'), in_proj_covar=tensor([0.0196, 0.0226, 0.0168, 0.0147, 0.0213, 0.0191, 0.0217, 0.0190], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 05:20:15,249 INFO [train.py:898] (2/4) Epoch 14, batch 400, loss[loss=0.1517, simple_loss=0.2367, pruned_loss=0.03335, over 18403.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.266, pruned_loss=0.04633, over 3107590.00 frames. ], batch size: 42, lr: 8.24e-03, grad_scale: 8.0 +2023-03-09 05:20:28,442 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.702e+02 2.895e+02 3.399e+02 4.132e+02 9.040e+02, threshold=6.798e+02, percent-clipped=2.0 +2023-03-09 05:21:14,180 INFO [train.py:898] (2/4) Epoch 14, batch 450, loss[loss=0.177, simple_loss=0.2596, pruned_loss=0.04723, over 18414.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.266, pruned_loss=0.04613, over 3223161.23 frames. 
], batch size: 48, lr: 8.23e-03, grad_scale: 8.0 +2023-03-09 05:22:04,132 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47735.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:22:05,387 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9832, 5.1009, 5.1884, 4.9385, 4.9150, 4.8986, 5.2027, 5.2655], + device='cuda:2'), covar=tensor([0.0080, 0.0049, 0.0044, 0.0082, 0.0052, 0.0119, 0.0088, 0.0078], + device='cuda:2'), in_proj_covar=tensor([0.0084, 0.0059, 0.0062, 0.0079, 0.0066, 0.0090, 0.0076, 0.0076], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 05:22:07,640 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47738.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:22:11,789 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6078, 2.0712, 2.5491, 2.5037, 3.1234, 4.8591, 4.5232, 3.4825], + device='cuda:2'), covar=tensor([0.1437, 0.2336, 0.2551, 0.1675, 0.2080, 0.0149, 0.0399, 0.0697], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0319, 0.0339, 0.0258, 0.0372, 0.0204, 0.0275, 0.0223], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 05:22:13,608 INFO [train.py:898] (2/4) Epoch 14, batch 500, loss[loss=0.1483, simple_loss=0.2235, pruned_loss=0.03656, over 18495.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2651, pruned_loss=0.04565, over 3317420.49 frames. ], batch size: 44, lr: 8.23e-03, grad_scale: 8.0 +2023-03-09 05:22:27,493 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.904e+02 2.746e+02 3.457e+02 4.087e+02 8.380e+02, threshold=6.915e+02, percent-clipped=1.0 +2023-03-09 05:22:59,186 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0689, 4.3161, 2.3739, 4.3791, 5.2132, 2.4859, 3.8770, 4.0600], + device='cuda:2'), covar=tensor([0.0087, 0.0950, 0.1616, 0.0447, 0.0054, 0.1261, 0.0620, 0.0602], + device='cuda:2'), in_proj_covar=tensor([0.0134, 0.0250, 0.0198, 0.0193, 0.0099, 0.0177, 0.0209, 0.0213], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 05:23:06,874 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47788.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:23:12,640 INFO [train.py:898] (2/4) Epoch 14, batch 550, loss[loss=0.1653, simple_loss=0.2417, pruned_loss=0.04448, over 17689.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2638, pruned_loss=0.04505, over 3379852.87 frames. ], batch size: 39, lr: 8.22e-03, grad_scale: 8.0 +2023-03-09 05:23:20,475 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47799.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:23:31,887 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47809.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 05:24:12,054 INFO [train.py:898] (2/4) Epoch 14, batch 600, loss[loss=0.1833, simple_loss=0.2757, pruned_loss=0.0454, over 18287.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2642, pruned_loss=0.0456, over 3414051.79 frames. 
], batch size: 57, lr: 8.22e-03, grad_scale: 8.0 +2023-03-09 05:24:19,699 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47849.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:24:25,848 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.360e+02 2.943e+02 3.448e+02 4.485e+02 1.001e+03, threshold=6.896e+02, percent-clipped=3.0 +2023-03-09 05:24:43,133 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47870.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 05:25:00,032 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-09 05:25:10,123 INFO [train.py:898] (2/4) Epoch 14, batch 650, loss[loss=0.1574, simple_loss=0.2369, pruned_loss=0.03894, over 18152.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2634, pruned_loss=0.0456, over 3465184.37 frames. ], batch size: 44, lr: 8.21e-03, grad_scale: 8.0 +2023-03-09 05:25:15,497 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47897.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:26:09,519 INFO [train.py:898] (2/4) Epoch 14, batch 700, loss[loss=0.1738, simple_loss=0.2587, pruned_loss=0.04439, over 18268.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2646, pruned_loss=0.04608, over 3501960.81 frames. ], batch size: 49, lr: 8.21e-03, grad_scale: 8.0 +2023-03-09 05:26:12,030 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=47945.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:26:23,811 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.052e+02 2.964e+02 3.596e+02 4.648e+02 7.874e+02, threshold=7.192e+02, percent-clipped=3.0 +2023-03-09 05:27:08,327 INFO [train.py:898] (2/4) Epoch 14, batch 750, loss[loss=0.1959, simple_loss=0.2855, pruned_loss=0.05313, over 18284.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2633, pruned_loss=0.04588, over 3522932.33 frames. ], batch size: 57, lr: 8.21e-03, grad_scale: 16.0 +2023-03-09 05:27:13,923 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7856, 3.6210, 3.4672, 3.0843, 3.3578, 2.6977, 2.7516, 3.6199], + device='cuda:2'), covar=tensor([0.0039, 0.0087, 0.0077, 0.0109, 0.0072, 0.0171, 0.0164, 0.0053], + device='cuda:2'), in_proj_covar=tensor([0.0111, 0.0135, 0.0116, 0.0167, 0.0118, 0.0164, 0.0165, 0.0098], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 05:27:21,989 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8516, 4.4495, 4.6821, 3.3095, 3.6738, 3.3549, 2.5672, 2.5170], + device='cuda:2'), covar=tensor([0.0186, 0.0185, 0.0055, 0.0311, 0.0335, 0.0232, 0.0753, 0.0867], + device='cuda:2'), in_proj_covar=tensor([0.0062, 0.0052, 0.0053, 0.0063, 0.0085, 0.0061, 0.0074, 0.0081], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0004, 0.0006, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 05:27:40,803 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.89 vs. limit=2.0 +2023-03-09 05:28:03,254 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48035.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:28:12,890 INFO [train.py:898] (2/4) Epoch 14, batch 800, loss[loss=0.184, simple_loss=0.2731, pruned_loss=0.04746, over 18566.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2635, pruned_loss=0.04578, over 3538035.86 frames. 
], batch size: 54, lr: 8.20e-03, grad_scale: 8.0 +2023-03-09 05:28:28,355 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.329e+02 3.109e+02 3.556e+02 4.248e+02 1.366e+03, threshold=7.111e+02, percent-clipped=4.0 +2023-03-09 05:28:48,057 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48072.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:29:00,402 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48083.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:29:04,138 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1610, 2.8263, 3.0016, 2.5743, 2.8634, 2.1307, 2.3741, 2.8305], + device='cuda:2'), covar=tensor([0.0081, 0.0160, 0.0122, 0.0183, 0.0128, 0.0295, 0.0246, 0.0112], + device='cuda:2'), in_proj_covar=tensor([0.0110, 0.0134, 0.0116, 0.0166, 0.0117, 0.0163, 0.0165, 0.0097], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 05:29:11,649 INFO [train.py:898] (2/4) Epoch 14, batch 850, loss[loss=0.1902, simple_loss=0.2758, pruned_loss=0.05233, over 16159.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2637, pruned_loss=0.0455, over 3550272.36 frames. ], batch size: 94, lr: 8.20e-03, grad_scale: 8.0 +2023-03-09 05:29:13,134 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48094.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:29:16,282 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1442, 4.2226, 2.4752, 4.2031, 5.2505, 2.4842, 3.8341, 4.0406], + device='cuda:2'), covar=tensor([0.0078, 0.1049, 0.1482, 0.0496, 0.0046, 0.1278, 0.0649, 0.0632], + device='cuda:2'), in_proj_covar=tensor([0.0131, 0.0242, 0.0190, 0.0187, 0.0097, 0.0174, 0.0203, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 05:30:00,009 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48133.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:30:10,879 INFO [train.py:898] (2/4) Epoch 14, batch 900, loss[loss=0.1609, simple_loss=0.2398, pruned_loss=0.04098, over 17635.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2642, pruned_loss=0.04571, over 3561887.09 frames. ], batch size: 39, lr: 8.19e-03, grad_scale: 8.0 +2023-03-09 05:30:12,261 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48144.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:30:25,240 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5934, 2.1102, 2.6839, 2.6733, 3.2204, 5.0664, 4.6452, 3.7560], + device='cuda:2'), covar=tensor([0.1434, 0.2217, 0.2531, 0.1539, 0.2078, 0.0138, 0.0361, 0.0630], + device='cuda:2'), in_proj_covar=tensor([0.0263, 0.0321, 0.0342, 0.0259, 0.0373, 0.0205, 0.0275, 0.0224], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 05:30:26,939 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.713e+02 2.967e+02 3.519e+02 4.098e+02 9.060e+02, threshold=7.037e+02, percent-clipped=1.0 +2023-03-09 05:30:38,267 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48165.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 05:31:10,224 INFO [train.py:898] (2/4) Epoch 14, batch 950, loss[loss=0.1834, simple_loss=0.2596, pruned_loss=0.05358, over 18288.00 frames. 
], tot_loss[loss=0.1784, simple_loss=0.2649, pruned_loss=0.046, over 3563106.53 frames. ], batch size: 47, lr: 8.19e-03, grad_scale: 8.0 +2023-03-09 05:31:34,421 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48212.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 05:31:57,878 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0515, 5.3332, 2.8429, 5.2056, 5.0359, 5.3515, 5.1620, 2.5277], + device='cuda:2'), covar=tensor([0.0160, 0.0104, 0.0782, 0.0071, 0.0080, 0.0095, 0.0107, 0.1003], + device='cuda:2'), in_proj_covar=tensor([0.0079, 0.0071, 0.0089, 0.0085, 0.0078, 0.0066, 0.0078, 0.0091], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 05:32:09,655 INFO [train.py:898] (2/4) Epoch 14, batch 1000, loss[loss=0.1999, simple_loss=0.2924, pruned_loss=0.05369, over 17709.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2643, pruned_loss=0.04562, over 3574808.56 frames. ], batch size: 70, lr: 8.19e-03, grad_scale: 8.0 +2023-03-09 05:32:26,020 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.854e+02 2.987e+02 3.542e+02 4.398e+02 9.134e+02, threshold=7.083e+02, percent-clipped=3.0 +2023-03-09 05:32:46,795 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48273.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 05:33:09,320 INFO [train.py:898] (2/4) Epoch 14, batch 1050, loss[loss=0.1546, simple_loss=0.2417, pruned_loss=0.03373, over 18267.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2645, pruned_loss=0.04565, over 3579917.91 frames. ], batch size: 47, lr: 8.18e-03, grad_scale: 8.0 +2023-03-09 05:33:47,364 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48324.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:34:08,864 INFO [train.py:898] (2/4) Epoch 14, batch 1100, loss[loss=0.1377, simple_loss=0.2225, pruned_loss=0.02645, over 18497.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2646, pruned_loss=0.04572, over 3586732.70 frames. ], batch size: 44, lr: 8.18e-03, grad_scale: 8.0 +2023-03-09 05:34:23,494 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.341e+02 3.220e+02 3.737e+02 4.236e+02 6.762e+02, threshold=7.474e+02, percent-clipped=0.0 +2023-03-09 05:34:59,419 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48385.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:35:08,048 INFO [train.py:898] (2/4) Epoch 14, batch 1150, loss[loss=0.1635, simple_loss=0.2394, pruned_loss=0.04376, over 18161.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2651, pruned_loss=0.04577, over 3585999.19 frames. 
], batch size: 44, lr: 8.17e-03, grad_scale: 8.0 +2023-03-09 05:35:09,485 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48394.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:35:12,903 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48397.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:35:44,664 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48423.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:35:49,930 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48428.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:36:06,312 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48442.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:36:07,210 INFO [train.py:898] (2/4) Epoch 14, batch 1200, loss[loss=0.2053, simple_loss=0.2916, pruned_loss=0.05947, over 16331.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2651, pruned_loss=0.04569, over 3592377.36 frames. ], batch size: 95, lr: 8.17e-03, grad_scale: 8.0 +2023-03-09 05:36:08,556 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48444.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:36:22,332 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.965e+02 3.035e+02 3.656e+02 4.283e+02 1.032e+03, threshold=7.311e+02, percent-clipped=2.0 +2023-03-09 05:36:25,036 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48458.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:36:33,606 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48465.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 05:36:57,108 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48484.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:37:05,996 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48492.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:37:06,946 INFO [train.py:898] (2/4) Epoch 14, batch 1250, loss[loss=0.1416, simple_loss=0.2276, pruned_loss=0.02775, over 18408.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2651, pruned_loss=0.0458, over 3582172.88 frames. ], batch size: 42, lr: 8.16e-03, grad_scale: 8.0 +2023-03-09 05:37:14,721 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. 
limit=2.0 +2023-03-09 05:37:23,692 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7682, 5.2466, 5.2180, 5.2384, 4.7973, 5.1429, 4.5387, 5.1278], + device='cuda:2'), covar=tensor([0.0245, 0.0267, 0.0206, 0.0391, 0.0317, 0.0223, 0.1108, 0.0296], + device='cuda:2'), in_proj_covar=tensor([0.0193, 0.0241, 0.0228, 0.0279, 0.0244, 0.0239, 0.0297, 0.0233], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 05:37:30,407 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48513.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 05:37:48,068 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2261, 5.7435, 5.2471, 5.5282, 5.3068, 5.1552, 5.7811, 5.7273], + device='cuda:2'), covar=tensor([0.1048, 0.0709, 0.0608, 0.0687, 0.1382, 0.0702, 0.0534, 0.0646], + device='cuda:2'), in_proj_covar=tensor([0.0550, 0.0462, 0.0341, 0.0489, 0.0664, 0.0493, 0.0650, 0.0481], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 05:37:51,832 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7555, 2.3547, 2.7841, 2.6991, 3.4323, 5.1341, 4.7863, 4.0442], + device='cuda:2'), covar=tensor([0.1275, 0.1976, 0.2436, 0.1465, 0.1782, 0.0119, 0.0332, 0.0531], + device='cuda:2'), in_proj_covar=tensor([0.0264, 0.0321, 0.0342, 0.0261, 0.0373, 0.0205, 0.0276, 0.0226], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 05:38:06,731 INFO [train.py:898] (2/4) Epoch 14, batch 1300, loss[loss=0.1813, simple_loss=0.2669, pruned_loss=0.04783, over 18485.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2651, pruned_loss=0.04627, over 3568961.55 frames. ], batch size: 53, lr: 8.16e-03, grad_scale: 8.0 +2023-03-09 05:38:14,438 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.86 vs. limit=5.0 +2023-03-09 05:38:21,324 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.028e+02 2.821e+02 3.494e+02 4.256e+02 8.799e+02, threshold=6.987e+02, percent-clipped=3.0 +2023-03-09 05:38:35,203 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48568.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 05:39:05,492 INFO [train.py:898] (2/4) Epoch 14, batch 1350, loss[loss=0.1991, simple_loss=0.2931, pruned_loss=0.05257, over 17768.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2657, pruned_loss=0.04617, over 3576293.68 frames. ], batch size: 70, lr: 8.16e-03, grad_scale: 8.0 +2023-03-09 05:40:05,012 INFO [train.py:898] (2/4) Epoch 14, batch 1400, loss[loss=0.1725, simple_loss=0.258, pruned_loss=0.04355, over 18380.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2667, pruned_loss=0.04653, over 3570323.42 frames. 
], batch size: 50, lr: 8.15e-03, grad_scale: 8.0 +2023-03-09 05:40:19,851 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.683e+02 2.979e+02 3.618e+02 4.043e+02 7.873e+02, threshold=7.235e+02, percent-clipped=2.0 +2023-03-09 05:40:43,861 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6451, 3.0256, 4.3180, 3.9452, 2.6153, 4.5789, 3.9740, 2.9434], + device='cuda:2'), covar=tensor([0.0407, 0.1206, 0.0167, 0.0285, 0.1393, 0.0166, 0.0417, 0.0849], + device='cuda:2'), in_proj_covar=tensor([0.0196, 0.0225, 0.0167, 0.0145, 0.0213, 0.0190, 0.0216, 0.0189], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 05:40:48,857 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48680.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:41:04,620 INFO [train.py:898] (2/4) Epoch 14, batch 1450, loss[loss=0.1789, simple_loss=0.2703, pruned_loss=0.04381, over 17199.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2653, pruned_loss=0.04611, over 3577702.69 frames. ], batch size: 78, lr: 8.15e-03, grad_scale: 8.0 +2023-03-09 05:41:45,670 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48728.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:42:03,636 INFO [train.py:898] (2/4) Epoch 14, batch 1500, loss[loss=0.2114, simple_loss=0.2936, pruned_loss=0.06459, over 18372.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2647, pruned_loss=0.04561, over 3588197.45 frames. ], batch size: 56, lr: 8.14e-03, grad_scale: 8.0 +2023-03-09 05:42:07,838 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3050, 5.2953, 4.6860, 5.1847, 5.2305, 4.6517, 5.1343, 4.7660], + device='cuda:2'), covar=tensor([0.0552, 0.0566, 0.1749, 0.1031, 0.0669, 0.0531, 0.0588, 0.1139], + device='cuda:2'), in_proj_covar=tensor([0.0441, 0.0506, 0.0669, 0.0391, 0.0388, 0.0459, 0.0489, 0.0632], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 05:42:15,650 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48753.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:42:18,854 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.036e+02 2.981e+02 3.460e+02 4.226e+02 1.044e+03, threshold=6.921e+02, percent-clipped=2.0 +2023-03-09 05:42:42,173 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48776.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:42:45,564 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48779.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:43:02,592 INFO [train.py:898] (2/4) Epoch 14, batch 1550, loss[loss=0.1893, simple_loss=0.2848, pruned_loss=0.04687, over 17057.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2651, pruned_loss=0.04565, over 3599608.34 frames. 
], batch size: 78, lr: 8.14e-03, grad_scale: 8.0 +2023-03-09 05:43:24,689 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7608, 3.6191, 3.4749, 3.1459, 3.4152, 2.7426, 2.8726, 3.7108], + device='cuda:2'), covar=tensor([0.0042, 0.0088, 0.0068, 0.0118, 0.0073, 0.0170, 0.0156, 0.0049], + device='cuda:2'), in_proj_covar=tensor([0.0111, 0.0135, 0.0114, 0.0166, 0.0118, 0.0163, 0.0164, 0.0097], + device='cuda:2'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 05:43:35,889 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48821.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:43:55,873 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48838.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:44:02,378 INFO [train.py:898] (2/4) Epoch 14, batch 1600, loss[loss=0.1726, simple_loss=0.2623, pruned_loss=0.04149, over 18359.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2647, pruned_loss=0.04546, over 3612874.86 frames. ], batch size: 50, lr: 8.14e-03, grad_scale: 8.0 +2023-03-09 05:44:02,757 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0551, 5.0449, 5.0767, 4.8808, 4.8853, 4.9995, 5.2829, 5.2379], + device='cuda:2'), covar=tensor([0.0060, 0.0048, 0.0052, 0.0087, 0.0050, 0.0101, 0.0055, 0.0084], + device='cuda:2'), in_proj_covar=tensor([0.0084, 0.0060, 0.0063, 0.0081, 0.0067, 0.0091, 0.0076, 0.0077], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 05:44:17,654 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.056e+02 2.816e+02 3.506e+02 4.400e+02 1.387e+03, threshold=7.012e+02, percent-clipped=5.0 +2023-03-09 05:44:31,422 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48868.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 05:44:47,268 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48882.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:44:59,863 INFO [train.py:898] (2/4) Epoch 14, batch 1650, loss[loss=0.1691, simple_loss=0.2488, pruned_loss=0.04474, over 18349.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2649, pruned_loss=0.04604, over 3593781.29 frames. ], batch size: 46, lr: 8.13e-03, grad_scale: 8.0 +2023-03-09 05:45:07,589 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48899.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 05:45:26,234 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6438, 4.0826, 4.1582, 3.0525, 3.5031, 3.2613, 2.6936, 2.1939], + device='cuda:2'), covar=tensor([0.0215, 0.0233, 0.0090, 0.0338, 0.0343, 0.0214, 0.0663, 0.0914], + device='cuda:2'), in_proj_covar=tensor([0.0061, 0.0051, 0.0052, 0.0062, 0.0082, 0.0058, 0.0073, 0.0080], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0004, 0.0004, 0.0006, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 05:45:27,192 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=48916.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 05:45:58,700 INFO [train.py:898] (2/4) Epoch 14, batch 1700, loss[loss=0.1734, simple_loss=0.2649, pruned_loss=0.04095, over 18365.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2647, pruned_loss=0.04594, over 3596902.38 frames. 
], batch size: 55, lr: 8.13e-03, grad_scale: 8.0
+2023-03-09 05:46:15,063 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.904e+02 3.125e+02 3.663e+02 4.529e+02 8.545e+02, threshold=7.326e+02, percent-clipped=5.0
+2023-03-09 05:46:42,436 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48980.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 05:46:42,489 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6386, 2.8610, 4.2462, 3.7517, 2.5718, 4.5669, 3.9004, 2.8764],
+       device='cuda:2'), covar=tensor([0.0467, 0.1404, 0.0216, 0.0340, 0.1547, 0.0182, 0.0511, 0.0992],
+       device='cuda:2'), in_proj_covar=tensor([0.0201, 0.0231, 0.0174, 0.0150, 0.0219, 0.0194, 0.0224, 0.0194],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:2')
+2023-03-09 05:46:57,442 INFO [train.py:898] (2/4) Epoch 14, batch 1750, loss[loss=0.1884, simple_loss=0.2724, pruned_loss=0.0522, over 18317.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2653, pruned_loss=0.04623, over 3595350.02 frames. ], batch size: 54, lr: 8.12e-03, grad_scale: 8.0
+2023-03-09 05:47:39,660 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49028.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 05:47:56,917 INFO [train.py:898] (2/4) Epoch 14, batch 1800, loss[loss=0.2183, simple_loss=0.2961, pruned_loss=0.07024, over 12470.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2654, pruned_loss=0.04614, over 3591077.22 frames. ], batch size: 129, lr: 8.12e-03, grad_scale: 8.0
+2023-03-09 05:47:57,369 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5720, 2.9272, 2.4438, 2.8385, 3.6877, 3.6352, 3.0845, 3.0423],
+       device='cuda:2'), covar=tensor([0.0177, 0.0241, 0.0636, 0.0362, 0.0150, 0.0177, 0.0364, 0.0338],
+       device='cuda:2'), in_proj_covar=tensor([0.0125, 0.0118, 0.0156, 0.0148, 0.0112, 0.0098, 0.0141, 0.0140],
+       device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 05:48:09,274 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49053.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 05:48:13,402 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.931e+02 2.916e+02 3.364e+02 3.970e+02 9.603e+02, threshold=6.727e+02, percent-clipped=4.0
+2023-03-09 05:48:16,145 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7033, 2.9524, 4.3422, 3.8220, 2.6866, 4.6528, 4.0123, 2.7112],
+       device='cuda:2'), covar=tensor([0.0472, 0.1355, 0.0236, 0.0356, 0.1428, 0.0162, 0.0433, 0.1032],
+       device='cuda:2'), in_proj_covar=tensor([0.0202, 0.0233, 0.0175, 0.0152, 0.0220, 0.0195, 0.0225, 0.0196],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:2')
+2023-03-09 05:48:40,793 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49079.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 05:48:56,154 INFO [train.py:898] (2/4) Epoch 14, batch 1850, loss[loss=0.1841, simple_loss=0.2646, pruned_loss=0.05179, over 18517.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2644, pruned_loss=0.04552, over 3602914.95 frames. ], batch size: 47, lr: 8.12e-03, grad_scale: 8.0
+2023-03-09 05:49:06,063 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49101.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 05:49:37,851 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49127.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 05:49:55,816 INFO [train.py:898] (2/4) Epoch 14, batch 1900, loss[loss=0.1652, simple_loss=0.2452, pruned_loss=0.04258, over 18513.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2645, pruned_loss=0.04551, over 3595437.98 frames. ], batch size: 47, lr: 8.11e-03, grad_scale: 8.0
+2023-03-09 05:50:11,576 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.722e+02 2.855e+02 3.283e+02 4.141e+02 6.742e+02, threshold=6.565e+02, percent-clipped=1.0
+2023-03-09 05:50:37,028 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49177.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 05:50:54,828 INFO [train.py:898] (2/4) Epoch 14, batch 1950, loss[loss=0.1747, simple_loss=0.2651, pruned_loss=0.04218, over 18388.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2642, pruned_loss=0.04522, over 3595035.78 frames. ], batch size: 50, lr: 8.11e-03, grad_scale: 8.0
+2023-03-09 05:50:56,176 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49194.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 05:51:46,892 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49236.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 05:51:54,563 INFO [train.py:898] (2/4) Epoch 14, batch 2000, loss[loss=0.1759, simple_loss=0.2657, pruned_loss=0.04309, over 17999.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2633, pruned_loss=0.04491, over 3602058.50 frames. ], batch size: 65, lr: 8.10e-03, grad_scale: 8.0
+2023-03-09 05:52:09,968 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.669e+02 2.933e+02 3.417e+02 3.982e+02 2.319e+03, threshold=6.833e+02, percent-clipped=9.0
+2023-03-09 05:52:41,702 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5957, 2.8236, 2.7149, 2.8173, 3.6528, 3.6924, 3.0957, 3.0295],
+       device='cuda:2'), covar=tensor([0.0210, 0.0274, 0.0501, 0.0372, 0.0193, 0.0136, 0.0356, 0.0323],
+       device='cuda:2'), in_proj_covar=tensor([0.0125, 0.0119, 0.0155, 0.0149, 0.0113, 0.0099, 0.0141, 0.0140],
+       device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 05:52:44,360 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0
+2023-03-09 05:52:53,833 INFO [train.py:898] (2/4) Epoch 14, batch 2050, loss[loss=0.1982, simple_loss=0.2849, pruned_loss=0.05576, over 18626.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2636, pruned_loss=0.04495, over 3593775.75 frames. ], batch size: 52, lr: 8.10e-03, grad_scale: 8.0
+2023-03-09 05:52:58,898 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49297.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 05:53:34,578 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3529, 2.6651, 2.4938, 2.6689, 3.4764, 3.4767, 2.9287, 2.9486],
+       device='cuda:2'), covar=tensor([0.0208, 0.0314, 0.0601, 0.0451, 0.0248, 0.0199, 0.0405, 0.0351],
+       device='cuda:2'), in_proj_covar=tensor([0.0126, 0.0119, 0.0155, 0.0148, 0.0114, 0.0099, 0.0141, 0.0140],
+       device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 05:53:54,012 INFO [train.py:898] (2/4) Epoch 14, batch 2100, loss[loss=0.1875, simple_loss=0.2787, pruned_loss=0.04819, over 18352.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2638, pruned_loss=0.04486, over 3601545.43 frames. ], batch size: 55, lr: 8.09e-03, grad_scale: 8.0
+2023-03-09 05:54:08,754 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.284e+02 2.957e+02 3.376e+02 4.046e+02 6.341e+02, threshold=6.752e+02, percent-clipped=0.0
+2023-03-09 05:54:47,242 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0
+2023-03-09 05:54:53,544 INFO [train.py:898] (2/4) Epoch 14, batch 2150, loss[loss=0.1911, simple_loss=0.2708, pruned_loss=0.05569, over 18171.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2639, pruned_loss=0.04476, over 3601756.04 frames. ], batch size: 60, lr: 8.09e-03, grad_scale: 8.0
+2023-03-09 05:55:27,878 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49422.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 05:55:29,018 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5824, 5.1058, 5.0705, 5.1429, 4.6058, 4.9720, 4.3660, 4.9443],
+       device='cuda:2'), covar=tensor([0.0242, 0.0340, 0.0234, 0.0397, 0.0402, 0.0241, 0.1294, 0.0365],
+       device='cuda:2'), in_proj_covar=tensor([0.0193, 0.0238, 0.0228, 0.0279, 0.0242, 0.0240, 0.0296, 0.0230],
+       device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006],
+       device='cuda:2')
+2023-03-09 05:55:39,986 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49432.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 05:55:52,816 INFO [train.py:898] (2/4) Epoch 14, batch 2200, loss[loss=0.1654, simple_loss=0.2414, pruned_loss=0.04471, over 18253.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2647, pruned_loss=0.04508, over 3605058.36 frames. ], batch size: 45, lr: 8.09e-03, grad_scale: 8.0
+2023-03-09 05:56:04,506 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8746, 5.3146, 2.8075, 5.1405, 4.9769, 5.3354, 5.1383, 2.7448],
+       device='cuda:2'), covar=tensor([0.0211, 0.0079, 0.0765, 0.0083, 0.0082, 0.0077, 0.0091, 0.0984],
+       device='cuda:2'), in_proj_covar=tensor([0.0080, 0.0072, 0.0090, 0.0084, 0.0079, 0.0068, 0.0079, 0.0093],
+       device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005],
+       device='cuda:2')
+2023-03-09 05:56:07,485 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.048e+02 2.848e+02 3.284e+02 4.322e+02 8.537e+02, threshold=6.567e+02, percent-clipped=2.0
+2023-03-09 05:56:28,592 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4332, 2.7643, 2.5582, 2.7153, 3.6039, 3.5541, 3.0088, 2.8093],
+       device='cuda:2'), covar=tensor([0.0195, 0.0288, 0.0554, 0.0379, 0.0178, 0.0148, 0.0363, 0.0373],
+       device='cuda:2'), in_proj_covar=tensor([0.0125, 0.0119, 0.0156, 0.0148, 0.0113, 0.0099, 0.0140, 0.0140],
+       device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 05:56:33,096 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49477.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 05:56:40,668 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49483.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 05:56:52,264 INFO [train.py:898] (2/4) Epoch 14, batch 2250, loss[loss=0.1603, simple_loss=0.2383, pruned_loss=0.04116, over 18523.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2648, pruned_loss=0.04535, over 3601628.51 frames. ], batch size: 44, lr: 8.08e-03, grad_scale: 8.0
+2023-03-09 05:56:52,631 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49493.0, num_to_drop=1, layers_to_drop={3}
+2023-03-09 05:56:53,558 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49494.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 05:57:29,646 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49525.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 05:57:47,843 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0
+2023-03-09 05:57:49,600 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49542.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 05:57:50,514 INFO [train.py:898] (2/4) Epoch 14, batch 2300, loss[loss=0.18, simple_loss=0.2722, pruned_loss=0.04388, over 18274.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2657, pruned_loss=0.04583, over 3602152.92 frames. ], batch size: 47, lr: 8.08e-03, grad_scale: 8.0
+2023-03-09 05:58:05,065 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.043e+02 3.294e+02 3.736e+02 4.526e+02 1.029e+03, threshold=7.472e+02, percent-clipped=8.0
+2023-03-09 05:58:15,573 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.58 vs. limit=5.0
+2023-03-09 05:58:30,249 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0336, 4.1031, 2.3819, 4.1046, 5.1445, 2.4414, 3.5983, 3.9408],
+       device='cuda:2'), covar=tensor([0.0093, 0.1183, 0.1549, 0.0516, 0.0056, 0.1271, 0.0716, 0.0709],
+       device='cuda:2'), in_proj_covar=tensor([0.0134, 0.0245, 0.0192, 0.0188, 0.0098, 0.0175, 0.0205, 0.0211],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 05:58:47,680 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49592.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 05:58:48,616 INFO [train.py:898] (2/4) Epoch 14, batch 2350, loss[loss=0.1527, simple_loss=0.2293, pruned_loss=0.03804, over 17623.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2661, pruned_loss=0.04599, over 3600865.89 frames. ], batch size: 39, lr: 8.07e-03, grad_scale: 8.0
+2023-03-09 05:59:48,061 INFO [train.py:898] (2/4) Epoch 14, batch 2400, loss[loss=0.152, simple_loss=0.2378, pruned_loss=0.03313, over 18425.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2652, pruned_loss=0.04547, over 3599538.30 frames. ], batch size: 42, lr: 8.07e-03, grad_scale: 8.0
+2023-03-09 05:59:55,787 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6696, 5.2421, 5.1910, 5.1655, 4.7537, 5.0696, 4.5386, 5.0331],
+       device='cuda:2'), covar=tensor([0.0227, 0.0252, 0.0187, 0.0364, 0.0332, 0.0209, 0.1080, 0.0289],
+       device='cuda:2'), in_proj_covar=tensor([0.0189, 0.0234, 0.0228, 0.0276, 0.0238, 0.0237, 0.0291, 0.0227],
+       device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005],
+       device='cuda:2')
+2023-03-09 06:00:03,323 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.270e+02 3.059e+02 3.720e+02 4.555e+02 1.609e+03, threshold=7.441e+02, percent-clipped=3.0
+2023-03-09 06:00:19,329 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0543, 5.1234, 5.2147, 5.2408, 5.0161, 5.7529, 5.2912, 5.0515],
+       device='cuda:2'), covar=tensor([0.1159, 0.0654, 0.0784, 0.0744, 0.1355, 0.0810, 0.0688, 0.1770],
+       device='cuda:2'), in_proj_covar=tensor([0.0327, 0.0257, 0.0272, 0.0274, 0.0308, 0.0385, 0.0254, 0.0374],
+       device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0004, 0.0002, 0.0003],
+       device='cuda:2')
+2023-03-09 06:00:37,617 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9139, 5.3509, 2.8634, 5.1630, 4.9669, 5.3742, 5.1610, 2.6619],
+       device='cuda:2'), covar=tensor([0.0198, 0.0060, 0.0754, 0.0088, 0.0081, 0.0068, 0.0086, 0.0981],
+       device='cuda:2'), in_proj_covar=tensor([0.0081, 0.0072, 0.0091, 0.0085, 0.0080, 0.0069, 0.0079, 0.0094],
+       device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005],
+       device='cuda:2')
+2023-03-09 06:00:37,658 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6349, 3.3244, 1.9516, 4.3119, 3.1780, 3.9239, 1.9008, 3.6544],
+       device='cuda:2'), covar=tensor([0.0518, 0.0848, 0.1508, 0.0408, 0.0680, 0.0399, 0.1582, 0.0474],
+       device='cuda:2'), in_proj_covar=tensor([0.0201, 0.0219, 0.0182, 0.0259, 0.0183, 0.0253, 0.0196, 0.0191],
+       device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 06:00:46,955 INFO [train.py:898] (2/4) Epoch 14, batch 2450, loss[loss=0.1625, simple_loss=0.2483, pruned_loss=0.03833, over 18422.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.266, pruned_loss=0.04568, over 3595898.39 frames. ], batch size: 48, lr: 8.07e-03, grad_scale: 8.0
+2023-03-09 06:01:46,429 INFO [train.py:898] (2/4) Epoch 14, batch 2500, loss[loss=0.1642, simple_loss=0.2565, pruned_loss=0.03592, over 18370.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2645, pruned_loss=0.0451, over 3599336.03 frames. ], batch size: 50, lr: 8.06e-03, grad_scale: 8.0
+2023-03-09 06:02:01,767 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.909e+02 2.668e+02 3.143e+02 3.903e+02 7.073e+02, threshold=6.286e+02, percent-clipped=0.0
+2023-03-09 06:02:13,391 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49766.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:02:20,343 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0
+2023-03-09 06:02:27,102 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49778.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:02:39,165 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49788.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 06:02:44,645 INFO [train.py:898] (2/4) Epoch 14, batch 2550, loss[loss=0.1633, simple_loss=0.2373, pruned_loss=0.04468, over 18150.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2639, pruned_loss=0.04485, over 3589993.31 frames. ], batch size: 44, lr: 8.06e-03, grad_scale: 8.0
+2023-03-09 06:02:48,301 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0
+2023-03-09 06:03:24,864 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49827.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:03:44,086 INFO [train.py:898] (2/4) Epoch 14, batch 2600, loss[loss=0.1923, simple_loss=0.287, pruned_loss=0.04877, over 18397.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2642, pruned_loss=0.04481, over 3580441.62 frames. ], batch size: 52, lr: 8.05e-03, grad_scale: 8.0
+2023-03-09 06:03:59,916 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.176e+02 2.916e+02 3.456e+02 4.257e+02 7.547e+02, threshold=6.912e+02, percent-clipped=3.0
+2023-03-09 06:04:42,279 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49892.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:04:43,097 INFO [train.py:898] (2/4) Epoch 14, batch 2650, loss[loss=0.1558, simple_loss=0.2372, pruned_loss=0.03718, over 18425.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2651, pruned_loss=0.04516, over 3573832.91 frames. ], batch size: 43, lr: 8.05e-03, grad_scale: 8.0
+2023-03-09 06:04:56,598 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4366, 3.2380, 1.9146, 4.2874, 2.9157, 4.1666, 2.0625, 3.7244],
+       device='cuda:2'), covar=tensor([0.0596, 0.0852, 0.1492, 0.0400, 0.0866, 0.0245, 0.1330, 0.0405],
+       device='cuda:2'), in_proj_covar=tensor([0.0201, 0.0219, 0.0183, 0.0258, 0.0184, 0.0253, 0.0195, 0.0191],
+       device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 06:05:32,976 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9088, 3.6508, 5.0710, 2.8774, 4.4283, 2.6539, 3.2981, 1.7834],
+       device='cuda:2'), covar=tensor([0.1035, 0.0832, 0.0130, 0.0799, 0.0504, 0.2402, 0.2268, 0.1968],
+       device='cuda:2'), in_proj_covar=tensor([0.0204, 0.0221, 0.0136, 0.0177, 0.0234, 0.0250, 0.0295, 0.0215],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002],
+       device='cuda:2')
+2023-03-09 06:05:38,378 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=49940.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:05:42,181 INFO [train.py:898] (2/4) Epoch 14, batch 2700, loss[loss=0.1613, simple_loss=0.2384, pruned_loss=0.0421, over 18492.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2645, pruned_loss=0.04492, over 3587691.92 frames. ], batch size: 44, lr: 8.05e-03, grad_scale: 8.0
+2023-03-09 06:05:47,356 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5635, 2.1602, 2.5298, 2.6230, 3.0934, 4.6048, 4.2694, 3.4576],
+       device='cuda:2'), covar=tensor([0.1425, 0.2123, 0.2547, 0.1523, 0.2035, 0.0184, 0.0408, 0.0659],
+       device='cuda:2'), in_proj_covar=tensor([0.0266, 0.0321, 0.0344, 0.0260, 0.0374, 0.0210, 0.0278, 0.0226],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001],
+       device='cuda:2')
+2023-03-09 06:05:57,464 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.140e+02 2.972e+02 3.465e+02 4.147e+02 6.859e+02, threshold=6.931e+02, percent-clipped=0.0
+2023-03-09 06:06:40,850 INFO [train.py:898] (2/4) Epoch 14, batch 2750, loss[loss=0.1551, simple_loss=0.2314, pruned_loss=0.0394, over 18478.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2645, pruned_loss=0.04491, over 3592439.56 frames. ], batch size: 44, lr: 8.04e-03, grad_scale: 8.0
+2023-03-09 06:07:32,684 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0
+2023-03-09 06:07:43,616 INFO [train.py:898] (2/4) Epoch 14, batch 2800, loss[loss=0.1793, simple_loss=0.269, pruned_loss=0.04479, over 18496.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2645, pruned_loss=0.04512, over 3598942.78 frames. ], batch size: 51, lr: 8.04e-03, grad_scale: 16.0
+2023-03-09 06:07:58,950 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.149e+02 3.000e+02 3.519e+02 4.137e+02 9.656e+02, threshold=7.037e+02, percent-clipped=4.0
+2023-03-09 06:08:25,656 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50078.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:08:36,991 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50088.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 06:08:42,938 INFO [train.py:898] (2/4) Epoch 14, batch 2850, loss[loss=0.1792, simple_loss=0.2581, pruned_loss=0.05018, over 18540.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2651, pruned_loss=0.04541, over 3586907.35 frames. ], batch size: 49, lr: 8.03e-03, grad_scale: 16.0
+2023-03-09 06:09:17,620 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50122.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:09:17,734 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5026, 5.0560, 4.9835, 5.0423, 4.4766, 4.8664, 4.3614, 4.8434],
+       device='cuda:2'), covar=tensor([0.0279, 0.0240, 0.0212, 0.0354, 0.0393, 0.0226, 0.1119, 0.0318],
+       device='cuda:2'), in_proj_covar=tensor([0.0192, 0.0235, 0.0228, 0.0275, 0.0241, 0.0240, 0.0290, 0.0230],
+       device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005],
+       device='cuda:2')
+2023-03-09 06:09:17,847 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1727, 2.6366, 2.3162, 2.7035, 3.2611, 3.2340, 2.9396, 2.7295],
+       device='cuda:2'), covar=tensor([0.0206, 0.0287, 0.0596, 0.0362, 0.0216, 0.0158, 0.0343, 0.0355],
+       device='cuda:2'), in_proj_covar=tensor([0.0125, 0.0120, 0.0156, 0.0145, 0.0113, 0.0100, 0.0139, 0.0140],
+       device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 06:09:22,112 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50126.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:09:24,571 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50128.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:09:33,407 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50136.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 06:09:40,939 INFO [train.py:898] (2/4) Epoch 14, batch 2900, loss[loss=0.1668, simple_loss=0.2433, pruned_loss=0.0452, over 18497.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2654, pruned_loss=0.04559, over 3579030.09 frames. ], batch size: 44, lr: 8.03e-03, grad_scale: 16.0
+2023-03-09 06:09:56,512 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.084e+02 3.147e+02 3.827e+02 4.513e+02 7.864e+02, threshold=7.653e+02, percent-clipped=1.0
+2023-03-09 06:10:03,367 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8298, 3.2183, 4.4989, 4.0612, 3.1828, 4.9089, 4.1073, 3.1714],
+       device='cuda:2'), covar=tensor([0.0522, 0.1344, 0.0233, 0.0353, 0.1253, 0.0158, 0.0543, 0.0944],
+       device='cuda:2'), in_proj_covar=tensor([0.0202, 0.0231, 0.0176, 0.0150, 0.0217, 0.0196, 0.0224, 0.0193],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:2')
+2023-03-09 06:10:34,475 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50189.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:10:38,567 INFO [train.py:898] (2/4) Epoch 14, batch 2950, loss[loss=0.1677, simple_loss=0.2463, pruned_loss=0.0445, over 18489.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2656, pruned_loss=0.04562, over 3569607.49 frames. ], batch size: 44, lr: 8.03e-03, grad_scale: 16.0
+2023-03-09 06:10:43,863 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50197.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 06:10:59,926 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.60 vs. limit=5.0
+2023-03-09 06:11:37,783 INFO [train.py:898] (2/4) Epoch 14, batch 3000, loss[loss=0.191, simple_loss=0.2796, pruned_loss=0.05122, over 18279.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2647, pruned_loss=0.04532, over 3575996.52 frames. ], batch size: 57, lr: 8.02e-03, grad_scale: 16.0
+2023-03-09 06:11:37,783 INFO [train.py:923] (2/4) Computing validation loss
+2023-03-09 06:11:49,727 INFO [train.py:932] (2/4) Epoch 14, validation: loss=0.1532, simple_loss=0.2546, pruned_loss=0.02587, over 944034.00 frames.
+2023-03-09 06:11:49,728 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB
+2023-03-09 06:12:04,125 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.273e+02 3.285e+02 3.966e+02 4.720e+02 1.017e+03, threshold=7.933e+02, percent-clipped=5.0
+2023-03-09 06:12:07,529 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50258.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 06:12:46,942 INFO [train.py:898] (2/4) Epoch 14, batch 3050, loss[loss=0.1981, simple_loss=0.2845, pruned_loss=0.05582, over 18586.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2642, pruned_loss=0.04507, over 3595082.33 frames. ], batch size: 54, lr: 8.02e-03, grad_scale: 16.0
+2023-03-09 06:13:45,660 INFO [train.py:898] (2/4) Epoch 14, batch 3100, loss[loss=0.1668, simple_loss=0.2604, pruned_loss=0.03662, over 17717.00 frames. ], tot_loss[loss=0.178, simple_loss=0.265, pruned_loss=0.04552, over 3579477.83 frames. ], batch size: 70, lr: 8.01e-03, grad_scale: 16.0
+2023-03-09 06:14:00,592 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.982e+02 2.722e+02 3.409e+02 4.335e+02 9.164e+02, threshold=6.818e+02, percent-clipped=2.0
+2023-03-09 06:14:42,790 INFO [train.py:898] (2/4) Epoch 14, batch 3150, loss[loss=0.1932, simple_loss=0.2811, pruned_loss=0.05267, over 17595.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.265, pruned_loss=0.04565, over 3584510.21 frames. ], batch size: 70, lr: 8.01e-03, grad_scale: 16.0
+2023-03-09 06:15:18,798 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50422.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:15:42,275 INFO [train.py:898] (2/4) Epoch 14, batch 3200, loss[loss=0.1532, simple_loss=0.2444, pruned_loss=0.03102, over 18421.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2644, pruned_loss=0.04529, over 3587255.79 frames. ], batch size: 48, lr: 8.01e-03, grad_scale: 16.0
+2023-03-09 06:15:58,906 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.049e+02 3.101e+02 3.675e+02 4.644e+02 1.158e+03, threshold=7.350e+02, percent-clipped=4.0
+2023-03-09 06:16:14,168 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50470.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:16:30,698 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50484.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:16:40,181 INFO [train.py:898] (2/4) Epoch 14, batch 3250, loss[loss=0.1843, simple_loss=0.2758, pruned_loss=0.04639, over 18499.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2638, pruned_loss=0.04499, over 3592833.17 frames. ], batch size: 53, lr: 8.00e-03, grad_scale: 8.0
+2023-03-09 06:17:39,139 INFO [train.py:898] (2/4) Epoch 14, batch 3300, loss[loss=0.152, simple_loss=0.2301, pruned_loss=0.03693, over 16801.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.264, pruned_loss=0.04512, over 3598160.87 frames. ], batch size: 37, lr: 8.00e-03, grad_scale: 8.0
+2023-03-09 06:17:42,635 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50546.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:17:50,973 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50553.0, num_to_drop=1, layers_to_drop={2}
+2023-03-09 06:17:55,809 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.637e+02 3.010e+02 3.418e+02 4.071e+02 6.649e+02, threshold=6.837e+02, percent-clipped=0.0
+2023-03-09 06:18:16,609 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.53 vs. limit=5.0
+2023-03-09 06:18:37,882 INFO [train.py:898] (2/4) Epoch 14, batch 3350, loss[loss=0.1828, simple_loss=0.2704, pruned_loss=0.04753, over 16022.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2639, pruned_loss=0.04496, over 3591780.71 frames. ], batch size: 94, lr: 8.00e-03, grad_scale: 8.0
+2023-03-09 06:18:55,339 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50607.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:19:04,208 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3286, 5.1920, 5.5985, 5.5465, 5.1962, 6.0863, 5.7002, 5.3388],
+       device='cuda:2'), covar=tensor([0.0924, 0.0555, 0.0594, 0.0595, 0.1402, 0.0644, 0.0567, 0.1550],
+       device='cuda:2'), in_proj_covar=tensor([0.0324, 0.0252, 0.0268, 0.0269, 0.0303, 0.0375, 0.0250, 0.0368],
+       device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0002, 0.0002, 0.0002, 0.0003, 0.0004, 0.0002, 0.0003],
+       device='cuda:2')
+2023-03-09 06:19:36,703 INFO [train.py:898] (2/4) Epoch 14, batch 3400, loss[loss=0.1462, simple_loss=0.2317, pruned_loss=0.03033, over 18375.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.263, pruned_loss=0.04429, over 3599044.05 frames. ], batch size: 46, lr: 7.99e-03, grad_scale: 8.0
+2023-03-09 06:19:53,297 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.118e+02 2.885e+02 3.458e+02 4.304e+02 7.306e+02, threshold=6.916e+02, percent-clipped=1.0
+2023-03-09 06:20:35,020 INFO [train.py:898] (2/4) Epoch 14, batch 3450, loss[loss=0.1678, simple_loss=0.2483, pruned_loss=0.04363, over 18472.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.262, pruned_loss=0.04388, over 3605780.01 frames. ], batch size: 43, lr: 7.99e-03, grad_scale: 8.0
+2023-03-09 06:20:39,936 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0909, 5.4209, 3.0023, 5.2183, 5.1377, 5.4627, 5.2159, 2.8257],
+       device='cuda:2'), covar=tensor([0.0167, 0.0052, 0.0656, 0.0067, 0.0057, 0.0051, 0.0083, 0.0898],
+       device='cuda:2'), in_proj_covar=tensor([0.0081, 0.0073, 0.0090, 0.0087, 0.0080, 0.0069, 0.0080, 0.0094],
+       device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005],
+       device='cuda:2')
+2023-03-09 06:21:32,281 INFO [train.py:898] (2/4) Epoch 14, batch 3500, loss[loss=0.1698, simple_loss=0.257, pruned_loss=0.04133, over 18383.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2628, pruned_loss=0.04413, over 3608024.51 frames. ], batch size: 50, lr: 7.98e-03, grad_scale: 8.0
+2023-03-09 06:21:47,964 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.084e+02 3.008e+02 3.421e+02 4.567e+02 7.789e+02, threshold=6.843e+02, percent-clipped=2.0
+2023-03-09 06:22:12,532 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1448, 5.5810, 2.9359, 5.3767, 5.2862, 5.6171, 5.3914, 2.9198],
+       device='cuda:2'), covar=tensor([0.0164, 0.0046, 0.0650, 0.0055, 0.0061, 0.0048, 0.0070, 0.0850],
+       device='cuda:2'), in_proj_covar=tensor([0.0082, 0.0073, 0.0091, 0.0087, 0.0080, 0.0069, 0.0080, 0.0094],
+       device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005],
+       device='cuda:2')
+2023-03-09 06:22:15,611 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50782.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:22:17,538 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50784.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:22:26,427 INFO [train.py:898] (2/4) Epoch 14, batch 3550, loss[loss=0.1726, simple_loss=0.2618, pruned_loss=0.04173, over 18492.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2636, pruned_loss=0.04449, over 3605359.32 frames. ], batch size: 47, lr: 7.98e-03, grad_scale: 8.0
+2023-03-09 06:22:43,181 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50808.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 06:23:08,090 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50832.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:23:19,506 INFO [train.py:898] (2/4) Epoch 14, batch 3600, loss[loss=0.1856, simple_loss=0.273, pruned_loss=0.04912, over 18493.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2643, pruned_loss=0.0447, over 3595477.81 frames. ], batch size: 53, lr: 7.98e-03, grad_scale: 8.0
+2023-03-09 06:23:19,834 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50843.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:23:30,432 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50853.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 06:23:30,940 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0
+2023-03-09 06:23:34,503 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.994e+02 3.100e+02 3.552e+02 4.596e+02 8.414e+02, threshold=7.104e+02, percent-clipped=7.0
+2023-03-09 06:23:47,675 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50869.0, num_to_drop=1, layers_to_drop={3}
+2023-03-09 06:24:25,707 INFO [train.py:898] (2/4) Epoch 15, batch 0, loss[loss=0.1487, simple_loss=0.2332, pruned_loss=0.03207, over 18483.00 frames. ], tot_loss[loss=0.1487, simple_loss=0.2332, pruned_loss=0.03207, over 18483.00 frames. ], batch size: 47, lr: 7.70e-03, grad_scale: 8.0
+2023-03-09 06:24:25,708 INFO [train.py:923] (2/4) Computing validation loss
+2023-03-09 06:24:32,812 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2043, 3.1932, 1.9305, 3.8673, 2.6782, 3.6340, 2.1962, 3.4127],
+       device='cuda:2'), covar=tensor([0.0687, 0.0904, 0.1473, 0.0490, 0.0943, 0.0326, 0.1265, 0.0430],
+       device='cuda:2'), in_proj_covar=tensor([0.0202, 0.0216, 0.0181, 0.0258, 0.0184, 0.0252, 0.0194, 0.0192],
+       device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 06:24:37,510 INFO [train.py:932] (2/4) Epoch 15, validation: loss=0.1543, simple_loss=0.2557, pruned_loss=0.02649, over 944034.00 frames.
+2023-03-09 06:24:37,510 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB
+2023-03-09 06:24:40,688 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8093, 3.6626, 5.0206, 4.2572, 3.2002, 2.9554, 4.3724, 5.1942],
+       device='cuda:2'), covar=tensor([0.0765, 0.1423, 0.0129, 0.0379, 0.0989, 0.1148, 0.0402, 0.0228],
+       device='cuda:2'), in_proj_covar=tensor([0.0139, 0.0256, 0.0122, 0.0170, 0.0182, 0.0182, 0.0183, 0.0167],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 06:25:06,591 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=50901.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 06:25:07,701 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50902.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:25:11,461 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0
+2023-03-09 06:25:34,615 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9446, 5.4106, 5.4625, 5.3940, 4.9855, 5.2879, 4.8173, 5.3460],
+       device='cuda:2'), covar=tensor([0.0238, 0.0275, 0.0177, 0.0441, 0.0396, 0.0251, 0.1018, 0.0275],
+       device='cuda:2'), in_proj_covar=tensor([0.0190, 0.0235, 0.0231, 0.0280, 0.0241, 0.0240, 0.0291, 0.0229],
+       device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005],
+       device='cuda:2')
+2023-03-09 06:25:35,413 INFO [train.py:898] (2/4) Epoch 15, batch 50, loss[loss=0.1753, simple_loss=0.2555, pruned_loss=0.04757, over 18267.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2669, pruned_loss=0.04729, over 800232.02 frames. ], batch size: 45, lr: 7.70e-03, grad_scale: 8.0
+2023-03-09 06:25:58,307 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50946.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:26:11,155 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.078e+02 2.874e+02 3.277e+02 4.282e+02 1.387e+03, threshold=6.554e+02, percent-clipped=5.0
+2023-03-09 06:26:33,994 INFO [train.py:898] (2/4) Epoch 15, batch 100, loss[loss=0.1484, simple_loss=0.2329, pruned_loss=0.03194, over 18486.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2641, pruned_loss=0.046, over 1412464.53 frames. ], batch size: 44, lr: 7.69e-03, grad_scale: 8.0
+2023-03-09 06:26:41,220 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.58 vs. limit=5.0
+2023-03-09 06:27:10,674 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51007.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:27:33,103 INFO [train.py:898] (2/4) Epoch 15, batch 150, loss[loss=0.1736, simple_loss=0.2564, pruned_loss=0.0454, over 18549.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2641, pruned_loss=0.04511, over 1905685.08 frames. ], batch size: 49, lr: 7.69e-03, grad_scale: 8.0
+2023-03-09 06:28:07,421 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2813, 5.2615, 5.3924, 5.4968, 5.1399, 5.9902, 5.7053, 5.2468],
+       device='cuda:2'), covar=tensor([0.1165, 0.0654, 0.0857, 0.0746, 0.1642, 0.0826, 0.0601, 0.1971],
+       device='cuda:2'), in_proj_covar=tensor([0.0336, 0.0262, 0.0278, 0.0277, 0.0314, 0.0390, 0.0254, 0.0375],
+       device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0004, 0.0002, 0.0003],
+       device='cuda:2')
+2023-03-09 06:28:09,467 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.782e+02 2.876e+02 3.303e+02 4.141e+02 9.283e+02, threshold=6.606e+02, percent-clipped=4.0
+2023-03-09 06:28:15,494 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51062.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:28:28,182 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.81 vs. limit=5.0
+2023-03-09 06:28:31,740 INFO [train.py:898] (2/4) Epoch 15, batch 200, loss[loss=0.1759, simple_loss=0.2606, pruned_loss=0.04565, over 18536.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2641, pruned_loss=0.04514, over 2275070.23 frames. ], batch size: 49, lr: 7.69e-03, grad_scale: 8.0
+2023-03-09 06:28:49,593 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.32 vs. limit=5.0
+2023-03-09 06:29:25,537 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51123.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:29:28,806 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6468, 5.1383, 5.1355, 5.0960, 4.6037, 5.0328, 4.5208, 5.0355],
+       device='cuda:2'), covar=tensor([0.0240, 0.0265, 0.0211, 0.0376, 0.0409, 0.0208, 0.1042, 0.0278],
+       device='cuda:2'), in_proj_covar=tensor([0.0191, 0.0236, 0.0232, 0.0282, 0.0243, 0.0240, 0.0292, 0.0229],
+       device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0005],
+       device='cuda:2')
+2023-03-09 06:29:29,637 INFO [train.py:898] (2/4) Epoch 15, batch 250, loss[loss=0.1771, simple_loss=0.2711, pruned_loss=0.04152, over 18133.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.265, pruned_loss=0.04525, over 2567875.32 frames. ], batch size: 62, lr: 7.68e-03, grad_scale: 8.0
+2023-03-09 06:29:30,097 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7253, 3.6687, 5.0676, 4.3790, 3.1729, 2.8772, 4.4381, 5.2118],
+       device='cuda:2'), covar=tensor([0.0753, 0.1513, 0.0116, 0.0331, 0.0927, 0.1175, 0.0358, 0.0160],
+       device='cuda:2'), in_proj_covar=tensor([0.0142, 0.0259, 0.0124, 0.0172, 0.0184, 0.0185, 0.0185, 0.0168],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 06:29:41,528 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51138.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:30:03,163 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.733e+02 3.055e+02 3.759e+02 4.768e+02 8.337e+02, threshold=7.517e+02, percent-clipped=9.0
+2023-03-09 06:30:08,716 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5176, 3.0536, 4.3318, 3.5225, 2.5896, 4.6269, 3.9422, 2.8082],
+       device='cuda:2'), covar=tensor([0.0522, 0.1278, 0.0198, 0.0431, 0.1493, 0.0158, 0.0504, 0.0991],
+       device='cuda:2'), in_proj_covar=tensor([0.0200, 0.0230, 0.0178, 0.0150, 0.0217, 0.0194, 0.0222, 0.0193],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:2')
+2023-03-09 06:30:12,420 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51164.0, num_to_drop=1, layers_to_drop={3}
+2023-03-09 06:30:21,590 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51172.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 06:30:27,053 INFO [train.py:898] (2/4) Epoch 15, batch 300, loss[loss=0.1702, simple_loss=0.2603, pruned_loss=0.04004, over 18397.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2656, pruned_loss=0.04539, over 2802741.28 frames. ], batch size: 52, lr: 7.68e-03, grad_scale: 8.0
+2023-03-09 06:30:28,523 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1782, 3.1718, 2.0467, 3.7805, 2.6740, 3.5957, 2.3329, 3.2874],
+       device='cuda:2'), covar=tensor([0.0583, 0.0739, 0.1277, 0.0551, 0.0822, 0.0340, 0.1074, 0.0440],
+       device='cuda:2'), in_proj_covar=tensor([0.0206, 0.0218, 0.0183, 0.0264, 0.0187, 0.0256, 0.0197, 0.0195],
+       device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 06:30:37,777 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0
+2023-03-09 06:30:56,806 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51202.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:31:04,788 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6198, 2.8437, 2.5441, 2.8804, 3.7376, 3.5335, 3.1340, 3.0717],
+       device='cuda:2'), covar=tensor([0.0185, 0.0323, 0.0576, 0.0329, 0.0149, 0.0161, 0.0375, 0.0348],
+       device='cuda:2'), in_proj_covar=tensor([0.0130, 0.0126, 0.0159, 0.0151, 0.0116, 0.0102, 0.0143, 0.0143],
+       device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 06:31:26,183 INFO [train.py:898] (2/4) Epoch 15, batch 350, loss[loss=0.1811, simple_loss=0.2741, pruned_loss=0.04405, over 18490.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2645, pruned_loss=0.04504, over 2986736.81 frames. ], batch size: 53, lr: 7.67e-03, grad_scale: 8.0
+2023-03-09 06:31:33,486 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51233.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 06:31:53,050 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51250.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:32:01,196 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.843e+02 2.755e+02 3.363e+02 4.160e+02 1.249e+03, threshold=6.726e+02, percent-clipped=1.0
+2023-03-09 06:32:25,069 INFO [train.py:898] (2/4) Epoch 15, batch 400, loss[loss=0.1887, simple_loss=0.2739, pruned_loss=0.05171, over 18355.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2632, pruned_loss=0.04452, over 3124897.39 frames. ], batch size: 56, lr: 7.67e-03, grad_scale: 8.0
+2023-03-09 06:32:25,891 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=2.05 vs. limit=2.0
+2023-03-09 06:32:53,544 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51302.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:33:07,537 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4524, 5.4387, 5.0515, 5.3249, 5.3784, 4.7325, 5.3177, 5.0793],
+       device='cuda:2'), covar=tensor([0.0433, 0.0413, 0.1358, 0.0819, 0.0525, 0.0398, 0.0361, 0.0934],
+       device='cuda:2'), in_proj_covar=tensor([0.0446, 0.0508, 0.0667, 0.0398, 0.0394, 0.0460, 0.0486, 0.0631],
+       device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006],
+       device='cuda:2')
+2023-03-09 06:33:24,230 INFO [train.py:898] (2/4) Epoch 15, batch 450, loss[loss=0.1702, simple_loss=0.2633, pruned_loss=0.03856, over 17052.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2629, pruned_loss=0.04422, over 3241036.15 frames. ], batch size: 78, lr: 7.67e-03, grad_scale: 8.0
+2023-03-09 06:33:59,194 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.915e+02 2.968e+02 3.356e+02 4.062e+02 8.839e+02, threshold=6.712e+02, percent-clipped=1.0
+2023-03-09 06:34:23,280 INFO [train.py:898] (2/4) Epoch 15, batch 500, loss[loss=0.2001, simple_loss=0.2844, pruned_loss=0.05792, over 18622.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.263, pruned_loss=0.04426, over 3307624.57 frames. ], batch size: 52, lr: 7.66e-03, grad_scale: 8.0
+2023-03-09 06:34:29,093 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51382.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:34:40,434 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51392.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:34:44,836 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4770, 3.2307, 2.1027, 4.1928, 2.9506, 4.1185, 2.2506, 3.8517],
+       device='cuda:2'), covar=tensor([0.0676, 0.0903, 0.1441, 0.0538, 0.0872, 0.0292, 0.1294, 0.0395],
+       device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0223, 0.0187, 0.0267, 0.0190, 0.0259, 0.0202, 0.0198],
+       device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 06:35:10,687 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51418.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:35:20,790 INFO [train.py:898] (2/4) Epoch 15, batch 550, loss[loss=0.1715, simple_loss=0.2595, pruned_loss=0.04176, over 18498.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2638, pruned_loss=0.04491, over 3368393.18 frames. ], batch size: 51, lr: 7.66e-03, grad_scale: 8.0
+2023-03-09 06:35:33,751 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51438.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:35:39,563 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51443.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:35:50,521 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51453.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:35:54,565 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.873e+02 3.112e+02 3.809e+02 4.675e+02 7.627e+02, threshold=7.618e+02, percent-clipped=1.0
+2023-03-09 06:36:03,354 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51464.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 06:36:18,192 INFO [train.py:898] (2/4) Epoch 15, batch 600, loss[loss=0.1928, simple_loss=0.2806, pruned_loss=0.05246, over 18365.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.265, pruned_loss=0.04534, over 3414290.62 frames. ], batch size: 55, lr: 7.66e-03, grad_scale: 8.0
+2023-03-09 06:36:29,453 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51486.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:36:39,876 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51495.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:36:58,930 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51512.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 06:37:16,762 INFO [train.py:898] (2/4) Epoch 15, batch 650, loss[loss=0.2309, simple_loss=0.303, pruned_loss=0.07944, over 12242.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2649, pruned_loss=0.04506, over 3450765.24 frames. ], batch size: 131, lr: 7.65e-03, grad_scale: 8.0
+2023-03-09 06:37:18,092 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51528.0, num_to_drop=1, layers_to_drop={3}
+2023-03-09 06:37:36,353 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1922, 5.1278, 5.4422, 5.4363, 5.0704, 5.9394, 5.5806, 5.2442],
+       device='cuda:2'), covar=tensor([0.1044, 0.0672, 0.0808, 0.0654, 0.1434, 0.0695, 0.0601, 0.1674],
+       device='cuda:2'), in_proj_covar=tensor([0.0335, 0.0260, 0.0276, 0.0276, 0.0311, 0.0389, 0.0254, 0.0373],
+       device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0004, 0.0002, 0.0003],
+       device='cuda:2')
+2023-03-09 06:37:51,186 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51556.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:37:51,901 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.157e+02 2.845e+02 3.389e+02 4.059e+02 1.145e+03, threshold=6.778e+02, percent-clipped=5.0
+2023-03-09 06:38:07,763 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4407, 3.2020, 2.0247, 4.1527, 2.8674, 4.1117, 2.1640, 3.5512],
+       device='cuda:2'), covar=tensor([0.0590, 0.0863, 0.1430, 0.0472, 0.0849, 0.0313, 0.1260, 0.0468],
+       device='cuda:2'), in_proj_covar=tensor([0.0205, 0.0220, 0.0183, 0.0264, 0.0187, 0.0255, 0.0199, 0.0195],
+       device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 06:38:15,814 INFO [train.py:898] (2/4) Epoch 15, batch 700, loss[loss=0.1746, simple_loss=0.2623, pruned_loss=0.04348, over 18408.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2643, pruned_loss=0.04485, over 3491581.01 frames. ], batch size: 48, lr: 7.65e-03, grad_scale: 8.0
+2023-03-09 06:38:42,776 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-03-09 06:38:46,162 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51602.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:39:12,450 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5386, 5.5248, 5.1679, 5.3651, 4.8201, 5.2687, 5.6405, 5.4358],
+       device='cuda:2'), covar=tensor([0.2823, 0.1133, 0.0893, 0.1189, 0.2578, 0.1087, 0.0937, 0.1223],
+       device='cuda:2'), in_proj_covar=tensor([0.0569, 0.0475, 0.0357, 0.0508, 0.0693, 0.0503, 0.0676, 0.0504],
+       device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004],
+       device='cuda:2')
+2023-03-09 06:39:12,615 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5747, 3.5002, 3.3535, 2.9614, 3.3513, 2.6700, 2.6449, 3.5607],
+       device='cuda:2'), covar=tensor([0.0049, 0.0086, 0.0074, 0.0125, 0.0082, 0.0177, 0.0185, 0.0049],
+       device='cuda:2'), in_proj_covar=tensor([0.0118, 0.0139, 0.0120, 0.0174, 0.0124, 0.0167, 0.0170, 0.0100],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+       device='cuda:2')
+2023-03-09 06:39:14,427 INFO [train.py:898] (2/4) Epoch 15, batch 750, loss[loss=0.1731, simple_loss=0.2663, pruned_loss=0.03993, over 17013.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2649, pruned_loss=0.04503, over 3505638.03 frames. ], batch size: 78, lr: 7.65e-03, grad_scale: 8.0
+2023-03-09 06:39:22,072 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8606, 3.9032, 5.2910, 4.6487, 3.7481, 3.2884, 4.9031, 5.4910],
+       device='cuda:2'), covar=tensor([0.0777, 0.1615, 0.0136, 0.0311, 0.0696, 0.1023, 0.0256, 0.0230],
+       device='cuda:2'), in_proj_covar=tensor([0.0141, 0.0257, 0.0123, 0.0172, 0.0184, 0.0183, 0.0184, 0.0169],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 06:39:32,063 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0
+2023-03-09 06:39:42,125 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51650.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:39:49,773 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.857e+02 2.830e+02 3.241e+02 3.850e+02 9.423e+02, threshold=6.481e+02, percent-clipped=1.0
+2023-03-09 06:39:58,028 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1308, 3.0440, 3.0273, 2.8297, 3.0447, 2.5098, 2.4954, 3.1525],
+       device='cuda:2'), covar=tensor([0.0065, 0.0086, 0.0079, 0.0109, 0.0089, 0.0159, 0.0175, 0.0056],
+       device='cuda:2'), in_proj_covar=tensor([0.0120, 0.0141, 0.0122, 0.0177, 0.0126, 0.0169, 0.0173, 0.0102],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0001],
+       device='cuda:2')
+2023-03-09 06:40:12,983 INFO [train.py:898] (2/4) Epoch 15, batch 800, loss[loss=0.1758, simple_loss=0.2648, pruned_loss=0.04336, over 18400.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2643, pruned_loss=0.04461, over 3530303.07 frames. ], batch size: 52, lr: 7.64e-03, grad_scale: 8.0
+2023-03-09 06:40:18,306 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0
+2023-03-09 06:40:31,767 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.40 vs. limit=5.0
+2023-03-09 06:41:01,455 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51718.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:41:11,149 INFO [train.py:898] (2/4) Epoch 15, batch 850, loss[loss=0.1715, simple_loss=0.2562, pruned_loss=0.04336, over 17990.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2635, pruned_loss=0.0443, over 3540804.14 frames. ], batch size: 65, lr: 7.64e-03, grad_scale: 8.0
+2023-03-09 06:41:24,446 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51738.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:41:35,561 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51747.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:41:36,626 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51748.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:41:47,045 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.547e+02 2.965e+02 3.491e+02 4.119e+02 9.035e+02, threshold=6.982e+02, percent-clipped=4.0
+2023-03-09 06:41:57,164 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51766.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:42:02,019 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0569, 5.1237, 5.1728, 4.8854, 4.9231, 4.9763, 5.2653, 5.2988],
+       device='cuda:2'), covar=tensor([0.0062, 0.0046, 0.0058, 0.0093, 0.0055, 0.0103, 0.0058, 0.0080],
+       device='cuda:2'), in_proj_covar=tensor([0.0086, 0.0061, 0.0065, 0.0084, 0.0068, 0.0094, 0.0080, 0.0079],
+       device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003],
+       device='cuda:2')
+2023-03-09 06:42:09,466 INFO [train.py:898] (2/4) Epoch 15, batch 900, loss[loss=0.1821, simple_loss=0.2639, pruned_loss=0.0502, over 18379.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2634, pruned_loss=0.04416, over 3541375.55 frames. ], batch size: 50, lr: 7.63e-03, grad_scale: 8.0
+2023-03-09 06:42:47,116 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51808.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:43:08,041 INFO [train.py:898] (2/4) Epoch 15, batch 950, loss[loss=0.1957, simple_loss=0.2856, pruned_loss=0.05296, over 18384.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2631, pruned_loss=0.04401, over 3554149.42 frames. ], batch size: 50, lr: 7.63e-03, grad_scale: 8.0
+2023-03-09 06:43:09,473 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51828.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 06:43:37,265 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51851.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:43:43,804 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.174e+02 2.846e+02 3.316e+02 4.013e+02 9.556e+02, threshold=6.632e+02, percent-clipped=3.0
+2023-03-09 06:43:45,888 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6198, 3.3631, 2.2315, 4.3830, 3.0434, 4.3359, 2.3269, 4.0711],
+       device='cuda:2'), covar=tensor([0.0603, 0.0809, 0.1311, 0.0389, 0.0838, 0.0272, 0.1149, 0.0340],
+       device='cuda:2'), in_proj_covar=tensor([0.0204, 0.0218, 0.0181, 0.0263, 0.0185, 0.0254, 0.0197, 0.0195],
+       device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 06:44:06,184 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=51876.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 06:44:06,919 INFO [train.py:898] (2/4) Epoch 15, batch 1000, loss[loss=0.2074, simple_loss=0.2943, pruned_loss=0.06019, over 18112.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.264, pruned_loss=0.04416, over 3563592.01 frames. ], batch size: 62, lr: 7.63e-03, grad_scale: 8.0
+2023-03-09 06:45:05,538 INFO [train.py:898] (2/4) Epoch 15, batch 1050, loss[loss=0.1844, simple_loss=0.2689, pruned_loss=0.04998, over 18333.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2633, pruned_loss=0.04371, over 3582681.04 frames. ], batch size: 56, lr: 7.62e-03, grad_scale: 8.0
+2023-03-09 06:45:30,565 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7938, 3.7214, 3.5271, 3.0524, 3.4717, 2.5288, 2.3589, 3.8067],
+       device='cuda:2'), covar=tensor([0.0055, 0.0097, 0.0076, 0.0155, 0.0094, 0.0250, 0.0329, 0.0061],
+       device='cuda:2'), in_proj_covar=tensor([0.0118, 0.0141, 0.0120, 0.0175, 0.0125, 0.0169, 0.0174, 0.0102],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0001],
+       device='cuda:2')
+2023-03-09 06:45:37,275 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5075, 2.1456, 2.7666, 2.5511, 3.0746, 4.9715, 4.7289, 3.7685],
+       device='cuda:2'), covar=tensor([0.1530, 0.2238, 0.2452, 0.1668, 0.2243, 0.0151, 0.0335, 0.0647],
+       device='cuda:2'), in_proj_covar=tensor([0.0265, 0.0321, 0.0347, 0.0259, 0.0372, 0.0209, 0.0276, 0.0227],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001],
+       device='cuda:2')
+2023-03-09 06:45:39,862 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.929e+02 2.891e+02 3.197e+02 3.857e+02 9.406e+02, threshold=6.393e+02, percent-clipped=2.0
+2023-03-09 06:46:03,750 INFO [train.py:898] (2/4) Epoch 15, batch 1100, loss[loss=0.1886, simple_loss=0.2815, pruned_loss=0.04781, over 18264.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2631, pruned_loss=0.04372, over 3589346.75 frames. ], batch size: 60, lr: 7.62e-03, grad_scale: 8.0
+2023-03-09 06:46:59,419 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6201, 4.0298, 5.3049, 4.2007, 3.0721, 2.6622, 4.5011, 5.3882],
+       device='cuda:2'), covar=tensor([0.0804, 0.1265, 0.0115, 0.0437, 0.0941, 0.1211, 0.0337, 0.0160],
+       device='cuda:2'), in_proj_covar=tensor([0.0141, 0.0257, 0.0123, 0.0173, 0.0184, 0.0182, 0.0184, 0.0170],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 06:47:06,917 INFO [train.py:898] (2/4) Epoch 15, batch 1150, loss[loss=0.1768, simple_loss=0.2732, pruned_loss=0.04016, over 16488.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2635, pruned_loss=0.04387, over 3590475.21 frames. ], batch size: 95, lr: 7.62e-03, grad_scale: 8.0
+2023-03-09 06:47:19,934 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52038.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:47:22,308 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52040.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:47:31,836 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52048.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:47:42,029 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.912e+02 2.945e+02 3.374e+02 4.146e+02 9.750e+02, threshold=6.749e+02, percent-clipped=3.0
+2023-03-09 06:48:05,592 INFO [train.py:898] (2/4) Epoch 15, batch 1200, loss[loss=0.1796, simple_loss=0.2679, pruned_loss=0.04562, over 18246.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2637, pruned_loss=0.04394, over 3570547.31 frames. ], batch size: 60, lr: 7.61e-03, grad_scale: 8.0
+2023-03-09 06:48:15,943 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52086.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:48:27,257 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52096.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:48:33,641 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1253, 2.5423, 3.1687, 3.0698, 2.4092, 3.3711, 3.2884, 2.4477],
+       device='cuda:2'), covar=tensor([0.0457, 0.1172, 0.0389, 0.0332, 0.1215, 0.0269, 0.0565, 0.0900],
+       device='cuda:2'), in_proj_covar=tensor([0.0200, 0.0229, 0.0179, 0.0150, 0.0217, 0.0194, 0.0224, 0.0193],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:2')
+2023-03-09 06:48:33,680 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52101.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:48:35,659 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52103.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:49:03,942 INFO [train.py:898] (2/4) Epoch 15, batch 1250, loss[loss=0.1808, simple_loss=0.2713, pruned_loss=0.04517, over 18220.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2634, pruned_loss=0.04393, over 3581480.85 frames. ], batch size: 60, lr: 7.61e-03, grad_scale: 8.0
+2023-03-09 06:49:12,367 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5852, 2.1578, 2.6365, 2.5219, 3.1102, 4.8252, 4.5339, 3.4817],
+       device='cuda:2'), covar=tensor([0.1463, 0.2189, 0.2638, 0.1624, 0.2034, 0.0150, 0.0347, 0.0709],
+       device='cuda:2'), in_proj_covar=tensor([0.0265, 0.0321, 0.0346, 0.0260, 0.0371, 0.0209, 0.0275, 0.0226],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001],
+       device='cuda:2')
+2023-03-09 06:49:30,483 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0084, 5.3340, 2.8154, 5.2300, 5.1060, 5.3811, 5.1570, 2.9667],
+       device='cuda:2'), covar=tensor([0.0184, 0.0069, 0.0734, 0.0072, 0.0070, 0.0071, 0.0092, 0.0865],
+       device='cuda:2'), in_proj_covar=tensor([0.0082, 0.0072, 0.0091, 0.0086, 0.0081, 0.0069, 0.0080, 0.0093],
+       device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005],
+       device='cuda:2')
+2023-03-09 06:49:31,471 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52151.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:49:38,915 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.755e+02 2.743e+02 3.530e+02 4.492e+02 1.221e+03, threshold=7.059e+02, percent-clipped=4.0
+2023-03-09 06:50:02,588 INFO [train.py:898] (2/4) Epoch 15, batch 1300, loss[loss=0.1559, simple_loss=0.2534, pruned_loss=0.02913, over 18306.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2627, pruned_loss=0.04385, over 3579384.08 frames. ], batch size: 54, lr: 7.61e-03, grad_scale: 8.0
+2023-03-09 06:50:21,058 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52193.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:50:27,684 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52199.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:50:58,980 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5280, 5.4849, 5.0232, 5.4136, 5.4059, 4.8565, 5.3592, 5.0906],
+       device='cuda:2'), covar=tensor([0.0387, 0.0402, 0.1442, 0.0779, 0.0520, 0.0385, 0.0362, 0.0886],
+       device='cuda:2'), in_proj_covar=tensor([0.0446, 0.0505, 0.0661, 0.0395, 0.0388, 0.0458, 0.0486, 0.0624],
+       device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006],
+       device='cuda:2')
+2023-03-09 06:51:00,951 INFO [train.py:898] (2/4) Epoch 15, batch 1350, loss[loss=0.1746, simple_loss=0.2731, pruned_loss=0.03805, over 18000.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2627, pruned_loss=0.04387, over 3578136.82 frames. ], batch size: 65, lr: 7.60e-03, grad_scale: 8.0
+2023-03-09 06:51:32,283 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52254.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:51:33,410 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5506, 3.5042, 4.7876, 4.1992, 3.1162, 2.7707, 4.2102, 4.9283],
+       device='cuda:2'), covar=tensor([0.0844, 0.1547, 0.0184, 0.0436, 0.1023, 0.1334, 0.0433, 0.0307],
+       device='cuda:2'), in_proj_covar=tensor([0.0142, 0.0258, 0.0124, 0.0172, 0.0184, 0.0183, 0.0184, 0.0171],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 06:51:35,205 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.856e+02 2.890e+02 3.384e+02 4.121e+02 8.952e+02, threshold=6.768e+02, percent-clipped=2.0
+2023-03-09 06:51:44,379 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4062, 3.8254, 5.0053, 4.2050, 2.7872, 2.6239, 4.2146, 5.1899],
+       device='cuda:2'), covar=tensor([0.0907, 0.1555, 0.0130, 0.0409, 0.1067, 0.1250, 0.0387, 0.0161],
+       device='cuda:2'), in_proj_covar=tensor([0.0141, 0.0258, 0.0124, 0.0172, 0.0183, 0.0182, 0.0183, 0.0170],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:2')
+2023-03-09 06:51:59,005 INFO [train.py:898] (2/4) Epoch 15, batch 1400, loss[loss=0.1977, simple_loss=0.2818, pruned_loss=0.05681, over 18571.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.263, pruned_loss=0.04396, over 3581734.55 frames. ], batch size: 54, lr: 7.60e-03, grad_scale: 8.0
+2023-03-09 06:52:19,242 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2186, 3.9883, 5.2714, 3.1037, 4.5739, 2.9381, 3.3050, 2.1894],
+       device='cuda:2'), covar=tensor([0.0811, 0.0754, 0.0111, 0.0721, 0.0455, 0.2042, 0.2326, 0.1706],
+       device='cuda:2'), in_proj_covar=tensor([0.0207, 0.0226, 0.0143, 0.0182, 0.0239, 0.0256, 0.0300, 0.0219],
+       device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002],
+       device='cuda:2')
+2023-03-09 06:52:40,481 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52312.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 06:52:57,737 INFO [train.py:898] (2/4) Epoch 15, batch 1450, loss[loss=0.1713, simple_loss=0.2639, pruned_loss=0.03937, over 18293.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2633, pruned_loss=0.04418, over 3585956.09 frames.
], batch size: 49, lr: 7.59e-03, grad_scale: 8.0 +2023-03-09 06:53:01,491 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8058, 5.3351, 5.3138, 5.3074, 4.8108, 5.1931, 4.5755, 5.1366], + device='cuda:2'), covar=tensor([0.0225, 0.0250, 0.0184, 0.0353, 0.0359, 0.0198, 0.1105, 0.0280], + device='cuda:2'), in_proj_covar=tensor([0.0192, 0.0239, 0.0233, 0.0285, 0.0246, 0.0243, 0.0295, 0.0235], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 06:53:04,947 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2727, 5.2406, 4.8316, 5.1736, 5.1145, 4.5664, 5.1238, 4.7950], + device='cuda:2'), covar=tensor([0.0419, 0.0438, 0.1548, 0.0765, 0.0598, 0.0422, 0.0418, 0.1033], + device='cuda:2'), in_proj_covar=tensor([0.0448, 0.0507, 0.0667, 0.0398, 0.0392, 0.0460, 0.0491, 0.0630], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 06:53:31,869 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.869e+02 2.900e+02 3.474e+02 4.248e+02 1.297e+03, threshold=6.947e+02, percent-clipped=2.0 +2023-03-09 06:53:51,238 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52373.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:53:55,307 INFO [train.py:898] (2/4) Epoch 15, batch 1500, loss[loss=0.1834, simple_loss=0.2756, pruned_loss=0.04561, over 18299.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2635, pruned_loss=0.04412, over 3577580.75 frames. ], batch size: 57, lr: 7.59e-03, grad_scale: 8.0 +2023-03-09 06:54:18,215 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52396.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:54:26,390 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52403.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:54:54,575 INFO [train.py:898] (2/4) Epoch 15, batch 1550, loss[loss=0.1468, simple_loss=0.2371, pruned_loss=0.02822, over 18470.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.263, pruned_loss=0.04388, over 3569445.26 frames. ], batch size: 44, lr: 7.59e-03, grad_scale: 8.0 +2023-03-09 06:55:23,294 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52451.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:55:29,980 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.173e+02 2.872e+02 3.351e+02 3.820e+02 1.204e+03, threshold=6.703e+02, percent-clipped=1.0 +2023-03-09 06:55:54,081 INFO [train.py:898] (2/4) Epoch 15, batch 1600, loss[loss=0.1953, simple_loss=0.2877, pruned_loss=0.05147, over 18460.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2636, pruned_loss=0.04387, over 3579305.31 frames. ], batch size: 59, lr: 7.58e-03, grad_scale: 16.0 +2023-03-09 06:56:52,755 INFO [train.py:898] (2/4) Epoch 15, batch 1650, loss[loss=0.1656, simple_loss=0.2596, pruned_loss=0.03583, over 18384.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2632, pruned_loss=0.04389, over 3576079.16 frames. 
], batch size: 50, lr: 7.58e-03, grad_scale: 16.0 +2023-03-09 06:57:18,405 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52549.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:57:25,781 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52555.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 06:57:27,650 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.792e+02 2.929e+02 3.401e+02 4.256e+02 6.511e+02, threshold=6.802e+02, percent-clipped=0.0 +2023-03-09 06:57:50,952 INFO [train.py:898] (2/4) Epoch 15, batch 1700, loss[loss=0.1632, simple_loss=0.2436, pruned_loss=0.04143, over 17662.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2624, pruned_loss=0.04366, over 3583513.45 frames. ], batch size: 39, lr: 7.58e-03, grad_scale: 16.0 +2023-03-09 06:58:06,242 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.81 vs. limit=2.0 +2023-03-09 06:58:37,291 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52616.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 06:58:49,967 INFO [train.py:898] (2/4) Epoch 15, batch 1750, loss[loss=0.1691, simple_loss=0.2561, pruned_loss=0.04104, over 18379.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2613, pruned_loss=0.0433, over 3590729.08 frames. ], batch size: 50, lr: 7.57e-03, grad_scale: 8.0 +2023-03-09 06:59:25,815 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7081, 3.4135, 2.0557, 4.4447, 2.9980, 4.3475, 2.4255, 3.8787], + device='cuda:2'), covar=tensor([0.0566, 0.0778, 0.1469, 0.0411, 0.0896, 0.0279, 0.1102, 0.0406], + device='cuda:2'), in_proj_covar=tensor([0.0206, 0.0216, 0.0183, 0.0264, 0.0186, 0.0254, 0.0197, 0.0194], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 06:59:26,520 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.883e+02 2.725e+02 3.268e+02 4.051e+02 8.662e+02, threshold=6.535e+02, percent-clipped=2.0 +2023-03-09 06:59:37,919 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52668.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 06:59:48,476 INFO [train.py:898] (2/4) Epoch 15, batch 1800, loss[loss=0.1549, simple_loss=0.2384, pruned_loss=0.03571, over 18487.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2613, pruned_loss=0.04314, over 3587212.90 frames. ], batch size: 47, lr: 7.57e-03, grad_scale: 8.0 +2023-03-09 06:59:49,946 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0100, 4.7729, 4.8460, 3.5174, 4.0524, 3.7755, 2.6935, 2.7324], + device='cuda:2'), covar=tensor([0.0192, 0.0108, 0.0060, 0.0292, 0.0262, 0.0179, 0.0744, 0.0702], + device='cuda:2'), in_proj_covar=tensor([0.0064, 0.0052, 0.0054, 0.0063, 0.0083, 0.0060, 0.0073, 0.0079], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0004, 0.0006, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 07:00:11,332 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52696.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:00:46,789 INFO [train.py:898] (2/4) Epoch 15, batch 1850, loss[loss=0.1856, simple_loss=0.2668, pruned_loss=0.05221, over 18290.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2613, pruned_loss=0.04317, over 3596262.56 frames. 
], batch size: 49, lr: 7.57e-03, grad_scale: 8.0 +2023-03-09 07:01:08,153 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52744.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:01:17,392 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52752.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:01:23,879 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.980e+02 2.718e+02 3.382e+02 4.103e+02 9.791e+02, threshold=6.764e+02, percent-clipped=3.0 +2023-03-09 07:01:46,238 INFO [train.py:898] (2/4) Epoch 15, batch 1900, loss[loss=0.1731, simple_loss=0.2554, pruned_loss=0.04543, over 18253.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.262, pruned_loss=0.04335, over 3581867.84 frames. ], batch size: 47, lr: 7.56e-03, grad_scale: 8.0 +2023-03-09 07:02:30,002 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52813.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:02:45,705 INFO [train.py:898] (2/4) Epoch 15, batch 1950, loss[loss=0.1866, simple_loss=0.2757, pruned_loss=0.04877, over 17201.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2627, pruned_loss=0.04351, over 3594210.11 frames. ], batch size: 78, lr: 7.56e-03, grad_scale: 8.0 +2023-03-09 07:02:53,286 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1443, 5.1175, 4.7048, 5.0148, 5.0322, 4.4213, 4.9780, 4.6860], + device='cuda:2'), covar=tensor([0.0460, 0.0467, 0.1340, 0.0910, 0.0614, 0.0468, 0.0422, 0.1105], + device='cuda:2'), in_proj_covar=tensor([0.0454, 0.0508, 0.0665, 0.0402, 0.0393, 0.0462, 0.0495, 0.0633], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 07:03:12,909 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52849.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:03:22,535 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.851e+02 2.886e+02 3.501e+02 4.238e+02 6.243e+02, threshold=7.003e+02, percent-clipped=0.0 +2023-03-09 07:03:44,588 INFO [train.py:898] (2/4) Epoch 15, batch 2000, loss[loss=0.2007, simple_loss=0.288, pruned_loss=0.0567, over 18311.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.262, pruned_loss=0.04338, over 3586955.17 frames. ], batch size: 57, lr: 7.56e-03, grad_scale: 8.0 +2023-03-09 07:04:08,642 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=52897.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:04:25,194 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52911.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 07:04:43,164 INFO [train.py:898] (2/4) Epoch 15, batch 2050, loss[loss=0.1554, simple_loss=0.2457, pruned_loss=0.03251, over 18241.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2629, pruned_loss=0.04368, over 3588370.01 frames. 
], batch size: 47, lr: 7.55e-03, grad_scale: 8.0 +2023-03-09 07:05:19,673 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.048e+02 3.109e+02 3.661e+02 4.518e+02 9.006e+02, threshold=7.322e+02, percent-clipped=1.0 +2023-03-09 07:05:20,176 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6156, 2.7974, 2.5699, 2.8997, 3.6711, 3.5378, 3.1185, 2.9220], + device='cuda:2'), covar=tensor([0.0153, 0.0289, 0.0522, 0.0328, 0.0163, 0.0166, 0.0317, 0.0358], + device='cuda:2'), in_proj_covar=tensor([0.0126, 0.0123, 0.0157, 0.0148, 0.0113, 0.0101, 0.0140, 0.0144], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 07:05:30,994 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52968.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:05:37,371 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2855, 5.8804, 5.3731, 5.5874, 5.3901, 5.3088, 5.8806, 5.8357], + device='cuda:2'), covar=tensor([0.1339, 0.0783, 0.0562, 0.0798, 0.1552, 0.0772, 0.0561, 0.0763], + device='cuda:2'), in_proj_covar=tensor([0.0571, 0.0483, 0.0353, 0.0506, 0.0694, 0.0506, 0.0680, 0.0504], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 07:05:41,842 INFO [train.py:898] (2/4) Epoch 15, batch 2100, loss[loss=0.1686, simple_loss=0.2635, pruned_loss=0.03683, over 18489.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2634, pruned_loss=0.04419, over 3582012.96 frames. ], batch size: 53, lr: 7.55e-03, grad_scale: 8.0 +2023-03-09 07:06:11,322 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.47 vs. limit=5.0 +2023-03-09 07:06:25,725 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-03-09 07:06:27,727 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4099, 2.6510, 2.3700, 2.7826, 3.5206, 3.4655, 2.9718, 2.8267], + device='cuda:2'), covar=tensor([0.0188, 0.0332, 0.0627, 0.0386, 0.0177, 0.0143, 0.0369, 0.0393], + device='cuda:2'), in_proj_covar=tensor([0.0128, 0.0124, 0.0159, 0.0150, 0.0114, 0.0103, 0.0141, 0.0145], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 07:06:28,694 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53016.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:06:41,339 INFO [train.py:898] (2/4) Epoch 15, batch 2150, loss[loss=0.1672, simple_loss=0.257, pruned_loss=0.03866, over 18303.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2641, pruned_loss=0.04443, over 3584777.30 frames. ], batch size: 54, lr: 7.54e-03, grad_scale: 8.0 +2023-03-09 07:06:44,306 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7397, 3.7052, 3.5587, 3.1894, 3.4870, 2.9480, 2.7812, 3.7618], + device='cuda:2'), covar=tensor([0.0049, 0.0089, 0.0068, 0.0124, 0.0083, 0.0160, 0.0183, 0.0056], + device='cuda:2'), in_proj_covar=tensor([0.0118, 0.0138, 0.0119, 0.0170, 0.0123, 0.0166, 0.0168, 0.0100], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 07:06:51,607 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.82 vs. 
limit=2.0 +2023-03-09 07:07:15,721 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53056.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:07:17,593 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.753e+02 2.734e+02 3.607e+02 4.426e+02 8.494e+02, threshold=7.214e+02, percent-clipped=2.0 +2023-03-09 07:07:40,263 INFO [train.py:898] (2/4) Epoch 15, batch 2200, loss[loss=0.1621, simple_loss=0.2427, pruned_loss=0.04074, over 18375.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2639, pruned_loss=0.04458, over 3596795.73 frames. ], batch size: 42, lr: 7.54e-03, grad_scale: 8.0 +2023-03-09 07:08:16,870 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53108.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:08:27,951 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53117.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:08:39,060 INFO [train.py:898] (2/4) Epoch 15, batch 2250, loss[loss=0.1822, simple_loss=0.2703, pruned_loss=0.04708, over 18244.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2628, pruned_loss=0.04402, over 3602100.92 frames. ], batch size: 60, lr: 7.54e-03, grad_scale: 8.0 +2023-03-09 07:09:15,601 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.829e+02 2.883e+02 3.334e+02 3.992e+02 8.132e+02, threshold=6.668e+02, percent-clipped=1.0 +2023-03-09 07:09:36,009 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.65 vs. limit=5.0 +2023-03-09 07:09:37,858 INFO [train.py:898] (2/4) Epoch 15, batch 2300, loss[loss=0.1509, simple_loss=0.2399, pruned_loss=0.03092, over 18542.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2623, pruned_loss=0.04378, over 3593752.86 frames. ], batch size: 49, lr: 7.53e-03, grad_scale: 8.0 +2023-03-09 07:09:43,172 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7573, 5.0394, 4.9716, 4.9943, 4.8192, 5.5432, 5.1278, 4.8740], + device='cuda:2'), covar=tensor([0.1145, 0.0722, 0.0651, 0.0778, 0.1367, 0.0819, 0.0690, 0.1832], + device='cuda:2'), in_proj_covar=tensor([0.0339, 0.0265, 0.0283, 0.0283, 0.0318, 0.0397, 0.0259, 0.0391], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:2') +2023-03-09 07:09:57,465 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.37 vs. limit=5.0 +2023-03-09 07:10:18,775 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53211.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 07:10:37,447 INFO [train.py:898] (2/4) Epoch 15, batch 2350, loss[loss=0.1885, simple_loss=0.2811, pruned_loss=0.04794, over 17075.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2625, pruned_loss=0.04383, over 3581284.57 frames. ], batch size: 78, lr: 7.53e-03, grad_scale: 8.0 +2023-03-09 07:10:54,309 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53241.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:11:14,251 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.169e+02 3.108e+02 3.704e+02 4.390e+02 1.359e+03, threshold=7.409e+02, percent-clipped=1.0 +2023-03-09 07:11:15,591 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53259.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 07:11:36,235 INFO [train.py:898] (2/4) Epoch 15, batch 2400, loss[loss=0.1644, simple_loss=0.2584, pruned_loss=0.03525, over 18565.00 frames. 
], tot_loss[loss=0.1746, simple_loss=0.2619, pruned_loss=0.04366, over 3595084.73 frames. ], batch size: 54, lr: 7.53e-03, grad_scale: 8.0 +2023-03-09 07:12:02,540 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4395, 6.0497, 5.5088, 5.7646, 5.5621, 5.4571, 6.1006, 6.0327], + device='cuda:2'), covar=tensor([0.1165, 0.0711, 0.0398, 0.0708, 0.1403, 0.0732, 0.0536, 0.0671], + device='cuda:2'), in_proj_covar=tensor([0.0575, 0.0483, 0.0355, 0.0513, 0.0700, 0.0509, 0.0687, 0.0513], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 07:12:04,934 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53302.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:12:07,751 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53304.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:12:20,594 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53315.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:12:34,330 INFO [train.py:898] (2/4) Epoch 15, batch 2450, loss[loss=0.2004, simple_loss=0.2997, pruned_loss=0.05052, over 18355.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2617, pruned_loss=0.04351, over 3594689.59 frames. ], batch size: 56, lr: 7.52e-03, grad_scale: 8.0 +2023-03-09 07:13:10,225 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.975e+02 3.035e+02 3.526e+02 4.195e+02 8.583e+02, threshold=7.052e+02, percent-clipped=2.0 +2023-03-09 07:13:19,507 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53365.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:13:21,831 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53367.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:13:32,553 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53376.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:13:33,324 INFO [train.py:898] (2/4) Epoch 15, batch 2500, loss[loss=0.1626, simple_loss=0.2488, pruned_loss=0.03826, over 18284.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2609, pruned_loss=0.04352, over 3597611.03 frames. ], batch size: 49, lr: 7.52e-03, grad_scale: 8.0 +2023-03-09 07:14:09,384 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53408.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:14:14,220 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53412.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:14:32,212 INFO [train.py:898] (2/4) Epoch 15, batch 2550, loss[loss=0.1889, simple_loss=0.288, pruned_loss=0.04493, over 18210.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.262, pruned_loss=0.04388, over 3590454.62 frames. ], batch size: 60, lr: 7.52e-03, grad_scale: 8.0 +2023-03-09 07:14:33,634 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53428.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 07:14:43,993 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. 
limit=2.0 +2023-03-09 07:14:48,320 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7810, 3.7008, 3.6062, 3.2451, 3.5141, 2.8997, 2.8784, 3.7958], + device='cuda:2'), covar=tensor([0.0052, 0.0092, 0.0065, 0.0118, 0.0086, 0.0167, 0.0188, 0.0053], + device='cuda:2'), in_proj_covar=tensor([0.0118, 0.0139, 0.0119, 0.0171, 0.0124, 0.0166, 0.0170, 0.0100], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 07:15:05,465 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53456.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:15:07,570 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.050e+02 2.986e+02 3.579e+02 4.675e+02 1.603e+03, threshold=7.159e+02, percent-clipped=6.0 +2023-03-09 07:15:28,673 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-03-09 07:15:30,224 INFO [train.py:898] (2/4) Epoch 15, batch 2600, loss[loss=0.1666, simple_loss=0.2627, pruned_loss=0.03522, over 18503.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2628, pruned_loss=0.04417, over 3585008.95 frames. ], batch size: 51, lr: 7.51e-03, grad_scale: 8.0 +2023-03-09 07:16:08,374 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0732, 5.5609, 3.1124, 5.3527, 5.2716, 5.6292, 5.3881, 3.0728], + device='cuda:2'), covar=tensor([0.0191, 0.0075, 0.0668, 0.0072, 0.0073, 0.0065, 0.0091, 0.0842], + device='cuda:2'), in_proj_covar=tensor([0.0083, 0.0074, 0.0091, 0.0088, 0.0080, 0.0070, 0.0081, 0.0094], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 07:16:22,799 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-09 07:16:29,559 INFO [train.py:898] (2/4) Epoch 15, batch 2650, loss[loss=0.1663, simple_loss=0.2521, pruned_loss=0.04029, over 18421.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2629, pruned_loss=0.04423, over 3576880.61 frames. ], batch size: 48, lr: 7.51e-03, grad_scale: 8.0 +2023-03-09 07:16:45,522 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.57 vs. limit=2.0 +2023-03-09 07:17:05,492 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.951e+02 2.884e+02 3.324e+02 4.046e+02 9.778e+02, threshold=6.647e+02, percent-clipped=2.0 +2023-03-09 07:17:27,761 INFO [train.py:898] (2/4) Epoch 15, batch 2700, loss[loss=0.1878, simple_loss=0.2813, pruned_loss=0.04718, over 18036.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2626, pruned_loss=0.04415, over 3580697.85 frames. ], batch size: 65, lr: 7.51e-03, grad_scale: 8.0 +2023-03-09 07:17:51,442 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53597.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:18:26,761 INFO [train.py:898] (2/4) Epoch 15, batch 2750, loss[loss=0.1702, simple_loss=0.2525, pruned_loss=0.04394, over 18361.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2626, pruned_loss=0.04379, over 3575033.26 frames. 
], batch size: 46, lr: 7.50e-03, grad_scale: 8.0 +2023-03-09 07:19:03,249 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.144e+02 2.856e+02 3.450e+02 4.147e+02 9.079e+02, threshold=6.900e+02, percent-clipped=4.0 +2023-03-09 07:19:05,845 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53660.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:19:18,600 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53671.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:19:24,960 INFO [train.py:898] (2/4) Epoch 15, batch 2800, loss[loss=0.1952, simple_loss=0.2831, pruned_loss=0.05369, over 17198.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.263, pruned_loss=0.04409, over 3572233.77 frames. ], batch size: 78, lr: 7.50e-03, grad_scale: 8.0 +2023-03-09 07:20:06,955 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53712.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:20:19,993 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53723.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 07:20:23,823 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-03-09 07:20:24,353 INFO [train.py:898] (2/4) Epoch 15, batch 2850, loss[loss=0.1602, simple_loss=0.2456, pruned_loss=0.03741, over 18367.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2629, pruned_loss=0.04381, over 3572116.35 frames. ], batch size: 46, lr: 7.50e-03, grad_scale: 8.0 +2023-03-09 07:21:00,856 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.810e+02 2.667e+02 3.515e+02 4.128e+02 7.427e+02, threshold=7.030e+02, percent-clipped=1.0 +2023-03-09 07:21:03,258 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53760.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:21:22,592 INFO [train.py:898] (2/4) Epoch 15, batch 2900, loss[loss=0.1678, simple_loss=0.2617, pruned_loss=0.03699, over 18373.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2625, pruned_loss=0.04328, over 3584953.14 frames. ], batch size: 50, lr: 7.49e-03, grad_scale: 8.0 +2023-03-09 07:22:21,634 INFO [train.py:898] (2/4) Epoch 15, batch 2950, loss[loss=0.2083, simple_loss=0.2831, pruned_loss=0.06677, over 12925.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2628, pruned_loss=0.04337, over 3586321.34 frames. ], batch size: 130, lr: 7.49e-03, grad_scale: 8.0 +2023-03-09 07:22:58,163 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.012e+02 2.809e+02 3.403e+02 4.048e+02 1.283e+03, threshold=6.805e+02, percent-clipped=2.0 +2023-03-09 07:23:20,622 INFO [train.py:898] (2/4) Epoch 15, batch 3000, loss[loss=0.1507, simple_loss=0.2315, pruned_loss=0.03499, over 18413.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.263, pruned_loss=0.0432, over 3581848.03 frames. ], batch size: 42, lr: 7.49e-03, grad_scale: 8.0 +2023-03-09 07:23:20,622 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 07:23:32,491 INFO [train.py:932] (2/4) Epoch 15, validation: loss=0.1532, simple_loss=0.254, pruned_loss=0.02619, over 944034.00 frames. 
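Note on the validation entries in this log: the [train.py:923] "Computing validation loss" / [train.py:932] pair recurs at regular points (epoch 15, batch 3000 just above, and again at epoch 16, batch 0 further down), and each time the reported losses are averaged over the same fixed dev total of 944034.00 frames. A minimal sketch of such a frame-weighted validation pass follows; `model`, `dev_dl`, and the (loss, simple_loss, pruned_loss, num_frames) return signature are hypothetical names chosen for illustration, not the actual icefall API.

    import torch

    def compute_validation_loss(model, dev_dl, device="cuda:2"):
        # Frame-weighted averages over the whole dev set, so repeated
        # validations report the same total frame count (944034.00 here).
        model.eval()  # freeze dropout/layer-drop for a deterministic pass
        sums = torch.zeros(3, dtype=torch.float64)
        tot_frames = 0.0
        with torch.no_grad():
            for batch in dev_dl:
                # Hypothetical signature: each forward returns the three
                # losses plus the number of frames in the batch.
                loss, simple_loss, pruned_loss, num_frames = model(
                    batch["inputs"].to(device), batch["supervisions"]
                )
                sums += num_frames * torch.tensor(
                    [float(loss), float(simple_loss), float(pruned_loss)],
                    dtype=torch.float64,
                )
                tot_frames += num_frames
        model.train()  # resume training mode afterwards
        avg = (sums / tot_frames).tolist()
        return {"loss": avg[0], "simple_loss": avg[1],
                "pruned_loss": avg[2], "frames": tot_frames}

Weighting by frames rather than by batch is what keeps the reported dev totals constant across validations, whereas the per-batch training lines above report varying frame counts.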
+2023-03-09 07:23:32,492 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 07:23:56,138 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53897.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:24:30,758 INFO [train.py:898] (2/4) Epoch 15, batch 3050, loss[loss=0.1999, simple_loss=0.2849, pruned_loss=0.05751, over 18413.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.262, pruned_loss=0.04313, over 3593146.18 frames. ], batch size: 52, lr: 7.48e-03, grad_scale: 4.0 +2023-03-09 07:24:52,742 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=53945.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:25:08,737 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.036e+02 2.885e+02 3.297e+02 3.882e+02 9.425e+02, threshold=6.594e+02, percent-clipped=2.0 +2023-03-09 07:25:10,158 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53960.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:25:22,666 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53971.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:25:29,421 INFO [train.py:898] (2/4) Epoch 15, batch 3100, loss[loss=0.2043, simple_loss=0.2939, pruned_loss=0.05735, over 18235.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2621, pruned_loss=0.04318, over 3601598.87 frames. ], batch size: 60, lr: 7.48e-03, grad_scale: 4.0 +2023-03-09 07:26:10,180 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=54008.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:26:23,269 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=54019.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:26:27,802 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=54023.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 07:26:31,791 INFO [train.py:898] (2/4) Epoch 15, batch 3150, loss[loss=0.2202, simple_loss=0.2928, pruned_loss=0.07381, over 12695.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2631, pruned_loss=0.04364, over 3591912.02 frames. ], batch size: 130, lr: 7.48e-03, grad_scale: 4.0 +2023-03-09 07:27:09,869 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.040e+02 2.859e+02 3.550e+02 4.150e+02 8.480e+02, threshold=7.099e+02, percent-clipped=1.0 +2023-03-09 07:27:24,062 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=54071.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:27:30,828 INFO [train.py:898] (2/4) Epoch 15, batch 3200, loss[loss=0.1462, simple_loss=0.2332, pruned_loss=0.02961, over 18361.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2622, pruned_loss=0.04367, over 3586133.73 frames. ], batch size: 46, lr: 7.47e-03, grad_scale: 8.0 +2023-03-09 07:27:45,078 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5379, 2.7152, 3.9513, 3.4937, 2.4075, 4.1264, 3.7193, 2.7804], + device='cuda:2'), covar=tensor([0.0432, 0.1340, 0.0267, 0.0366, 0.1608, 0.0228, 0.0492, 0.0952], + device='cuda:2'), in_proj_covar=tensor([0.0203, 0.0231, 0.0185, 0.0152, 0.0221, 0.0200, 0.0231, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 07:28:28,994 INFO [train.py:898] (2/4) Epoch 15, batch 3250, loss[loss=0.176, simple_loss=0.2709, pruned_loss=0.04058, over 17839.00 frames. 
], tot_loss[loss=0.1747, simple_loss=0.2622, pruned_loss=0.04364, over 3596231.00 frames. ], batch size: 70, lr: 7.47e-03, grad_scale: 8.0 +2023-03-09 07:28:45,753 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3308, 5.8538, 5.4324, 5.5960, 5.4327, 5.2968, 5.8960, 5.8312], + device='cuda:2'), covar=tensor([0.1100, 0.0739, 0.0513, 0.0696, 0.1432, 0.0753, 0.0536, 0.0692], + device='cuda:2'), in_proj_covar=tensor([0.0580, 0.0482, 0.0360, 0.0511, 0.0701, 0.0511, 0.0684, 0.0514], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 07:29:07,310 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.872e+02 2.643e+02 3.129e+02 4.018e+02 8.489e+02, threshold=6.258e+02, percent-clipped=1.0 +2023-03-09 07:29:28,037 INFO [train.py:898] (2/4) Epoch 15, batch 3300, loss[loss=0.1826, simple_loss=0.2776, pruned_loss=0.04382, over 18622.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2621, pruned_loss=0.04356, over 3595688.91 frames. ], batch size: 52, lr: 7.46e-03, grad_scale: 8.0 +2023-03-09 07:30:26,986 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3203, 5.2339, 5.4579, 5.5484, 5.1741, 6.0132, 5.6327, 5.3735], + device='cuda:2'), covar=tensor([0.1071, 0.0632, 0.0764, 0.0692, 0.1546, 0.0732, 0.0700, 0.1590], + device='cuda:2'), in_proj_covar=tensor([0.0336, 0.0261, 0.0282, 0.0281, 0.0315, 0.0392, 0.0257, 0.0385], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:2') +2023-03-09 07:30:27,872 INFO [train.py:898] (2/4) Epoch 15, batch 3350, loss[loss=0.1617, simple_loss=0.2562, pruned_loss=0.03362, over 18494.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2618, pruned_loss=0.04359, over 3579086.45 frames. ], batch size: 51, lr: 7.46e-03, grad_scale: 8.0 +2023-03-09 07:31:05,791 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.725e+02 2.934e+02 3.343e+02 4.171e+02 1.510e+03, threshold=6.685e+02, percent-clipped=4.0 +2023-03-09 07:31:26,833 INFO [train.py:898] (2/4) Epoch 15, batch 3400, loss[loss=0.1679, simple_loss=0.2584, pruned_loss=0.03872, over 18530.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2615, pruned_loss=0.04376, over 3567790.06 frames. ], batch size: 49, lr: 7.46e-03, grad_scale: 8.0 +2023-03-09 07:32:24,912 INFO [train.py:898] (2/4) Epoch 15, batch 3450, loss[loss=0.1723, simple_loss=0.2645, pruned_loss=0.04004, over 18288.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2608, pruned_loss=0.04339, over 3577472.51 frames. ], batch size: 49, lr: 7.45e-03, grad_scale: 8.0 +2023-03-09 07:32:34,984 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5057, 3.4031, 2.2102, 4.2462, 2.9922, 4.2654, 2.2170, 3.9416], + device='cuda:2'), covar=tensor([0.0599, 0.0816, 0.1437, 0.0517, 0.0862, 0.0292, 0.1287, 0.0366], + device='cuda:2'), in_proj_covar=tensor([0.0207, 0.0220, 0.0185, 0.0269, 0.0187, 0.0257, 0.0198, 0.0194], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 07:33:02,929 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.097e+02 3.124e+02 3.686e+02 4.996e+02 1.186e+03, threshold=7.372e+02, percent-clipped=7.0 +2023-03-09 07:33:23,275 INFO [train.py:898] (2/4) Epoch 15, batch 3500, loss[loss=0.17, simple_loss=0.2575, pruned_loss=0.04124, over 18296.00 frames. 
], tot_loss[loss=0.1731, simple_loss=0.2607, pruned_loss=0.04281, over 3593819.11 frames. ], batch size: 49, lr: 7.45e-03, grad_scale: 8.0 +2023-03-09 07:34:20,581 INFO [train.py:898] (2/4) Epoch 15, batch 3550, loss[loss=0.1746, simple_loss=0.2663, pruned_loss=0.04149, over 17129.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2613, pruned_loss=0.04309, over 3588808.89 frames. ], batch size: 78, lr: 7.45e-03, grad_scale: 8.0 +2023-03-09 07:34:54,892 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.932e+02 3.002e+02 3.469e+02 4.157e+02 6.561e+02, threshold=6.938e+02, percent-clipped=0.0 +2023-03-09 07:35:13,937 INFO [train.py:898] (2/4) Epoch 15, batch 3600, loss[loss=0.1878, simple_loss=0.2769, pruned_loss=0.04936, over 16019.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2616, pruned_loss=0.0436, over 3585931.27 frames. ], batch size: 94, lr: 7.44e-03, grad_scale: 8.0 +2023-03-09 07:36:16,272 INFO [train.py:898] (2/4) Epoch 16, batch 0, loss[loss=0.1791, simple_loss=0.2688, pruned_loss=0.04466, over 18486.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2688, pruned_loss=0.04466, over 18486.00 frames. ], batch size: 51, lr: 7.20e-03, grad_scale: 8.0 +2023-03-09 07:36:16,272 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 07:36:28,069 INFO [train.py:932] (2/4) Epoch 16, validation: loss=0.1541, simple_loss=0.2552, pruned_loss=0.02651, over 944034.00 frames. +2023-03-09 07:36:28,070 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 07:37:06,299 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9532, 3.8893, 3.8060, 3.4209, 3.6800, 2.9590, 3.0688, 3.8807], + device='cuda:2'), covar=tensor([0.0046, 0.0074, 0.0059, 0.0117, 0.0069, 0.0177, 0.0171, 0.0056], + device='cuda:2'), in_proj_covar=tensor([0.0118, 0.0138, 0.0118, 0.0170, 0.0122, 0.0165, 0.0168, 0.0100], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 07:37:24,164 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.170e+02 3.007e+02 3.556e+02 4.370e+02 7.449e+02, threshold=7.113e+02, percent-clipped=5.0 +2023-03-09 07:37:26,543 INFO [train.py:898] (2/4) Epoch 16, batch 50, loss[loss=0.2069, simple_loss=0.282, pruned_loss=0.06593, over 12107.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2628, pruned_loss=0.04275, over 805237.54 frames. ], batch size: 130, lr: 7.20e-03, grad_scale: 8.0 +2023-03-09 07:38:25,630 INFO [train.py:898] (2/4) Epoch 16, batch 100, loss[loss=0.1799, simple_loss=0.267, pruned_loss=0.04636, over 18138.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2608, pruned_loss=0.04152, over 1433270.56 frames. ], batch size: 62, lr: 7.20e-03, grad_scale: 8.0 +2023-03-09 07:39:21,650 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.042e+02 2.818e+02 3.254e+02 3.972e+02 9.345e+02, threshold=6.508e+02, percent-clipped=3.0 +2023-03-09 07:39:23,840 INFO [train.py:898] (2/4) Epoch 16, batch 150, loss[loss=0.1743, simple_loss=0.2683, pruned_loss=0.0402, over 18288.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.26, pruned_loss=0.04147, over 1918211.06 frames. 
], batch size: 57, lr: 7.19e-03, grad_scale: 8.0 +2023-03-09 07:39:27,631 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54664.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 07:39:47,465 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4950, 3.2654, 1.9441, 4.1401, 2.8945, 4.0206, 2.2117, 3.6857], + device='cuda:2'), covar=tensor([0.0573, 0.0880, 0.1485, 0.0502, 0.0907, 0.0273, 0.1257, 0.0429], + device='cuda:2'), in_proj_covar=tensor([0.0207, 0.0220, 0.0185, 0.0268, 0.0187, 0.0254, 0.0196, 0.0193], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 07:40:22,366 INFO [train.py:898] (2/4) Epoch 16, batch 200, loss[loss=0.1612, simple_loss=0.261, pruned_loss=0.03073, over 18298.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2621, pruned_loss=0.0424, over 2281823.73 frames. ], batch size: 54, lr: 7.19e-03, grad_scale: 8.0 +2023-03-09 07:40:38,420 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54725.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 07:40:39,499 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4102, 2.6728, 2.3848, 2.8204, 3.5480, 3.4345, 3.0429, 2.9064], + device='cuda:2'), covar=tensor([0.0239, 0.0309, 0.0582, 0.0367, 0.0153, 0.0140, 0.0314, 0.0345], + device='cuda:2'), in_proj_covar=tensor([0.0130, 0.0127, 0.0161, 0.0150, 0.0114, 0.0105, 0.0145, 0.0146], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 07:41:17,922 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.955e+02 3.015e+02 3.636e+02 4.590e+02 9.600e+02, threshold=7.273e+02, percent-clipped=5.0 +2023-03-09 07:41:18,574 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-03-09 07:41:20,199 INFO [train.py:898] (2/4) Epoch 16, batch 250, loss[loss=0.1734, simple_loss=0.2616, pruned_loss=0.04257, over 18305.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2629, pruned_loss=0.04273, over 2575413.72 frames. ], batch size: 54, lr: 7.19e-03, grad_scale: 8.0 +2023-03-09 07:41:49,846 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3344, 3.2303, 3.2531, 2.9149, 3.1676, 2.5385, 2.6698, 3.2972], + device='cuda:2'), covar=tensor([0.0059, 0.0090, 0.0068, 0.0109, 0.0095, 0.0165, 0.0164, 0.0062], + device='cuda:2'), in_proj_covar=tensor([0.0117, 0.0138, 0.0117, 0.0168, 0.0123, 0.0164, 0.0166, 0.0100], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 07:42:17,954 INFO [train.py:898] (2/4) Epoch 16, batch 300, loss[loss=0.1776, simple_loss=0.2695, pruned_loss=0.04279, over 18475.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2631, pruned_loss=0.04341, over 2799966.54 frames. ], batch size: 59, lr: 7.18e-03, grad_scale: 8.0 +2023-03-09 07:42:40,207 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.18 vs. limit=5.0 +2023-03-09 07:43:14,226 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.101e+02 2.934e+02 3.524e+02 4.515e+02 1.434e+03, threshold=7.048e+02, percent-clipped=4.0 +2023-03-09 07:43:16,400 INFO [train.py:898] (2/4) Epoch 16, batch 350, loss[loss=0.1912, simple_loss=0.2734, pruned_loss=0.05455, over 18630.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2626, pruned_loss=0.04353, over 2978532.64 frames. 
], batch size: 52, lr: 7.18e-03, grad_scale: 8.0 +2023-03-09 07:43:17,953 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54862.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:43:33,413 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2341, 5.3885, 2.7215, 5.2316, 5.0182, 5.3597, 5.1129, 2.3299], + device='cuda:2'), covar=tensor([0.0178, 0.0094, 0.0933, 0.0116, 0.0104, 0.0116, 0.0150, 0.1652], + device='cuda:2'), in_proj_covar=tensor([0.0083, 0.0074, 0.0091, 0.0087, 0.0080, 0.0071, 0.0081, 0.0095], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 07:44:15,026 INFO [train.py:898] (2/4) Epoch 16, batch 400, loss[loss=0.1562, simple_loss=0.2451, pruned_loss=0.03361, over 18341.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2627, pruned_loss=0.04336, over 3114176.57 frames. ], batch size: 46, lr: 7.18e-03, grad_scale: 8.0 +2023-03-09 07:44:26,685 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9716, 3.8536, 3.7746, 3.4212, 3.6832, 3.0598, 3.0108, 3.9550], + device='cuda:2'), covar=tensor([0.0044, 0.0071, 0.0062, 0.0104, 0.0073, 0.0150, 0.0176, 0.0076], + device='cuda:2'), in_proj_covar=tensor([0.0119, 0.0139, 0.0119, 0.0171, 0.0124, 0.0166, 0.0168, 0.0101], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 07:44:28,937 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54923.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 07:44:46,443 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 07:44:55,742 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.48 vs. limit=5.0 +2023-03-09 07:45:00,700 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7910, 4.3202, 4.5664, 3.3408, 3.6966, 3.3903, 2.6346, 2.4725], + device='cuda:2'), covar=tensor([0.0221, 0.0215, 0.0079, 0.0333, 0.0376, 0.0245, 0.0743, 0.0874], + device='cuda:2'), in_proj_covar=tensor([0.0066, 0.0055, 0.0056, 0.0065, 0.0087, 0.0063, 0.0077, 0.0083], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 07:45:00,737 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5551, 3.3994, 2.1362, 4.3236, 2.9499, 4.2597, 2.3338, 3.8398], + device='cuda:2'), covar=tensor([0.0636, 0.0818, 0.1471, 0.0472, 0.0934, 0.0332, 0.1215, 0.0431], + device='cuda:2'), in_proj_covar=tensor([0.0207, 0.0221, 0.0186, 0.0268, 0.0188, 0.0256, 0.0196, 0.0196], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 07:45:11,971 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.963e+02 2.717e+02 3.272e+02 3.926e+02 7.148e+02, threshold=6.544e+02, percent-clipped=1.0 +2023-03-09 07:45:13,184 INFO [train.py:898] (2/4) Epoch 16, batch 450, loss[loss=0.1743, simple_loss=0.2592, pruned_loss=0.0447, over 18298.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2628, pruned_loss=0.04367, over 3218040.64 frames. 
], batch size: 54, lr: 7.17e-03, grad_scale: 8.0 +2023-03-09 07:45:36,124 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8761, 3.5608, 4.9317, 4.2376, 3.2458, 2.9063, 4.2996, 5.0885], + device='cuda:2'), covar=tensor([0.0795, 0.1431, 0.0147, 0.0379, 0.0931, 0.1188, 0.0413, 0.0205], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0266, 0.0130, 0.0174, 0.0184, 0.0185, 0.0186, 0.0175], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 07:46:12,498 INFO [train.py:898] (2/4) Epoch 16, batch 500, loss[loss=0.1543, simple_loss=0.2326, pruned_loss=0.03802, over 18412.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2627, pruned_loss=0.04385, over 3282513.60 frames. ], batch size: 42, lr: 7.17e-03, grad_scale: 8.0 +2023-03-09 07:46:23,065 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55020.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 07:46:41,660 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.57 vs. limit=5.0 +2023-03-09 07:46:41,853 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.73 vs. limit=5.0 +2023-03-09 07:47:02,752 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8388, 2.4174, 2.1846, 2.4109, 2.9800, 2.9777, 2.7811, 2.5794], + device='cuda:2'), covar=tensor([0.0249, 0.0280, 0.0580, 0.0427, 0.0201, 0.0163, 0.0380, 0.0399], + device='cuda:2'), in_proj_covar=tensor([0.0130, 0.0126, 0.0160, 0.0149, 0.0115, 0.0105, 0.0145, 0.0145], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 07:47:09,467 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.142e+02 2.826e+02 3.526e+02 4.225e+02 1.034e+03, threshold=7.052e+02, percent-clipped=4.0 +2023-03-09 07:47:10,627 INFO [train.py:898] (2/4) Epoch 16, batch 550, loss[loss=0.1887, simple_loss=0.2771, pruned_loss=0.05013, over 17960.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2624, pruned_loss=0.04349, over 3357048.97 frames. ], batch size: 65, lr: 7.17e-03, grad_scale: 8.0 +2023-03-09 07:47:21,502 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6156, 2.1634, 2.5582, 2.5089, 3.2217, 4.8290, 4.4466, 3.3737], + device='cuda:2'), covar=tensor([0.1602, 0.2366, 0.2958, 0.1757, 0.2098, 0.0174, 0.0402, 0.0791], + device='cuda:2'), in_proj_covar=tensor([0.0273, 0.0331, 0.0356, 0.0264, 0.0379, 0.0216, 0.0281, 0.0233], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 07:48:09,191 INFO [train.py:898] (2/4) Epoch 16, batch 600, loss[loss=0.1753, simple_loss=0.2639, pruned_loss=0.04332, over 18385.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2623, pruned_loss=0.04323, over 3406969.72 frames. 
], batch size: 52, lr: 7.16e-03, grad_scale: 8.0 +2023-03-09 07:48:51,688 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55148.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:49:05,764 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.722e+02 2.752e+02 3.250e+02 4.071e+02 8.362e+02, threshold=6.500e+02, percent-clipped=2.0 +2023-03-09 07:49:06,164 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8876, 4.9439, 5.0425, 4.7287, 4.6856, 4.7912, 5.1045, 5.1437], + device='cuda:2'), covar=tensor([0.0062, 0.0062, 0.0053, 0.0091, 0.0066, 0.0135, 0.0062, 0.0081], + device='cuda:2'), in_proj_covar=tensor([0.0089, 0.0065, 0.0069, 0.0088, 0.0071, 0.0098, 0.0083, 0.0083], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 07:49:06,961 INFO [train.py:898] (2/4) Epoch 16, batch 650, loss[loss=0.1734, simple_loss=0.2711, pruned_loss=0.03782, over 18410.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2617, pruned_loss=0.04287, over 3460246.27 frames. ], batch size: 52, lr: 7.16e-03, grad_scale: 8.0 +2023-03-09 07:50:03,500 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55209.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:50:05,816 INFO [train.py:898] (2/4) Epoch 16, batch 700, loss[loss=0.144, simple_loss=0.2296, pruned_loss=0.02921, over 18486.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2616, pruned_loss=0.04277, over 3489880.71 frames. ], batch size: 44, lr: 7.16e-03, grad_scale: 4.0 +2023-03-09 07:50:06,136 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55211.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:50:14,568 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55218.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 07:51:02,974 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.749e+02 2.606e+02 3.222e+02 3.898e+02 7.462e+02, threshold=6.443e+02, percent-clipped=3.0 +2023-03-09 07:51:03,011 INFO [train.py:898] (2/4) Epoch 16, batch 750, loss[loss=0.1443, simple_loss=0.2363, pruned_loss=0.02614, over 18545.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2615, pruned_loss=0.04283, over 3515591.09 frames. ], batch size: 49, lr: 7.15e-03, grad_scale: 4.0 +2023-03-09 07:51:17,399 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55272.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:52:01,616 INFO [train.py:898] (2/4) Epoch 16, batch 800, loss[loss=0.2127, simple_loss=0.2843, pruned_loss=0.07054, over 12403.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2612, pruned_loss=0.04305, over 3521163.22 frames. ], batch size: 129, lr: 7.15e-03, grad_scale: 8.0 +2023-03-09 07:52:05,777 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-09 07:52:13,192 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55320.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 07:53:00,379 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.160e+02 2.840e+02 3.265e+02 3.800e+02 8.424e+02, threshold=6.530e+02, percent-clipped=5.0 +2023-03-09 07:53:00,405 INFO [train.py:898] (2/4) Epoch 16, batch 850, loss[loss=0.1544, simple_loss=0.2353, pruned_loss=0.03674, over 18263.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2608, pruned_loss=0.04256, over 3544730.21 frames. 
], batch size: 45, lr: 7.15e-03, grad_scale: 8.0 +2023-03-09 07:53:08,442 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55368.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 07:53:59,236 INFO [train.py:898] (2/4) Epoch 16, batch 900, loss[loss=0.1897, simple_loss=0.2833, pruned_loss=0.04799, over 16075.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2604, pruned_loss=0.04269, over 3547053.10 frames. ], batch size: 94, lr: 7.15e-03, grad_scale: 8.0 +2023-03-09 07:54:04,232 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9333, 3.7872, 5.0609, 4.4270, 3.2975, 3.1267, 4.4241, 5.2227], + device='cuda:2'), covar=tensor([0.0711, 0.1460, 0.0139, 0.0349, 0.0852, 0.1024, 0.0361, 0.0252], + device='cuda:2'), in_proj_covar=tensor([0.0141, 0.0262, 0.0128, 0.0172, 0.0181, 0.0181, 0.0183, 0.0172], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 07:54:36,574 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-09 07:54:57,055 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.979e+02 2.881e+02 3.338e+02 4.155e+02 1.042e+03, threshold=6.676e+02, percent-clipped=4.0 +2023-03-09 07:54:57,081 INFO [train.py:898] (2/4) Epoch 16, batch 950, loss[loss=0.1469, simple_loss=0.2373, pruned_loss=0.0283, over 18379.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2609, pruned_loss=0.04267, over 3554345.87 frames. ], batch size: 46, lr: 7.14e-03, grad_scale: 8.0 +2023-03-09 07:55:43,832 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.84 vs. limit=2.0 +2023-03-09 07:55:46,656 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55504.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:55:54,397 INFO [train.py:898] (2/4) Epoch 16, batch 1000, loss[loss=0.1755, simple_loss=0.2653, pruned_loss=0.04287, over 18303.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2607, pruned_loss=0.04254, over 3569484.02 frames. ], batch size: 54, lr: 7.14e-03, grad_scale: 8.0 +2023-03-09 07:56:03,323 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55518.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 07:56:53,745 INFO [train.py:898] (2/4) Epoch 16, batch 1050, loss[loss=0.1875, simple_loss=0.276, pruned_loss=0.04949, over 17116.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2619, pruned_loss=0.04304, over 3562283.48 frames. ], batch size: 78, lr: 7.14e-03, grad_scale: 4.0 +2023-03-09 07:56:54,804 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.207e+02 2.979e+02 3.472e+02 4.235e+02 7.011e+02, threshold=6.944e+02, percent-clipped=2.0 +2023-03-09 07:57:00,130 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55566.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:57:01,344 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55567.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 07:57:52,416 INFO [train.py:898] (2/4) Epoch 16, batch 1100, loss[loss=0.1683, simple_loss=0.253, pruned_loss=0.04178, over 18565.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2609, pruned_loss=0.04283, over 3577880.23 frames. ], batch size: 45, lr: 7.13e-03, grad_scale: 4.0 +2023-03-09 07:58:52,113 INFO [train.py:898] (2/4) Epoch 16, batch 1150, loss[loss=0.18, simple_loss=0.2689, pruned_loss=0.0455, over 18076.00 frames. 
], tot_loss[loss=0.1731, simple_loss=0.2609, pruned_loss=0.04269, over 3581640.13 frames. ], batch size: 62, lr: 7.13e-03, grad_scale: 4.0 +2023-03-09 07:58:53,244 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.682e+02 3.124e+02 3.807e+02 4.865e+02 2.142e+03, threshold=7.614e+02, percent-clipped=11.0 +2023-03-09 07:59:50,936 INFO [train.py:898] (2/4) Epoch 16, batch 1200, loss[loss=0.1643, simple_loss=0.2574, pruned_loss=0.03559, over 16354.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2613, pruned_loss=0.04251, over 3579728.76 frames. ], batch size: 94, lr: 7.13e-03, grad_scale: 8.0 +2023-03-09 08:00:35,018 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55748.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:00:36,337 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.28 vs. limit=5.0 +2023-03-09 08:00:48,846 INFO [train.py:898] (2/4) Epoch 16, batch 1250, loss[loss=0.1707, simple_loss=0.2601, pruned_loss=0.04064, over 18623.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2616, pruned_loss=0.04285, over 3594903.64 frames. ], batch size: 52, lr: 7.12e-03, grad_scale: 8.0 +2023-03-09 08:00:49,972 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.937e+02 2.771e+02 3.226e+02 3.849e+02 7.150e+02, threshold=6.452e+02, percent-clipped=0.0 +2023-03-09 08:01:30,940 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7294, 5.0424, 4.9988, 5.3409, 4.7354, 5.0324, 3.8690, 5.0698], + device='cuda:2'), covar=tensor([0.0298, 0.0634, 0.0472, 0.0320, 0.0431, 0.0381, 0.2411, 0.0367], + device='cuda:2'), in_proj_covar=tensor([0.0197, 0.0244, 0.0235, 0.0289, 0.0249, 0.0245, 0.0295, 0.0237], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 08:01:39,859 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55804.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:01:45,640 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55809.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:01:47,523 INFO [train.py:898] (2/4) Epoch 16, batch 1300, loss[loss=0.1675, simple_loss=0.2581, pruned_loss=0.03847, over 18550.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2616, pruned_loss=0.0427, over 3587570.72 frames. ], batch size: 49, lr: 7.12e-03, grad_scale: 8.0 +2023-03-09 08:02:17,640 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6850, 2.1727, 2.5832, 2.6113, 3.1800, 4.9920, 4.6985, 3.5297], + device='cuda:2'), covar=tensor([0.1580, 0.2461, 0.2780, 0.1786, 0.2336, 0.0229, 0.0374, 0.0791], + device='cuda:2'), in_proj_covar=tensor([0.0275, 0.0331, 0.0357, 0.0265, 0.0381, 0.0219, 0.0283, 0.0234], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 08:02:30,148 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55848.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:02:34,746 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55852.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:02:45,455 INFO [train.py:898] (2/4) Epoch 16, batch 1350, loss[loss=0.1622, simple_loss=0.2538, pruned_loss=0.03526, over 18281.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2617, pruned_loss=0.04282, over 3587582.89 frames. 
], batch size: 49, lr: 7.12e-03, grad_scale: 8.0 +2023-03-09 08:02:46,543 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.956e+02 2.894e+02 3.394e+02 4.145e+02 8.688e+02, threshold=6.789e+02, percent-clipped=2.0 +2023-03-09 08:02:52,184 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55867.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:03:42,193 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55909.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:03:43,925 INFO [train.py:898] (2/4) Epoch 16, batch 1400, loss[loss=0.1603, simple_loss=0.2424, pruned_loss=0.03904, over 18244.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2609, pruned_loss=0.04258, over 3599932.87 frames. ], batch size: 45, lr: 7.11e-03, grad_scale: 8.0 +2023-03-09 08:03:48,615 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=55915.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:04:42,841 INFO [train.py:898] (2/4) Epoch 16, batch 1450, loss[loss=0.1641, simple_loss=0.2545, pruned_loss=0.03686, over 18536.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2607, pruned_loss=0.0425, over 3596749.03 frames. ], batch size: 49, lr: 7.11e-03, grad_scale: 8.0 +2023-03-09 08:04:43,986 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.940e+02 2.882e+02 3.410e+02 3.952e+02 8.442e+02, threshold=6.821e+02, percent-clipped=3.0 +2023-03-09 08:04:56,984 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3558, 2.7025, 2.2541, 2.6984, 3.4281, 3.3533, 2.8890, 2.7840], + device='cuda:2'), covar=tensor([0.0187, 0.0287, 0.0656, 0.0360, 0.0186, 0.0171, 0.0433, 0.0375], + device='cuda:2'), in_proj_covar=tensor([0.0129, 0.0126, 0.0159, 0.0149, 0.0116, 0.0106, 0.0147, 0.0145], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 08:05:46,797 INFO [train.py:898] (2/4) Epoch 16, batch 1500, loss[loss=0.1653, simple_loss=0.2627, pruned_loss=0.03401, over 18354.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2612, pruned_loss=0.04274, over 3587016.17 frames. ], batch size: 55, lr: 7.11e-03, grad_scale: 8.0 +2023-03-09 08:05:54,069 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7551, 3.6827, 5.2239, 4.5247, 3.3307, 3.2134, 4.6191, 5.3959], + device='cuda:2'), covar=tensor([0.0798, 0.1694, 0.0117, 0.0322, 0.0857, 0.1026, 0.0322, 0.0168], + device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0264, 0.0130, 0.0175, 0.0185, 0.0185, 0.0186, 0.0176], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 08:06:43,890 INFO [train.py:898] (2/4) Epoch 16, batch 1550, loss[loss=0.1886, simple_loss=0.279, pruned_loss=0.04908, over 17756.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2601, pruned_loss=0.04236, over 3591996.15 frames. ], batch size: 70, lr: 7.10e-03, grad_scale: 8.0 +2023-03-09 08:06:44,927 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.796e+02 2.823e+02 3.373e+02 3.914e+02 6.631e+02, threshold=6.746e+02, percent-clipped=0.0 +2023-03-09 08:07:33,339 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56104.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:07:41,131 INFO [train.py:898] (2/4) Epoch 16, batch 1600, loss[loss=0.1893, simple_loss=0.2784, pruned_loss=0.05015, over 16006.00 frames. 
], tot_loss[loss=0.1723, simple_loss=0.2599, pruned_loss=0.04238, over 3590552.00 frames. ], batch size: 94, lr: 7.10e-03, grad_scale: 8.0 +2023-03-09 08:07:45,364 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9262, 3.1021, 4.4749, 3.8490, 2.9720, 4.7729, 4.0604, 3.0702], + device='cuda:2'), covar=tensor([0.0390, 0.1193, 0.0188, 0.0349, 0.1283, 0.0155, 0.0433, 0.0824], + device='cuda:2'), in_proj_covar=tensor([0.0199, 0.0226, 0.0183, 0.0150, 0.0216, 0.0198, 0.0229, 0.0191], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 08:08:21,140 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56145.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:08:39,551 INFO [train.py:898] (2/4) Epoch 16, batch 1650, loss[loss=0.1923, simple_loss=0.2873, pruned_loss=0.04862, over 18376.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2605, pruned_loss=0.04244, over 3583721.36 frames. ], batch size: 55, lr: 7.10e-03, grad_scale: 8.0 +2023-03-09 08:08:40,624 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.176e+02 2.932e+02 3.714e+02 4.558e+02 1.092e+03, threshold=7.428e+02, percent-clipped=5.0 +2023-03-09 08:09:20,836 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.3693, 3.0532, 3.9210, 3.6572, 2.9971, 2.9464, 3.6302, 4.0070], + device='cuda:2'), covar=tensor([0.0838, 0.1388, 0.0229, 0.0378, 0.0825, 0.0947, 0.0421, 0.0326], + device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0264, 0.0130, 0.0175, 0.0184, 0.0184, 0.0186, 0.0176], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 08:09:30,609 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56204.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:09:33,103 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56206.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:09:38,405 INFO [train.py:898] (2/4) Epoch 16, batch 1700, loss[loss=0.2142, simple_loss=0.2904, pruned_loss=0.06902, over 13076.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2607, pruned_loss=0.04262, over 3572454.98 frames. ], batch size: 130, lr: 7.09e-03, grad_scale: 8.0 +2023-03-09 08:10:31,418 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6327, 3.6525, 5.0542, 4.2959, 3.1994, 2.9896, 4.3604, 5.2183], + device='cuda:2'), covar=tensor([0.0805, 0.1755, 0.0149, 0.0403, 0.0927, 0.1156, 0.0382, 0.0188], + device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0264, 0.0130, 0.0174, 0.0184, 0.0184, 0.0186, 0.0175], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 08:10:36,642 INFO [train.py:898] (2/4) Epoch 16, batch 1750, loss[loss=0.156, simple_loss=0.239, pruned_loss=0.03645, over 18485.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2611, pruned_loss=0.04275, over 3578270.08 frames. 
], batch size: 44, lr: 7.09e-03, grad_scale: 8.0 +2023-03-09 08:10:37,715 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.941e+02 2.914e+02 3.484e+02 4.158e+02 1.053e+03, threshold=6.969e+02, percent-clipped=1.0 +2023-03-09 08:10:59,193 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.7193, 5.6048, 5.2932, 5.5800, 5.5797, 4.9547, 5.5120, 5.2254], + device='cuda:2'), covar=tensor([0.0366, 0.0376, 0.1250, 0.0716, 0.0492, 0.0374, 0.0360, 0.1042], + device='cuda:2'), in_proj_covar=tensor([0.0448, 0.0511, 0.0658, 0.0403, 0.0398, 0.0468, 0.0500, 0.0632], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 08:11:19,971 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5659, 3.6212, 4.9468, 4.2452, 3.1614, 2.9089, 4.2849, 5.1167], + device='cuda:2'), covar=tensor([0.0865, 0.1499, 0.0193, 0.0422, 0.0969, 0.1225, 0.0433, 0.0317], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0265, 0.0131, 0.0175, 0.0185, 0.0186, 0.0187, 0.0177], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 08:11:29,065 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 08:11:36,016 INFO [train.py:898] (2/4) Epoch 16, batch 1800, loss[loss=0.2004, simple_loss=0.2862, pruned_loss=0.05733, over 12478.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2612, pruned_loss=0.04279, over 3569415.61 frames. ], batch size: 130, lr: 7.09e-03, grad_scale: 8.0 +2023-03-09 08:11:38,879 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-09 08:12:34,548 INFO [train.py:898] (2/4) Epoch 16, batch 1850, loss[loss=0.1768, simple_loss=0.2633, pruned_loss=0.04516, over 16222.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2616, pruned_loss=0.04316, over 3565177.65 frames. ], batch size: 94, lr: 7.09e-03, grad_scale: 8.0 +2023-03-09 08:12:35,496 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.821e+02 3.124e+02 3.834e+02 4.699e+02 1.584e+03, threshold=7.668e+02, percent-clipped=5.0 +2023-03-09 08:13:24,896 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56404.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:13:32,351 INFO [train.py:898] (2/4) Epoch 16, batch 1900, loss[loss=0.1742, simple_loss=0.2609, pruned_loss=0.04381, over 18284.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2619, pruned_loss=0.04332, over 3567309.33 frames. ], batch size: 57, lr: 7.08e-03, grad_scale: 8.0 +2023-03-09 08:14:20,037 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56452.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:14:30,010 INFO [train.py:898] (2/4) Epoch 16, batch 1950, loss[loss=0.1928, simple_loss=0.2721, pruned_loss=0.05673, over 12334.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2619, pruned_loss=0.04336, over 3563866.93 frames. ], batch size: 129, lr: 7.08e-03, grad_scale: 8.0 +2023-03-09 08:14:31,033 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.810e+02 3.032e+02 3.372e+02 4.243e+02 1.557e+03, threshold=6.744e+02, percent-clipped=4.0 +2023-03-09 08:14:35,870 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. 
limit=2.0 +2023-03-09 08:14:44,302 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9039, 5.5178, 5.4563, 5.4541, 5.0696, 5.4165, 4.8283, 5.3537], + device='cuda:2'), covar=tensor([0.0206, 0.0232, 0.0171, 0.0331, 0.0331, 0.0175, 0.1034, 0.0269], + device='cuda:2'), in_proj_covar=tensor([0.0199, 0.0248, 0.0243, 0.0297, 0.0253, 0.0250, 0.0300, 0.0242], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 08:15:16,852 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56501.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:15:20,506 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56504.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:15:27,956 INFO [train.py:898] (2/4) Epoch 16, batch 2000, loss[loss=0.1524, simple_loss=0.2347, pruned_loss=0.03505, over 18490.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2625, pruned_loss=0.04334, over 3571336.23 frames. ], batch size: 44, lr: 7.08e-03, grad_scale: 8.0 +2023-03-09 08:15:42,593 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56523.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 08:16:16,964 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56552.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:16:19,312 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3006, 5.2175, 5.5711, 5.5820, 5.2220, 6.1131, 5.7146, 5.3832], + device='cuda:2'), covar=tensor([0.1080, 0.0609, 0.0679, 0.0591, 0.1383, 0.0699, 0.0617, 0.1564], + device='cuda:2'), in_proj_covar=tensor([0.0337, 0.0268, 0.0286, 0.0283, 0.0317, 0.0396, 0.0261, 0.0388], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:2') +2023-03-09 08:16:26,998 INFO [train.py:898] (2/4) Epoch 16, batch 2050, loss[loss=0.1543, simple_loss=0.2307, pruned_loss=0.03899, over 18410.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2606, pruned_loss=0.0425, over 3578428.78 frames. ], batch size: 43, lr: 7.07e-03, grad_scale: 8.0 +2023-03-09 08:16:28,118 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.109e+02 2.908e+02 3.297e+02 4.101e+02 7.290e+02, threshold=6.593e+02, percent-clipped=1.0 +2023-03-09 08:16:54,707 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56584.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 08:17:26,197 INFO [train.py:898] (2/4) Epoch 16, batch 2100, loss[loss=0.1727, simple_loss=0.266, pruned_loss=0.0397, over 16996.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.261, pruned_loss=0.04233, over 3579804.06 frames. ], batch size: 78, lr: 7.07e-03, grad_scale: 4.0 +2023-03-09 08:18:25,143 INFO [train.py:898] (2/4) Epoch 16, batch 2150, loss[loss=0.1887, simple_loss=0.2763, pruned_loss=0.05055, over 18295.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2618, pruned_loss=0.0428, over 3566848.92 frames. ], batch size: 57, lr: 7.07e-03, grad_scale: 4.0 +2023-03-09 08:18:27,230 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.074e+02 2.907e+02 3.397e+02 4.221e+02 7.037e+02, threshold=6.794e+02, percent-clipped=2.0 +2023-03-09 08:19:23,204 INFO [train.py:898] (2/4) Epoch 16, batch 2200, loss[loss=0.1744, simple_loss=0.2663, pruned_loss=0.04122, over 18359.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.262, pruned_loss=0.04282, over 3572450.91 frames. 
], batch size: 56, lr: 7.06e-03, grad_scale: 4.0 +2023-03-09 08:20:21,590 INFO [train.py:898] (2/4) Epoch 16, batch 2250, loss[loss=0.1645, simple_loss=0.2468, pruned_loss=0.04113, over 18501.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2607, pruned_loss=0.04273, over 3578139.49 frames. ], batch size: 44, lr: 7.06e-03, grad_scale: 4.0 +2023-03-09 08:20:23,717 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.871e+02 2.822e+02 3.228e+02 3.719e+02 7.082e+02, threshold=6.456e+02, percent-clipped=1.0 +2023-03-09 08:21:02,947 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3847, 5.4191, 4.9319, 5.2704, 5.3769, 4.6906, 5.1982, 5.0087], + device='cuda:2'), covar=tensor([0.0448, 0.0366, 0.1316, 0.0780, 0.0548, 0.0450, 0.0446, 0.0857], + device='cuda:2'), in_proj_covar=tensor([0.0449, 0.0512, 0.0663, 0.0409, 0.0400, 0.0469, 0.0500, 0.0632], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 08:21:09,285 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56801.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:21:11,626 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56803.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:21:20,393 INFO [train.py:898] (2/4) Epoch 16, batch 2300, loss[loss=0.1824, simple_loss=0.2782, pruned_loss=0.04328, over 18222.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.261, pruned_loss=0.04269, over 3587456.55 frames. ], batch size: 60, lr: 7.06e-03, grad_scale: 4.0 +2023-03-09 08:22:03,642 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0607, 4.4853, 4.2436, 4.2166, 4.0558, 4.7065, 4.4006, 4.0839], + device='cuda:2'), covar=tensor([0.1517, 0.1324, 0.0977, 0.0968, 0.1808, 0.1461, 0.0829, 0.2531], + device='cuda:2'), in_proj_covar=tensor([0.0342, 0.0273, 0.0291, 0.0292, 0.0321, 0.0402, 0.0266, 0.0396], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:2') +2023-03-09 08:22:04,698 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=56849.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:22:17,102 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-03-09 08:22:18,546 INFO [train.py:898] (2/4) Epoch 16, batch 2350, loss[loss=0.1618, simple_loss=0.2548, pruned_loss=0.03436, over 18395.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2607, pruned_loss=0.04274, over 3577618.82 frames. 
], batch size: 52, lr: 7.05e-03, grad_scale: 4.0 +2023-03-09 08:22:18,888 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7793, 4.8033, 4.8685, 4.6418, 4.6213, 4.6914, 5.0249, 4.9343], + device='cuda:2'), covar=tensor([0.0077, 0.0067, 0.0068, 0.0102, 0.0068, 0.0137, 0.0064, 0.0090], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0065, 0.0068, 0.0088, 0.0072, 0.0098, 0.0083, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 08:22:20,840 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.203e+02 2.978e+02 3.446e+02 4.170e+02 7.854e+02, threshold=6.893e+02, percent-clipped=4.0 +2023-03-09 08:22:22,384 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56864.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:22:39,132 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56879.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 08:22:49,504 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-03-09 08:23:13,910 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56908.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:23:17,003 INFO [train.py:898] (2/4) Epoch 16, batch 2400, loss[loss=0.1631, simple_loss=0.2653, pruned_loss=0.0305, over 18561.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2609, pruned_loss=0.04258, over 3576339.48 frames. ], batch size: 54, lr: 7.05e-03, grad_scale: 8.0 +2023-03-09 08:23:57,272 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7896, 4.5151, 4.5891, 3.5508, 3.8944, 3.4153, 2.5966, 2.2977], + device='cuda:2'), covar=tensor([0.0205, 0.0154, 0.0086, 0.0290, 0.0272, 0.0226, 0.0741, 0.0911], + device='cuda:2'), in_proj_covar=tensor([0.0064, 0.0052, 0.0055, 0.0063, 0.0083, 0.0061, 0.0073, 0.0080], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0004, 0.0006, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 08:24:15,262 INFO [train.py:898] (2/4) Epoch 16, batch 2450, loss[loss=0.1818, simple_loss=0.2719, pruned_loss=0.04583, over 18098.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.261, pruned_loss=0.04265, over 3577365.27 frames. 
], batch size: 62, lr: 7.05e-03, grad_scale: 8.0 +2023-03-09 08:24:17,552 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.256e+02 2.910e+02 3.453e+02 4.291e+02 1.108e+03, threshold=6.907e+02, percent-clipped=4.0 +2023-03-09 08:24:24,725 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56969.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:24:40,446 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5359, 3.4807, 4.6760, 4.1991, 3.1043, 3.0516, 4.0154, 4.8657], + device='cuda:2'), covar=tensor([0.0842, 0.1558, 0.0231, 0.0380, 0.0923, 0.1073, 0.0432, 0.0295], + device='cuda:2'), in_proj_covar=tensor([0.0141, 0.0259, 0.0129, 0.0172, 0.0182, 0.0184, 0.0183, 0.0176], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 08:24:52,064 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56993.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:24:54,246 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56995.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:25:13,046 INFO [train.py:898] (2/4) Epoch 16, batch 2500, loss[loss=0.1611, simple_loss=0.2469, pruned_loss=0.03767, over 18570.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2613, pruned_loss=0.04285, over 3566172.68 frames. ], batch size: 45, lr: 7.04e-03, grad_scale: 8.0 +2023-03-09 08:26:03,249 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57054.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:26:05,400 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57056.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:26:11,280 INFO [train.py:898] (2/4) Epoch 16, batch 2550, loss[loss=0.1918, simple_loss=0.2825, pruned_loss=0.05056, over 17128.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2607, pruned_loss=0.04243, over 3569456.91 frames. ], batch size: 78, lr: 7.04e-03, grad_scale: 8.0 +2023-03-09 08:26:13,775 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.929e+02 2.789e+02 3.538e+02 4.534e+02 7.082e+02, threshold=7.077e+02, percent-clipped=1.0 +2023-03-09 08:26:34,749 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.35 vs. limit=5.0 +2023-03-09 08:26:44,764 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8948, 5.0024, 5.0599, 4.8103, 4.7725, 4.7636, 5.1444, 5.0985], + device='cuda:2'), covar=tensor([0.0075, 0.0054, 0.0050, 0.0088, 0.0065, 0.0146, 0.0061, 0.0082], + device='cuda:2'), in_proj_covar=tensor([0.0088, 0.0064, 0.0067, 0.0086, 0.0070, 0.0096, 0.0081, 0.0081], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003], + device='cuda:2') +2023-03-09 08:27:09,955 INFO [train.py:898] (2/4) Epoch 16, batch 2600, loss[loss=0.1818, simple_loss=0.2668, pruned_loss=0.04838, over 16023.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2608, pruned_loss=0.04242, over 3574238.68 frames. ], batch size: 94, lr: 7.04e-03, grad_scale: 8.0 +2023-03-09 08:28:05,890 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57159.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:28:07,951 INFO [train.py:898] (2/4) Epoch 16, batch 2650, loss[loss=0.1742, simple_loss=0.2661, pruned_loss=0.04111, over 17235.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2609, pruned_loss=0.04268, over 3579604.77 frames. 
], batch size: 78, lr: 7.04e-03, grad_scale: 4.0 +2023-03-09 08:28:11,697 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.853e+02 2.763e+02 3.335e+02 4.015e+02 1.057e+03, threshold=6.669e+02, percent-clipped=2.0 +2023-03-09 08:28:29,581 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57179.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 08:28:37,457 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57186.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:28:49,793 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57197.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:29:06,054 INFO [train.py:898] (2/4) Epoch 16, batch 2700, loss[loss=0.1779, simple_loss=0.2727, pruned_loss=0.04161, over 18569.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2609, pruned_loss=0.04241, over 3588940.03 frames. ], batch size: 54, lr: 7.03e-03, grad_scale: 4.0 +2023-03-09 08:29:25,743 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57227.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 08:29:44,867 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57244.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:29:48,272 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57247.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:30:01,232 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57258.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 08:30:04,239 INFO [train.py:898] (2/4) Epoch 16, batch 2750, loss[loss=0.1669, simple_loss=0.2548, pruned_loss=0.03948, over 18406.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2617, pruned_loss=0.04279, over 3574060.07 frames. ], batch size: 48, lr: 7.03e-03, grad_scale: 4.0 +2023-03-09 08:30:08,181 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.113e+02 2.970e+02 3.336e+02 3.947e+02 1.031e+03, threshold=6.671e+02, percent-clipped=3.0 +2023-03-09 08:30:08,381 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57264.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:30:55,632 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57305.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:31:02,573 INFO [train.py:898] (2/4) Epoch 16, batch 2800, loss[loss=0.1851, simple_loss=0.2784, pruned_loss=0.0459, over 18210.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2616, pruned_loss=0.04273, over 3581813.68 frames. 
], batch size: 60, lr: 7.03e-03, grad_scale: 8.0 +2023-03-09 08:31:42,873 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6838, 2.0609, 2.6995, 2.6937, 3.2229, 4.9624, 4.6927, 3.6166], + device='cuda:2'), covar=tensor([0.1542, 0.2361, 0.2743, 0.1637, 0.2190, 0.0174, 0.0367, 0.0741], + device='cuda:2'), in_proj_covar=tensor([0.0278, 0.0334, 0.0360, 0.0267, 0.0382, 0.0221, 0.0286, 0.0236], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 08:31:47,085 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57349.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:31:47,196 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.6065, 5.5635, 5.1890, 5.5627, 5.5228, 4.9212, 5.4568, 5.2121], + device='cuda:2'), covar=tensor([0.0405, 0.0413, 0.1355, 0.0739, 0.0489, 0.0391, 0.0374, 0.0889], + device='cuda:2'), in_proj_covar=tensor([0.0456, 0.0517, 0.0672, 0.0414, 0.0408, 0.0475, 0.0501, 0.0639], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 08:31:49,452 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57351.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:32:00,273 INFO [train.py:898] (2/4) Epoch 16, batch 2850, loss[loss=0.1586, simple_loss=0.2482, pruned_loss=0.0345, over 18501.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2614, pruned_loss=0.04278, over 3580251.01 frames. ], batch size: 47, lr: 7.02e-03, grad_scale: 8.0 +2023-03-09 08:32:04,039 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.919e+02 2.832e+02 3.405e+02 3.993e+02 9.421e+02, threshold=6.810e+02, percent-clipped=3.0 +2023-03-09 08:32:43,984 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1159, 5.6799, 5.2656, 5.4073, 5.2853, 5.1079, 5.7092, 5.6594], + device='cuda:2'), covar=tensor([0.1088, 0.0681, 0.0634, 0.0756, 0.1318, 0.0743, 0.0604, 0.0673], + device='cuda:2'), in_proj_covar=tensor([0.0573, 0.0490, 0.0365, 0.0515, 0.0709, 0.0515, 0.0691, 0.0522], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 08:32:44,124 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57398.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:32:46,731 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1275, 2.9724, 3.0207, 2.8108, 2.9525, 2.4148, 2.5413, 3.1311], + device='cuda:2'), covar=tensor([0.0069, 0.0104, 0.0079, 0.0128, 0.0091, 0.0171, 0.0169, 0.0067], + device='cuda:2'), in_proj_covar=tensor([0.0123, 0.0144, 0.0122, 0.0176, 0.0129, 0.0168, 0.0172, 0.0106], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 08:32:54,681 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0622, 4.2191, 2.3243, 4.1046, 5.2787, 2.6173, 3.9359, 4.0868], + device='cuda:2'), covar=tensor([0.0158, 0.1223, 0.1738, 0.0688, 0.0065, 0.1297, 0.0674, 0.0691], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0259, 0.0198, 0.0192, 0.0107, 0.0181, 0.0212, 0.0220], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 08:32:58,032 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6678, 3.6815, 5.0259, 4.4069, 3.3053, 3.0365, 4.4825, 
5.2675], + device='cuda:2'), covar=tensor([0.0840, 0.1462, 0.0169, 0.0396, 0.0868, 0.1189, 0.0361, 0.0203], + device='cuda:2'), in_proj_covar=tensor([0.0142, 0.0261, 0.0130, 0.0173, 0.0183, 0.0184, 0.0184, 0.0176], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 08:32:58,704 INFO [train.py:898] (2/4) Epoch 16, batch 2900, loss[loss=0.1685, simple_loss=0.26, pruned_loss=0.03847, over 18374.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.261, pruned_loss=0.04238, over 3586938.30 frames. ], batch size: 50, lr: 7.02e-03, grad_scale: 8.0 +2023-03-09 08:33:41,225 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-09 08:33:46,744 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-09 08:33:50,175 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-09 08:33:55,354 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57459.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:33:55,399 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57459.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:33:57,207 INFO [train.py:898] (2/4) Epoch 16, batch 2950, loss[loss=0.146, simple_loss=0.2349, pruned_loss=0.02857, over 18289.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2615, pruned_loss=0.04276, over 3574752.31 frames. ], batch size: 49, lr: 7.02e-03, grad_scale: 8.0 +2023-03-09 08:33:58,798 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57462.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:33:59,968 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5033, 5.4732, 5.0732, 5.4503, 5.4370, 4.8107, 5.3333, 5.1197], + device='cuda:2'), covar=tensor([0.0411, 0.0377, 0.1465, 0.0776, 0.0485, 0.0445, 0.0406, 0.0941], + device='cuda:2'), in_proj_covar=tensor([0.0460, 0.0520, 0.0676, 0.0416, 0.0409, 0.0478, 0.0506, 0.0643], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 08:34:00,785 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.919e+02 2.681e+02 3.202e+02 3.928e+02 6.706e+02, threshold=6.405e+02, percent-clipped=1.0 +2023-03-09 08:34:38,243 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57495.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:34:51,824 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57507.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:34:56,200 INFO [train.py:898] (2/4) Epoch 16, batch 3000, loss[loss=0.1557, simple_loss=0.2461, pruned_loss=0.03269, over 18272.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2606, pruned_loss=0.04247, over 3585915.59 frames. 
], batch size: 47, lr: 7.01e-03, grad_scale: 8.0 +2023-03-09 08:34:56,201 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 08:35:05,545 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8914, 3.8938, 3.6343, 3.7822, 3.8177, 3.3117, 3.7834, 3.6318], + device='cuda:2'), covar=tensor([0.0494, 0.0594, 0.1497, 0.0854, 0.0698, 0.0615, 0.0522, 0.1125], + device='cuda:2'), in_proj_covar=tensor([0.0454, 0.0516, 0.0669, 0.0413, 0.0406, 0.0474, 0.0503, 0.0635], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 08:35:08,107 INFO [train.py:932] (2/4) Epoch 16, validation: loss=0.1522, simple_loss=0.2529, pruned_loss=0.02576, over 944034.00 frames. +2023-03-09 08:35:08,108 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 08:35:23,878 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57523.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:35:44,951 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57542.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:35:57,291 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57553.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 08:36:00,828 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57556.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:36:05,943 INFO [train.py:898] (2/4) Epoch 16, batch 3050, loss[loss=0.2429, simple_loss=0.3141, pruned_loss=0.08588, over 13188.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.261, pruned_loss=0.04277, over 3578532.41 frames. ], batch size: 130, lr: 7.01e-03, grad_scale: 8.0 +2023-03-09 08:36:09,983 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.073e+02 2.813e+02 3.441e+02 4.204e+02 1.352e+03, threshold=6.882e+02, percent-clipped=6.0 +2023-03-09 08:36:10,910 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57564.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:36:30,728 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6065, 5.1335, 5.0968, 5.0481, 4.5968, 4.9995, 4.4279, 4.9599], + device='cuda:2'), covar=tensor([0.0294, 0.0339, 0.0234, 0.0508, 0.0467, 0.0260, 0.1196, 0.0345], + device='cuda:2'), in_proj_covar=tensor([0.0199, 0.0248, 0.0242, 0.0302, 0.0255, 0.0250, 0.0299, 0.0244], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 08:36:52,852 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57600.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:37:05,422 INFO [train.py:898] (2/4) Epoch 16, batch 3100, loss[loss=0.1709, simple_loss=0.2604, pruned_loss=0.04071, over 18288.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2599, pruned_loss=0.0425, over 3589592.60 frames. 
], batch size: 49, lr: 7.01e-03, grad_scale: 8.0 +2023-03-09 08:37:06,733 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57612.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:37:43,552 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6727, 2.9004, 2.4922, 3.0167, 3.6815, 3.6130, 3.1546, 2.9876], + device='cuda:2'), covar=tensor([0.0188, 0.0291, 0.0579, 0.0365, 0.0179, 0.0192, 0.0381, 0.0412], + device='cuda:2'), in_proj_covar=tensor([0.0128, 0.0127, 0.0159, 0.0148, 0.0117, 0.0105, 0.0146, 0.0144], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 08:37:51,351 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57649.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:37:53,587 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57651.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:38:04,661 INFO [train.py:898] (2/4) Epoch 16, batch 3150, loss[loss=0.1827, simple_loss=0.2782, pruned_loss=0.04357, over 18265.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2603, pruned_loss=0.04258, over 3584392.84 frames. ], batch size: 60, lr: 7.01e-03, grad_scale: 8.0 +2023-03-09 08:38:08,020 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.831e+02 2.982e+02 3.518e+02 4.061e+02 7.623e+02, threshold=7.037e+02, percent-clipped=2.0 +2023-03-09 08:38:48,032 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57697.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:38:50,422 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57699.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:39:03,753 INFO [train.py:898] (2/4) Epoch 16, batch 3200, loss[loss=0.1558, simple_loss=0.2404, pruned_loss=0.03558, over 18371.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2595, pruned_loss=0.04217, over 3595753.55 frames. ], batch size: 46, lr: 7.00e-03, grad_scale: 8.0 +2023-03-09 08:39:30,477 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57733.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:39:54,283 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57754.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:40:02,254 INFO [train.py:898] (2/4) Epoch 16, batch 3250, loss[loss=0.1823, simple_loss=0.2735, pruned_loss=0.04556, over 16071.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2589, pruned_loss=0.04197, over 3604475.85 frames. 
], batch size: 94, lr: 7.00e-03, grad_scale: 8.0 +2023-03-09 08:40:05,688 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.974e+02 2.787e+02 3.348e+02 4.125e+02 7.388e+02, threshold=6.695e+02, percent-clipped=1.0 +2023-03-09 08:40:33,231 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7012, 3.0661, 2.3202, 3.1021, 3.7046, 3.6879, 3.3076, 3.1385], + device='cuda:2'), covar=tensor([0.0136, 0.0213, 0.0713, 0.0281, 0.0130, 0.0127, 0.0273, 0.0284], + device='cuda:2'), in_proj_covar=tensor([0.0128, 0.0127, 0.0160, 0.0149, 0.0117, 0.0105, 0.0146, 0.0145], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 08:40:41,813 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57794.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:40:46,413 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9410, 3.7291, 5.0262, 2.9593, 4.3916, 2.6543, 3.0669, 1.8556], + device='cuda:2'), covar=tensor([0.1045, 0.0794, 0.0120, 0.0798, 0.0526, 0.2462, 0.2660, 0.2052], + device='cuda:2'), in_proj_covar=tensor([0.0211, 0.0233, 0.0155, 0.0185, 0.0246, 0.0259, 0.0308, 0.0226], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 08:40:50,319 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-09 08:41:00,953 INFO [train.py:898] (2/4) Epoch 16, batch 3300, loss[loss=0.1777, simple_loss=0.2699, pruned_loss=0.04279, over 18310.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2604, pruned_loss=0.04256, over 3593145.95 frames. ], batch size: 57, lr: 7.00e-03, grad_scale: 8.0 +2023-03-09 08:41:09,093 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57818.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:41:35,615 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2567, 2.6466, 2.2608, 2.7266, 3.2824, 3.2637, 2.8895, 2.7143], + device='cuda:2'), covar=tensor([0.0139, 0.0221, 0.0586, 0.0303, 0.0137, 0.0142, 0.0324, 0.0348], + device='cuda:2'), in_proj_covar=tensor([0.0129, 0.0127, 0.0160, 0.0149, 0.0117, 0.0105, 0.0147, 0.0145], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 08:41:37,664 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57842.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:41:37,865 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0854, 4.2590, 2.4786, 4.2406, 5.2576, 2.5641, 3.9858, 3.9678], + device='cuda:2'), covar=tensor([0.0133, 0.1028, 0.1637, 0.0543, 0.0071, 0.1319, 0.0622, 0.0715], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0260, 0.0199, 0.0192, 0.0108, 0.0181, 0.0213, 0.0220], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 08:41:47,832 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57851.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:41:50,168 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57853.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 08:41:58,565 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. 
limit=2.0 +2023-03-09 08:41:59,083 INFO [train.py:898] (2/4) Epoch 16, batch 3350, loss[loss=0.1639, simple_loss=0.2573, pruned_loss=0.03528, over 17995.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2607, pruned_loss=0.04244, over 3579539.95 frames. ], batch size: 65, lr: 6.99e-03, grad_scale: 8.0 +2023-03-09 08:42:02,094 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.53 vs. limit=2.0 +2023-03-09 08:42:02,567 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.007e+02 2.795e+02 3.328e+02 4.480e+02 9.325e+02, threshold=6.655e+02, percent-clipped=2.0 +2023-03-09 08:42:33,259 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57890.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:42:44,974 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57900.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:42:46,000 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57901.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:42:57,108 INFO [train.py:898] (2/4) Epoch 16, batch 3400, loss[loss=0.1913, simple_loss=0.2825, pruned_loss=0.05004, over 16188.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2601, pruned_loss=0.04217, over 3572442.94 frames. ], batch size: 94, lr: 6.99e-03, grad_scale: 8.0 +2023-03-09 08:43:13,435 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.70 vs. limit=5.0 +2023-03-09 08:43:41,005 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=57948.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:43:46,053 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-03-09 08:43:55,559 INFO [train.py:898] (2/4) Epoch 16, batch 3450, loss[loss=0.1605, simple_loss=0.2572, pruned_loss=0.03189, over 18349.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2593, pruned_loss=0.0419, over 3573718.62 frames. ], batch size: 55, lr: 6.99e-03, grad_scale: 8.0 +2023-03-09 08:43:58,790 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.652e+02 2.617e+02 3.263e+02 4.009e+02 9.440e+02, threshold=6.526e+02, percent-clipped=3.0 +2023-03-09 08:44:59,186 INFO [train.py:898] (2/4) Epoch 16, batch 3500, loss[loss=0.1624, simple_loss=0.2508, pruned_loss=0.03697, over 18355.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2596, pruned_loss=0.04192, over 3583443.22 frames. ], batch size: 46, lr: 6.98e-03, grad_scale: 8.0 +2023-03-09 08:45:38,387 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7048, 4.2586, 4.4163, 3.3618, 3.6302, 3.4349, 2.5817, 2.5532], + device='cuda:2'), covar=tensor([0.0231, 0.0196, 0.0079, 0.0265, 0.0327, 0.0201, 0.0651, 0.0787], + device='cuda:2'), in_proj_covar=tensor([0.0066, 0.0054, 0.0058, 0.0065, 0.0087, 0.0062, 0.0075, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 08:45:48,095 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58054.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:45:55,319 INFO [train.py:898] (2/4) Epoch 16, batch 3550, loss[loss=0.1704, simple_loss=0.258, pruned_loss=0.04133, over 18392.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2599, pruned_loss=0.04201, over 3566745.12 frames. 
], batch size: 52, lr: 6.98e-03, grad_scale: 8.0 +2023-03-09 08:45:58,562 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.098e+02 2.999e+02 3.554e+02 4.304e+02 1.121e+03, threshold=7.108e+02, percent-clipped=3.0 +2023-03-09 08:46:26,744 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58089.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:46:40,554 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58102.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:46:50,128 INFO [train.py:898] (2/4) Epoch 16, batch 3600, loss[loss=0.1759, simple_loss=0.2647, pruned_loss=0.04358, over 17053.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2593, pruned_loss=0.04159, over 3581638.29 frames. ], batch size: 78, lr: 6.98e-03, grad_scale: 8.0 +2023-03-09 08:46:58,122 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58118.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:47:21,701 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58141.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:47:54,109 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58144.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:47:54,869 INFO [train.py:898] (2/4) Epoch 17, batch 0, loss[loss=0.1791, simple_loss=0.2775, pruned_loss=0.04029, over 18604.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2775, pruned_loss=0.04029, over 18604.00 frames. ], batch size: 52, lr: 6.77e-03, grad_scale: 8.0 +2023-03-09 08:47:54,869 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 08:48:01,184 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4751, 2.1431, 2.4185, 2.5073, 3.0184, 4.5612, 4.3057, 3.0174], + device='cuda:2'), covar=tensor([0.1828, 0.2531, 0.2968, 0.1874, 0.2307, 0.0199, 0.0395, 0.0952], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0330, 0.0356, 0.0264, 0.0376, 0.0218, 0.0282, 0.0234], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 08:48:01,368 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0345, 5.4291, 5.3544, 5.3075, 5.0887, 5.0053, 5.5067, 5.4493], + device='cuda:2'), covar=tensor([0.1131, 0.0767, 0.0276, 0.0661, 0.1323, 0.0689, 0.0538, 0.0716], + device='cuda:2'), in_proj_covar=tensor([0.0578, 0.0495, 0.0361, 0.0524, 0.0711, 0.0516, 0.0696, 0.0525], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 08:48:03,284 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.3521, 2.0231, 1.9713, 1.9565, 2.4183, 2.5018, 2.3720, 2.2050], + device='cuda:2'), covar=tensor([0.0297, 0.0267, 0.0619, 0.0418, 0.0226, 0.0200, 0.0431, 0.0384], + device='cuda:2'), in_proj_covar=tensor([0.0129, 0.0127, 0.0158, 0.0149, 0.0115, 0.0103, 0.0146, 0.0145], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 08:48:06,680 INFO [train.py:932] (2/4) Epoch 17, validation: loss=0.1527, simple_loss=0.2537, pruned_loss=0.02582, over 944034.00 frames. 
+2023-03-09 08:48:06,680 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 08:48:13,739 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58151.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:48:30,198 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.965e+02 2.893e+02 3.453e+02 4.374e+02 8.967e+02, threshold=6.906e+02, percent-clipped=3.0 +2023-03-09 08:48:32,614 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58166.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:49:05,042 INFO [train.py:898] (2/4) Epoch 17, batch 50, loss[loss=0.1728, simple_loss=0.2617, pruned_loss=0.04192, over 18499.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2672, pruned_loss=0.04607, over 791470.35 frames. ], batch size: 53, lr: 6.76e-03, grad_scale: 8.0 +2023-03-09 08:49:06,467 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0354, 5.2109, 2.4292, 5.1425, 4.8744, 5.2182, 4.9923, 2.3030], + device='cuda:2'), covar=tensor([0.0215, 0.0119, 0.1119, 0.0115, 0.0108, 0.0169, 0.0160, 0.1689], + device='cuda:2'), in_proj_covar=tensor([0.0082, 0.0075, 0.0092, 0.0089, 0.0081, 0.0070, 0.0081, 0.0094], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 08:49:09,578 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58199.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:49:13,622 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58202.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:49:16,947 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58205.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:50:03,235 INFO [train.py:898] (2/4) Epoch 17, batch 100, loss[loss=0.1647, simple_loss=0.251, pruned_loss=0.03923, over 18279.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.261, pruned_loss=0.04288, over 1416871.26 frames. ], batch size: 49, lr: 6.76e-03, grad_scale: 8.0 +2023-03-09 08:50:26,005 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.069e+02 2.900e+02 3.440e+02 4.043e+02 9.296e+02, threshold=6.881e+02, percent-clipped=1.0 +2023-03-09 08:50:36,410 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9538, 4.6101, 4.1802, 4.3220, 4.0346, 4.7430, 4.4947, 4.2382], + device='cuda:2'), covar=tensor([0.1455, 0.0968, 0.0956, 0.0764, 0.1455, 0.1248, 0.0635, 0.1987], + device='cuda:2'), in_proj_covar=tensor([0.0334, 0.0266, 0.0284, 0.0283, 0.0311, 0.0394, 0.0258, 0.0386], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:2') +2023-03-09 08:51:02,239 INFO [train.py:898] (2/4) Epoch 17, batch 150, loss[loss=0.1425, simple_loss=0.2265, pruned_loss=0.02928, over 18429.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2604, pruned_loss=0.04281, over 1879869.81 frames. ], batch size: 43, lr: 6.76e-03, grad_scale: 8.0 +2023-03-09 08:52:01,274 INFO [train.py:898] (2/4) Epoch 17, batch 200, loss[loss=0.1483, simple_loss=0.2307, pruned_loss=0.03297, over 18248.00 frames. ], tot_loss[loss=0.172, simple_loss=0.26, pruned_loss=0.04203, over 2266039.81 frames. 
], batch size: 45, lr: 6.75e-03, grad_scale: 8.0 +2023-03-09 08:52:22,849 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.674e+02 2.879e+02 3.254e+02 3.939e+02 7.173e+02, threshold=6.508e+02, percent-clipped=1.0 +2023-03-09 08:52:53,499 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58389.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:52:59,835 INFO [train.py:898] (2/4) Epoch 17, batch 250, loss[loss=0.167, simple_loss=0.2558, pruned_loss=0.03905, over 18492.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2603, pruned_loss=0.0419, over 2566194.99 frames. ], batch size: 51, lr: 6.75e-03, grad_scale: 8.0 +2023-03-09 08:53:01,518 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.3456, 3.1830, 4.3885, 3.9577, 3.0949, 3.0020, 3.9537, 4.5348], + device='cuda:2'), covar=tensor([0.0895, 0.1497, 0.0191, 0.0396, 0.0874, 0.0995, 0.0376, 0.0264], + device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0261, 0.0130, 0.0173, 0.0184, 0.0183, 0.0185, 0.0177], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 08:53:18,113 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-09 08:53:19,218 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.55 vs. limit=2.0 +2023-03-09 08:53:50,116 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58437.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:53:59,032 INFO [train.py:898] (2/4) Epoch 17, batch 300, loss[loss=0.1784, simple_loss=0.2662, pruned_loss=0.04527, over 18271.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2605, pruned_loss=0.04185, over 2795500.61 frames. ], batch size: 47, lr: 6.75e-03, grad_scale: 8.0 +2023-03-09 08:54:03,798 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9346, 5.1274, 5.1170, 5.2198, 4.8882, 5.7679, 5.3284, 5.0738], + device='cuda:2'), covar=tensor([0.1237, 0.0684, 0.0875, 0.0769, 0.1563, 0.0829, 0.0721, 0.1566], + device='cuda:2'), in_proj_covar=tensor([0.0337, 0.0266, 0.0285, 0.0286, 0.0313, 0.0396, 0.0259, 0.0386], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:2') +2023-03-09 08:54:20,579 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.698e+02 2.907e+02 3.543e+02 4.450e+02 1.655e+03, threshold=7.087e+02, percent-clipped=7.0 +2023-03-09 08:54:44,111 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-09 08:54:57,848 INFO [train.py:898] (2/4) Epoch 17, batch 350, loss[loss=0.1454, simple_loss=0.2343, pruned_loss=0.02823, over 18543.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2601, pruned_loss=0.04168, over 2980069.72 frames. ], batch size: 49, lr: 6.75e-03, grad_scale: 8.0 +2023-03-09 08:55:00,143 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58497.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:55:03,534 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58500.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:55:56,670 INFO [train.py:898] (2/4) Epoch 17, batch 400, loss[loss=0.2008, simple_loss=0.2892, pruned_loss=0.05622, over 18358.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2589, pruned_loss=0.04144, over 3114291.85 frames. 
], batch size: 56, lr: 6.74e-03, grad_scale: 8.0 +2023-03-09 08:56:07,568 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-03-09 08:56:18,014 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.966e+02 2.743e+02 3.133e+02 4.342e+02 9.924e+02, threshold=6.265e+02, percent-clipped=3.0 +2023-03-09 08:56:54,601 INFO [train.py:898] (2/4) Epoch 17, batch 450, loss[loss=0.179, simple_loss=0.2727, pruned_loss=0.04262, over 18011.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2593, pruned_loss=0.04164, over 3224495.13 frames. ], batch size: 65, lr: 6.74e-03, grad_scale: 8.0 +2023-03-09 08:57:40,626 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5757, 5.4908, 5.1200, 5.4460, 5.4300, 4.8013, 5.3201, 5.0923], + device='cuda:2'), covar=tensor([0.0354, 0.0425, 0.1236, 0.0806, 0.0545, 0.0396, 0.0428, 0.1025], + device='cuda:2'), in_proj_covar=tensor([0.0454, 0.0521, 0.0668, 0.0417, 0.0408, 0.0476, 0.0507, 0.0639], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 08:57:52,495 INFO [train.py:898] (2/4) Epoch 17, batch 500, loss[loss=0.2005, simple_loss=0.2888, pruned_loss=0.05614, over 17717.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2589, pruned_loss=0.04151, over 3302108.93 frames. ], batch size: 70, lr: 6.74e-03, grad_scale: 8.0 +2023-03-09 08:58:01,037 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.30 vs. limit=5.0 +2023-03-09 08:58:13,805 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.815e+02 2.817e+02 3.097e+02 3.778e+02 7.071e+02, threshold=6.194e+02, percent-clipped=1.0 +2023-03-09 08:58:33,077 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58681.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:58:49,786 INFO [train.py:898] (2/4) Epoch 17, batch 550, loss[loss=0.1682, simple_loss=0.2558, pruned_loss=0.04029, over 18375.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2579, pruned_loss=0.04123, over 3370015.22 frames. ], batch size: 46, lr: 6.73e-03, grad_scale: 8.0 +2023-03-09 08:59:04,771 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-09 08:59:44,822 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58742.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 08:59:47,859 INFO [train.py:898] (2/4) Epoch 17, batch 600, loss[loss=0.1722, simple_loss=0.2627, pruned_loss=0.04085, over 18024.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2573, pruned_loss=0.0412, over 3425788.98 frames. ], batch size: 65, lr: 6.73e-03, grad_scale: 8.0 +2023-03-09 09:00:09,620 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.181e+02 2.820e+02 3.241e+02 3.843e+02 6.469e+02, threshold=6.481e+02, percent-clipped=2.0 +2023-03-09 09:00:15,976 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.41 vs. 
limit=5.0 +2023-03-09 09:00:17,801 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7607, 4.7322, 4.4116, 4.6588, 4.6908, 4.0667, 4.5820, 4.3850], + device='cuda:2'), covar=tensor([0.0401, 0.0446, 0.1142, 0.0790, 0.0527, 0.0443, 0.0428, 0.1082], + device='cuda:2'), in_proj_covar=tensor([0.0454, 0.0519, 0.0665, 0.0413, 0.0406, 0.0474, 0.0506, 0.0636], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 09:00:22,459 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58775.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:00:45,231 INFO [train.py:898] (2/4) Epoch 17, batch 650, loss[loss=0.1745, simple_loss=0.2691, pruned_loss=0.03996, over 16016.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.258, pruned_loss=0.0412, over 3458844.11 frames. ], batch size: 94, lr: 6.73e-03, grad_scale: 8.0 +2023-03-09 09:00:48,323 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58797.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:00:52,705 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58800.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:01:17,489 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1341, 5.0771, 5.1916, 4.9614, 4.9228, 4.9410, 5.3119, 5.2842], + device='cuda:2'), covar=tensor([0.0061, 0.0051, 0.0043, 0.0091, 0.0054, 0.0146, 0.0063, 0.0088], + device='cuda:2'), in_proj_covar=tensor([0.0087, 0.0064, 0.0067, 0.0086, 0.0069, 0.0096, 0.0080, 0.0081], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003], + device='cuda:2') +2023-03-09 09:01:34,562 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58836.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:01:44,383 INFO [train.py:898] (2/4) Epoch 17, batch 700, loss[loss=0.1889, simple_loss=0.2792, pruned_loss=0.04935, over 16162.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2576, pruned_loss=0.04105, over 3500660.73 frames. ], batch size: 94, lr: 6.73e-03, grad_scale: 8.0 +2023-03-09 09:01:44,610 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58845.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:01:48,407 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=58848.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:01:48,906 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-09 09:02:07,827 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.002e+02 2.825e+02 3.273e+02 3.706e+02 6.863e+02, threshold=6.547e+02, percent-clipped=2.0 +2023-03-09 09:02:09,448 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.52 vs. limit=5.0 +2023-03-09 09:02:42,568 INFO [train.py:898] (2/4) Epoch 17, batch 750, loss[loss=0.1762, simple_loss=0.2689, pruned_loss=0.04174, over 17212.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2577, pruned_loss=0.04092, over 3522639.19 frames. ], batch size: 78, lr: 6.72e-03, grad_scale: 8.0 +2023-03-09 09:02:56,583 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58906.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:03:40,647 INFO [train.py:898] (2/4) Epoch 17, batch 800, loss[loss=0.1941, simple_loss=0.285, pruned_loss=0.05155, over 18353.00 frames. 
], tot_loss[loss=0.1709, simple_loss=0.2589, pruned_loss=0.04148, over 3520705.32 frames. ], batch size: 55, lr: 6.72e-03, grad_scale: 8.0 +2023-03-09 09:04:04,577 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.057e+02 2.797e+02 3.307e+02 3.815e+02 9.263e+02, threshold=6.613e+02, percent-clipped=2.0 +2023-03-09 09:04:08,312 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58967.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:04:21,021 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-03-09 09:04:29,997 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5060, 6.0999, 5.5520, 5.8516, 5.6797, 5.4852, 6.1033, 6.0988], + device='cuda:2'), covar=tensor([0.1137, 0.0574, 0.0382, 0.0615, 0.1232, 0.0694, 0.0543, 0.0584], + device='cuda:2'), in_proj_covar=tensor([0.0587, 0.0502, 0.0366, 0.0530, 0.0720, 0.0525, 0.0710, 0.0533], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 09:04:38,774 INFO [train.py:898] (2/4) Epoch 17, batch 850, loss[loss=0.1481, simple_loss=0.2268, pruned_loss=0.03467, over 17797.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2588, pruned_loss=0.04161, over 3538012.36 frames. ], batch size: 39, lr: 6.72e-03, grad_scale: 8.0 +2023-03-09 09:04:40,737 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.14 vs. limit=5.0 +2023-03-09 09:04:42,462 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9608, 5.4969, 5.5267, 5.4525, 5.0433, 5.4137, 4.7955, 5.3985], + device='cuda:2'), covar=tensor([0.0239, 0.0235, 0.0169, 0.0393, 0.0379, 0.0216, 0.1085, 0.0292], + device='cuda:2'), in_proj_covar=tensor([0.0203, 0.0248, 0.0240, 0.0305, 0.0257, 0.0251, 0.0300, 0.0244], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 09:05:28,790 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59037.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:05:37,752 INFO [train.py:898] (2/4) Epoch 17, batch 900, loss[loss=0.1789, simple_loss=0.2599, pruned_loss=0.04889, over 17803.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2587, pruned_loss=0.04167, over 3548478.65 frames. ], batch size: 70, lr: 6.71e-03, grad_scale: 8.0 +2023-03-09 09:05:59,621 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.891e+02 2.805e+02 3.225e+02 3.970e+02 5.951e+02, threshold=6.451e+02, percent-clipped=0.0 +2023-03-09 09:06:36,350 INFO [train.py:898] (2/4) Epoch 17, batch 950, loss[loss=0.1647, simple_loss=0.2562, pruned_loss=0.03658, over 18504.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2582, pruned_loss=0.04149, over 3556409.48 frames. ], batch size: 51, lr: 6.71e-03, grad_scale: 8.0 +2023-03-09 09:07:14,556 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. 
limit=2.0 +2023-03-09 09:07:17,777 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5886, 2.1449, 2.5258, 2.6525, 3.0541, 4.6009, 4.4571, 3.3047], + device='cuda:2'), covar=tensor([0.1668, 0.2473, 0.2836, 0.1741, 0.2289, 0.0241, 0.0387, 0.0856], + device='cuda:2'), in_proj_covar=tensor([0.0276, 0.0331, 0.0360, 0.0265, 0.0378, 0.0222, 0.0285, 0.0236], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 09:07:19,901 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59131.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:07:23,795 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. limit=2.0 +2023-03-09 09:07:35,514 INFO [train.py:898] (2/4) Epoch 17, batch 1000, loss[loss=0.1584, simple_loss=0.2543, pruned_loss=0.0313, over 18329.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.259, pruned_loss=0.04187, over 3556687.06 frames. ], batch size: 55, lr: 6.71e-03, grad_scale: 16.0 +2023-03-09 09:07:56,831 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.800e+02 2.712e+02 3.112e+02 3.820e+02 1.157e+03, threshold=6.224e+02, percent-clipped=3.0 +2023-03-09 09:07:57,143 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59164.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:08:33,626 INFO [train.py:898] (2/4) Epoch 17, batch 1050, loss[loss=0.182, simple_loss=0.2742, pruned_loss=0.04491, over 18229.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2595, pruned_loss=0.04186, over 3569885.51 frames. ], batch size: 60, lr: 6.71e-03, grad_scale: 16.0 +2023-03-09 09:08:43,551 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0198, 3.4529, 2.5872, 3.3256, 4.0951, 2.6033, 3.3603, 3.4729], + device='cuda:2'), covar=tensor([0.0175, 0.1016, 0.1297, 0.0656, 0.0111, 0.1059, 0.0631, 0.0633], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0256, 0.0196, 0.0189, 0.0108, 0.0176, 0.0208, 0.0215], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 09:08:51,255 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8410, 4.8297, 4.9216, 4.6089, 4.6964, 4.6696, 5.0469, 5.0385], + device='cuda:2'), covar=tensor([0.0072, 0.0065, 0.0066, 0.0112, 0.0062, 0.0159, 0.0064, 0.0099], + device='cuda:2'), in_proj_covar=tensor([0.0087, 0.0064, 0.0068, 0.0087, 0.0070, 0.0097, 0.0081, 0.0081], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003], + device='cuda:2') +2023-03-09 09:09:06,570 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4362, 5.9907, 5.4857, 5.7520, 5.5913, 5.4137, 6.0330, 5.9766], + device='cuda:2'), covar=tensor([0.1200, 0.0690, 0.0453, 0.0673, 0.1358, 0.0709, 0.0560, 0.0641], + device='cuda:2'), in_proj_covar=tensor([0.0588, 0.0504, 0.0368, 0.0531, 0.0722, 0.0527, 0.0710, 0.0534], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 09:09:08,997 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59225.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:09:09,206 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. 
limit=2.0 +2023-03-09 09:09:32,446 INFO [train.py:898] (2/4) Epoch 17, batch 1100, loss[loss=0.1772, simple_loss=0.2667, pruned_loss=0.04381, over 18451.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2593, pruned_loss=0.04176, over 3571365.93 frames. ], batch size: 59, lr: 6.70e-03, grad_scale: 16.0 +2023-03-09 09:09:52,005 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59262.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:09:54,097 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.737e+02 2.749e+02 3.337e+02 4.015e+02 7.609e+02, threshold=6.673e+02, percent-clipped=2.0 +2023-03-09 09:10:31,701 INFO [train.py:898] (2/4) Epoch 17, batch 1150, loss[loss=0.1868, simple_loss=0.2794, pruned_loss=0.04707, over 18399.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2589, pruned_loss=0.04144, over 3583400.35 frames. ], batch size: 52, lr: 6.70e-03, grad_scale: 16.0 +2023-03-09 09:11:19,762 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59337.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:11:29,505 INFO [train.py:898] (2/4) Epoch 17, batch 1200, loss[loss=0.1706, simple_loss=0.2633, pruned_loss=0.03898, over 18356.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2582, pruned_loss=0.04136, over 3597504.39 frames. ], batch size: 46, lr: 6.70e-03, grad_scale: 16.0 +2023-03-09 09:11:42,549 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6775, 3.6423, 4.9392, 4.2316, 3.2060, 2.9520, 4.1852, 5.1827], + device='cuda:2'), covar=tensor([0.0821, 0.1467, 0.0156, 0.0398, 0.0924, 0.1176, 0.0402, 0.0329], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0262, 0.0132, 0.0173, 0.0184, 0.0184, 0.0186, 0.0177], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 09:11:51,078 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.028e+02 2.851e+02 3.293e+02 4.093e+02 6.542e+02, threshold=6.585e+02, percent-clipped=0.0 +2023-03-09 09:12:15,542 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59385.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:12:25,283 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5424, 2.2401, 2.6105, 2.5806, 3.2086, 4.8429, 4.6640, 3.3665], + device='cuda:2'), covar=tensor([0.1588, 0.2306, 0.2761, 0.1682, 0.2122, 0.0172, 0.0345, 0.0793], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0334, 0.0362, 0.0265, 0.0379, 0.0223, 0.0286, 0.0236], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 09:12:28,175 INFO [train.py:898] (2/4) Epoch 17, batch 1250, loss[loss=0.1794, simple_loss=0.2784, pruned_loss=0.04014, over 18284.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2585, pruned_loss=0.04136, over 3591938.03 frames. 
], batch size: 57, lr: 6.69e-03, grad_scale: 8.0 +2023-03-09 09:13:05,719 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.7125, 6.2004, 5.6627, 6.0540, 5.8390, 5.7822, 6.3255, 6.2373], + device='cuda:2'), covar=tensor([0.1149, 0.0755, 0.0347, 0.0676, 0.1493, 0.0611, 0.0578, 0.0658], + device='cuda:2'), in_proj_covar=tensor([0.0587, 0.0504, 0.0366, 0.0527, 0.0722, 0.0524, 0.0706, 0.0531], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 09:13:09,126 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59431.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:13:21,960 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6552, 2.2315, 2.5430, 2.6914, 3.2493, 4.9211, 4.7296, 3.4322], + device='cuda:2'), covar=tensor([0.1663, 0.2431, 0.2824, 0.1681, 0.2262, 0.0190, 0.0363, 0.0853], + device='cuda:2'), in_proj_covar=tensor([0.0279, 0.0334, 0.0363, 0.0266, 0.0381, 0.0223, 0.0286, 0.0237], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 09:13:26,465 INFO [train.py:898] (2/4) Epoch 17, batch 1300, loss[loss=0.1651, simple_loss=0.2517, pruned_loss=0.03924, over 18346.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2576, pruned_loss=0.04129, over 3597765.47 frames. ], batch size: 46, lr: 6.69e-03, grad_scale: 8.0 +2023-03-09 09:13:48,930 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.812e+02 2.789e+02 3.310e+02 4.090e+02 6.733e+02, threshold=6.621e+02, percent-clipped=2.0 +2023-03-09 09:14:05,082 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59479.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:14:06,363 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5849, 6.0910, 5.5153, 5.9476, 5.7207, 5.6902, 6.1989, 6.1467], + device='cuda:2'), covar=tensor([0.1157, 0.0675, 0.0438, 0.0658, 0.1258, 0.0642, 0.0500, 0.0557], + device='cuda:2'), in_proj_covar=tensor([0.0585, 0.0502, 0.0364, 0.0524, 0.0721, 0.0523, 0.0704, 0.0530], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 09:14:24,116 INFO [train.py:898] (2/4) Epoch 17, batch 1350, loss[loss=0.1602, simple_loss=0.2535, pruned_loss=0.03349, over 18093.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2578, pruned_loss=0.04126, over 3598603.75 frames. ], batch size: 62, lr: 6.69e-03, grad_scale: 8.0 +2023-03-09 09:14:33,560 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59502.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:14:47,685 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-09 09:14:53,671 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59520.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:15:17,238 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.25 vs. limit=5.0 +2023-03-09 09:15:22,942 INFO [train.py:898] (2/4) Epoch 17, batch 1400, loss[loss=0.1503, simple_loss=0.2283, pruned_loss=0.03613, over 16854.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.258, pruned_loss=0.04124, over 3595709.77 frames. 
], batch size: 37, lr: 6.69e-03, grad_scale: 8.0 +2023-03-09 09:15:43,360 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59562.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:15:44,578 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59563.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:15:46,381 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.823e+02 2.959e+02 3.481e+02 4.444e+02 9.729e+02, threshold=6.962e+02, percent-clipped=6.0 +2023-03-09 09:16:19,255 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5628, 3.3644, 2.1553, 4.3480, 3.1181, 4.1809, 2.3851, 3.8714], + device='cuda:2'), covar=tensor([0.0557, 0.0770, 0.1439, 0.0399, 0.0777, 0.0306, 0.1167, 0.0427], + device='cuda:2'), in_proj_covar=tensor([0.0207, 0.0216, 0.0184, 0.0267, 0.0187, 0.0261, 0.0199, 0.0193], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 09:16:21,022 INFO [train.py:898] (2/4) Epoch 17, batch 1450, loss[loss=0.1689, simple_loss=0.2637, pruned_loss=0.03704, over 18153.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2578, pruned_loss=0.04109, over 3595336.11 frames. ], batch size: 62, lr: 6.68e-03, grad_scale: 8.0 +2023-03-09 09:16:28,538 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-09 09:16:40,252 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59610.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:16:41,537 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5345, 5.4869, 5.1015, 5.4305, 5.4235, 4.8315, 5.3750, 5.0901], + device='cuda:2'), covar=tensor([0.0388, 0.0403, 0.1330, 0.0740, 0.0509, 0.0425, 0.0369, 0.1025], + device='cuda:2'), in_proj_covar=tensor([0.0462, 0.0521, 0.0676, 0.0414, 0.0412, 0.0482, 0.0509, 0.0642], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 09:17:20,455 INFO [train.py:898] (2/4) Epoch 17, batch 1500, loss[loss=0.1763, simple_loss=0.2707, pruned_loss=0.04096, over 18253.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2593, pruned_loss=0.04172, over 3588889.76 frames. ], batch size: 60, lr: 6.68e-03, grad_scale: 8.0 +2023-03-09 09:17:29,417 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8427, 5.0764, 2.5572, 4.8769, 4.8251, 5.0786, 4.9347, 2.6551], + device='cuda:2'), covar=tensor([0.0200, 0.0061, 0.0805, 0.0085, 0.0069, 0.0062, 0.0075, 0.0979], + device='cuda:2'), in_proj_covar=tensor([0.0083, 0.0076, 0.0093, 0.0089, 0.0082, 0.0071, 0.0081, 0.0093], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 09:17:44,118 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.960e+02 2.886e+02 3.557e+02 4.310e+02 1.324e+03, threshold=7.115e+02, percent-clipped=4.0 +2023-03-09 09:17:48,296 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. 
limit=2.0 +2023-03-09 09:18:02,894 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8746, 4.6007, 4.7141, 3.5752, 3.8412, 3.5646, 2.6864, 2.5956], + device='cuda:2'), covar=tensor([0.0219, 0.0164, 0.0084, 0.0264, 0.0309, 0.0208, 0.0729, 0.0841], + device='cuda:2'), in_proj_covar=tensor([0.0067, 0.0055, 0.0058, 0.0065, 0.0087, 0.0063, 0.0076, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 09:18:18,466 INFO [train.py:898] (2/4) Epoch 17, batch 1550, loss[loss=0.1906, simple_loss=0.2746, pruned_loss=0.05326, over 16224.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2595, pruned_loss=0.04178, over 3590703.52 frames. ], batch size: 95, lr: 6.68e-03, grad_scale: 8.0 +2023-03-09 09:18:32,644 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.83 vs. limit=5.0 +2023-03-09 09:19:16,816 INFO [train.py:898] (2/4) Epoch 17, batch 1600, loss[loss=0.1658, simple_loss=0.2461, pruned_loss=0.04273, over 18361.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2589, pruned_loss=0.04138, over 3584787.47 frames. ], batch size: 46, lr: 6.67e-03, grad_scale: 8.0 +2023-03-09 09:19:41,445 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.908e+02 2.635e+02 3.092e+02 3.637e+02 7.676e+02, threshold=6.183e+02, percent-clipped=1.0 +2023-03-09 09:20:15,865 INFO [train.py:898] (2/4) Epoch 17, batch 1650, loss[loss=0.1496, simple_loss=0.2424, pruned_loss=0.02836, over 18360.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2586, pruned_loss=0.04139, over 3572356.15 frames. ], batch size: 46, lr: 6.67e-03, grad_scale: 8.0 +2023-03-09 09:20:46,584 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59820.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:21:14,504 INFO [train.py:898] (2/4) Epoch 17, batch 1700, loss[loss=0.1719, simple_loss=0.262, pruned_loss=0.04091, over 17127.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2588, pruned_loss=0.04161, over 3573392.44 frames. 
], batch size: 78, lr: 6.67e-03, grad_scale: 8.0 +2023-03-09 09:21:18,252 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59848.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:21:19,430 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.6249, 6.1576, 5.6522, 5.9280, 5.7712, 5.6491, 6.2238, 6.1550], + device='cuda:2'), covar=tensor([0.1209, 0.0675, 0.0358, 0.0655, 0.1293, 0.0699, 0.0554, 0.0649], + device='cuda:2'), in_proj_covar=tensor([0.0585, 0.0500, 0.0365, 0.0523, 0.0722, 0.0523, 0.0708, 0.0534], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 09:21:31,003 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59858.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:21:38,856 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.932e+02 2.816e+02 3.196e+02 3.802e+02 1.399e+03, threshold=6.391e+02, percent-clipped=5.0 +2023-03-09 09:21:43,512 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=59868.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:21:55,829 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7812, 4.9576, 5.0027, 5.0740, 4.8126, 5.5886, 5.2902, 4.8924], + device='cuda:2'), covar=tensor([0.1184, 0.0711, 0.0806, 0.0703, 0.1400, 0.0744, 0.0649, 0.1694], + device='cuda:2'), in_proj_covar=tensor([0.0334, 0.0270, 0.0290, 0.0287, 0.0317, 0.0399, 0.0261, 0.0389], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:2') +2023-03-09 09:22:07,245 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59889.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:22:13,228 INFO [train.py:898] (2/4) Epoch 17, batch 1750, loss[loss=0.2043, simple_loss=0.2892, pruned_loss=0.05969, over 18485.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2587, pruned_loss=0.04146, over 3589490.60 frames. ], batch size: 59, lr: 6.67e-03, grad_scale: 8.0 +2023-03-09 09:22:30,589 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59909.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:22:32,744 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.4334, 4.8573, 4.5181, 4.6815, 4.5563, 4.5259, 4.9165, 4.8188], + device='cuda:2'), covar=tensor([0.1143, 0.0680, 0.1474, 0.0658, 0.1404, 0.0641, 0.0679, 0.0728], + device='cuda:2'), in_proj_covar=tensor([0.0586, 0.0499, 0.0367, 0.0523, 0.0723, 0.0524, 0.0710, 0.0534], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 09:22:48,383 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6649, 5.2320, 5.2138, 5.2146, 4.7060, 5.0828, 4.5525, 5.0812], + device='cuda:2'), covar=tensor([0.0253, 0.0285, 0.0199, 0.0406, 0.0435, 0.0220, 0.1084, 0.0300], + device='cuda:2'), in_proj_covar=tensor([0.0202, 0.0248, 0.0240, 0.0303, 0.0258, 0.0251, 0.0296, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 09:23:11,683 INFO [train.py:898] (2/4) Epoch 17, batch 1800, loss[loss=0.1525, simple_loss=0.2451, pruned_loss=0.02992, over 18268.00 frames. ], tot_loss[loss=0.171, simple_loss=0.259, pruned_loss=0.04156, over 3577667.73 frames. 
], batch size: 49, lr: 6.66e-03, grad_scale: 8.0 +2023-03-09 09:23:17,878 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59950.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:23:35,356 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.961e+02 2.799e+02 3.065e+02 3.642e+02 5.911e+02, threshold=6.130e+02, percent-clipped=0.0 +2023-03-09 09:24:10,362 INFO [train.py:898] (2/4) Epoch 17, batch 1850, loss[loss=0.1821, simple_loss=0.2694, pruned_loss=0.04737, over 18339.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2594, pruned_loss=0.04174, over 3570834.10 frames. ], batch size: 56, lr: 6.66e-03, grad_scale: 8.0 +2023-03-09 09:25:08,357 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60040.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:25:13,972 INFO [train.py:898] (2/4) Epoch 17, batch 1900, loss[loss=0.1691, simple_loss=0.2599, pruned_loss=0.03912, over 18366.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2597, pruned_loss=0.04175, over 3574183.96 frames. ], batch size: 50, lr: 6.66e-03, grad_scale: 8.0 +2023-03-09 09:25:22,345 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60052.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:25:37,666 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.972e+02 2.751e+02 3.346e+02 4.320e+02 1.006e+03, threshold=6.692e+02, percent-clipped=5.0 +2023-03-09 09:25:38,018 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60065.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:25:43,925 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.04 vs. limit=5.0 +2023-03-09 09:25:53,061 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60078.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:26:12,274 INFO [train.py:898] (2/4) Epoch 17, batch 1950, loss[loss=0.141, simple_loss=0.2327, pruned_loss=0.02458, over 18555.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2595, pruned_loss=0.04168, over 3562774.61 frames. ], batch size: 45, lr: 6.66e-03, grad_scale: 8.0 +2023-03-09 09:26:19,409 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60101.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 09:26:33,392 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60113.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 09:26:48,363 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60126.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:26:51,146 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-09 09:27:03,134 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5498, 2.3291, 2.5052, 2.5792, 3.2205, 5.0281, 4.6519, 3.7424], + device='cuda:2'), covar=tensor([0.1700, 0.2341, 0.3108, 0.1849, 0.2332, 0.0173, 0.0406, 0.0698], + device='cuda:2'), in_proj_covar=tensor([0.0280, 0.0332, 0.0361, 0.0265, 0.0379, 0.0221, 0.0286, 0.0236], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 09:27:04,140 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60139.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:27:10,607 INFO [train.py:898] (2/4) Epoch 17, batch 2000, loss[loss=0.1631, simple_loss=0.2506, pruned_loss=0.03773, over 18362.00 frames. 
], tot_loss[loss=0.1712, simple_loss=0.2594, pruned_loss=0.0415, over 3574682.43 frames. ], batch size: 46, lr: 6.65e-03, grad_scale: 8.0 +2023-03-09 09:27:14,434 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.34 vs. limit=5.0 +2023-03-09 09:27:25,315 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60158.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:27:33,337 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.223e+02 2.887e+02 3.368e+02 4.161e+02 9.381e+02, threshold=6.736e+02, percent-clipped=4.0 +2023-03-09 09:28:08,977 INFO [train.py:898] (2/4) Epoch 17, batch 2050, loss[loss=0.1854, simple_loss=0.2743, pruned_loss=0.04822, over 17817.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2599, pruned_loss=0.0415, over 3581227.17 frames. ], batch size: 70, lr: 6.65e-03, grad_scale: 8.0 +2023-03-09 09:28:12,793 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60198.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:28:19,719 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60204.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:28:21,879 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60206.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:29:07,879 INFO [train.py:898] (2/4) Epoch 17, batch 2100, loss[loss=0.1653, simple_loss=0.2529, pruned_loss=0.03887, over 18360.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2598, pruned_loss=0.04144, over 3585571.68 frames. ], batch size: 46, lr: 6.65e-03, grad_scale: 8.0 +2023-03-09 09:29:08,109 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4012, 5.2203, 5.6217, 5.5746, 5.3053, 6.1207, 5.7531, 5.4752], + device='cuda:2'), covar=tensor([0.1077, 0.0597, 0.0653, 0.0597, 0.1340, 0.0738, 0.0526, 0.1556], + device='cuda:2'), in_proj_covar=tensor([0.0337, 0.0272, 0.0292, 0.0289, 0.0320, 0.0403, 0.0263, 0.0391], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:2') +2023-03-09 09:29:08,122 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60245.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:29:24,151 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60259.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:29:30,558 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.013e+02 2.803e+02 3.288e+02 3.914e+02 1.145e+03, threshold=6.576e+02, percent-clipped=2.0 +2023-03-09 09:30:07,000 INFO [train.py:898] (2/4) Epoch 17, batch 2150, loss[loss=0.1888, simple_loss=0.2753, pruned_loss=0.05112, over 18139.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2599, pruned_loss=0.04128, over 3572443.40 frames. ], batch size: 62, lr: 6.64e-03, grad_scale: 8.0 +2023-03-09 09:30:28,616 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.78 vs. limit=2.0 +2023-03-09 09:31:03,785 INFO [train.py:898] (2/4) Epoch 17, batch 2200, loss[loss=0.172, simple_loss=0.2626, pruned_loss=0.04072, over 17204.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2605, pruned_loss=0.04153, over 3571467.85 frames. 
], batch size: 78, lr: 6.64e-03, grad_scale: 8.0 +2023-03-09 09:31:26,299 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.966e+02 2.999e+02 3.869e+02 4.912e+02 1.337e+03, threshold=7.738e+02, percent-clipped=7.0 +2023-03-09 09:32:01,240 INFO [train.py:898] (2/4) Epoch 17, batch 2250, loss[loss=0.1526, simple_loss=0.2439, pruned_loss=0.03064, over 18519.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2599, pruned_loss=0.04162, over 3574739.77 frames. ], batch size: 49, lr: 6.64e-03, grad_scale: 4.0 +2023-03-09 09:32:02,634 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60396.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 09:32:16,886 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60408.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 09:32:31,493 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60421.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:32:47,212 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60434.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:32:57,542 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.55 vs. limit=5.0 +2023-03-09 09:32:58,978 INFO [train.py:898] (2/4) Epoch 17, batch 2300, loss[loss=0.1619, simple_loss=0.2537, pruned_loss=0.03502, over 18380.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2597, pruned_loss=0.04132, over 3583521.14 frames. ], batch size: 50, lr: 6.64e-03, grad_scale: 4.0 +2023-03-09 09:33:14,205 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.76 vs. limit=5.0 +2023-03-09 09:33:23,538 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.591e+02 2.714e+02 3.247e+02 3.774e+02 7.565e+02, threshold=6.493e+02, percent-clipped=0.0 +2023-03-09 09:33:49,150 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-09 09:33:57,583 INFO [train.py:898] (2/4) Epoch 17, batch 2350, loss[loss=0.1686, simple_loss=0.2578, pruned_loss=0.03972, over 18365.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2597, pruned_loss=0.04158, over 3578685.24 frames. ], batch size: 50, lr: 6.63e-03, grad_scale: 4.0 +2023-03-09 09:34:09,167 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60504.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:34:56,306 INFO [train.py:898] (2/4) Epoch 17, batch 2400, loss[loss=0.1813, simple_loss=0.27, pruned_loss=0.04626, over 18362.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2594, pruned_loss=0.04171, over 3568931.75 frames. 
], batch size: 56, lr: 6.63e-03, grad_scale: 8.0 +2023-03-09 09:34:56,664 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60545.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:34:58,975 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3051, 2.5629, 2.4079, 2.7767, 3.2935, 3.2202, 2.9158, 2.7291], + device='cuda:2'), covar=tensor([0.0198, 0.0291, 0.0559, 0.0413, 0.0232, 0.0158, 0.0407, 0.0416], + device='cuda:2'), in_proj_covar=tensor([0.0134, 0.0127, 0.0161, 0.0152, 0.0121, 0.0106, 0.0151, 0.0148], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 09:35:02,327 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5230, 6.1005, 5.5296, 5.8768, 5.7011, 5.4983, 6.1738, 6.1240], + device='cuda:2'), covar=tensor([0.1307, 0.0704, 0.0479, 0.0672, 0.1299, 0.0734, 0.0498, 0.0582], + device='cuda:2'), in_proj_covar=tensor([0.0588, 0.0503, 0.0368, 0.0522, 0.0717, 0.0524, 0.0706, 0.0527], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 09:35:05,013 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60552.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:35:07,292 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60554.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:35:20,248 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.011e+02 2.942e+02 3.643e+02 4.208e+02 9.657e+02, threshold=7.287e+02, percent-clipped=4.0 +2023-03-09 09:35:52,019 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60593.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:35:54,157 INFO [train.py:898] (2/4) Epoch 17, batch 2450, loss[loss=0.1725, simple_loss=0.2685, pruned_loss=0.03824, over 17831.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2592, pruned_loss=0.04138, over 3577211.05 frames. ], batch size: 70, lr: 6.63e-03, grad_scale: 8.0 +2023-03-09 09:36:52,613 INFO [train.py:898] (2/4) Epoch 17, batch 2500, loss[loss=0.1649, simple_loss=0.2554, pruned_loss=0.03718, over 18318.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2597, pruned_loss=0.04134, over 3577124.35 frames. 
], batch size: 54, lr: 6.63e-03, grad_scale: 8.0 +2023-03-09 09:37:17,220 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.797e+02 2.798e+02 3.205e+02 3.786e+02 6.512e+02, threshold=6.411e+02, percent-clipped=0.0 +2023-03-09 09:37:22,249 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0618, 5.5028, 2.7647, 5.3435, 5.2258, 5.5354, 5.2969, 2.7782], + device='cuda:2'), covar=tensor([0.0202, 0.0069, 0.0807, 0.0071, 0.0075, 0.0069, 0.0100, 0.0991], + device='cuda:2'), in_proj_covar=tensor([0.0084, 0.0078, 0.0094, 0.0091, 0.0084, 0.0072, 0.0083, 0.0095], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 09:37:28,912 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9148, 5.4309, 5.4138, 5.4021, 4.9697, 5.3257, 4.7434, 5.3483], + device='cuda:2'), covar=tensor([0.0204, 0.0244, 0.0165, 0.0259, 0.0332, 0.0175, 0.0981, 0.0233], + device='cuda:2'), in_proj_covar=tensor([0.0203, 0.0249, 0.0241, 0.0305, 0.0257, 0.0253, 0.0299, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 09:37:51,493 INFO [train.py:898] (2/4) Epoch 17, batch 2550, loss[loss=0.1674, simple_loss=0.2639, pruned_loss=0.03542, over 18620.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2593, pruned_loss=0.04116, over 3582404.84 frames. ], batch size: 52, lr: 6.62e-03, grad_scale: 8.0 +2023-03-09 09:37:52,996 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60696.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:38:06,241 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60708.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:38:18,154 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60718.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:38:21,162 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60721.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:38:22,566 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1791, 4.2193, 2.5387, 4.2385, 5.3223, 2.8693, 4.2276, 4.0192], + device='cuda:2'), covar=tensor([0.0140, 0.1288, 0.1538, 0.0527, 0.0064, 0.1024, 0.0499, 0.0683], + device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0260, 0.0201, 0.0193, 0.0111, 0.0180, 0.0212, 0.0220], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 09:38:36,000 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60734.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:38:47,886 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60744.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:38:48,821 INFO [train.py:898] (2/4) Epoch 17, batch 2600, loss[loss=0.1736, simple_loss=0.2659, pruned_loss=0.04069, over 18476.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2592, pruned_loss=0.04118, over 3596115.64 frames. 
], batch size: 53, lr: 6.62e-03, grad_scale: 8.0 +2023-03-09 09:39:01,917 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60756.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:39:13,465 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.006e+02 2.726e+02 3.213e+02 3.687e+02 6.855e+02, threshold=6.427e+02, percent-clipped=2.0 +2023-03-09 09:39:16,910 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60769.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:39:28,278 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60779.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:39:31,186 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60782.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:39:45,259 INFO [train.py:898] (2/4) Epoch 17, batch 2650, loss[loss=0.2008, simple_loss=0.2897, pruned_loss=0.05596, over 18090.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2593, pruned_loss=0.04158, over 3592718.66 frames. ], batch size: 62, lr: 6.62e-03, grad_scale: 8.0 +2023-03-09 09:40:08,184 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0889, 5.0652, 4.7253, 5.0398, 5.0196, 4.3689, 4.9164, 4.6923], + device='cuda:2'), covar=tensor([0.0427, 0.0457, 0.1287, 0.0698, 0.0567, 0.0490, 0.0466, 0.1082], + device='cuda:2'), in_proj_covar=tensor([0.0459, 0.0522, 0.0672, 0.0412, 0.0417, 0.0482, 0.0512, 0.0640], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 09:40:43,826 INFO [train.py:898] (2/4) Epoch 17, batch 2700, loss[loss=0.1759, simple_loss=0.2625, pruned_loss=0.04462, over 18636.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2592, pruned_loss=0.04154, over 3588943.10 frames. ], batch size: 52, lr: 6.61e-03, grad_scale: 8.0 +2023-03-09 09:40:54,830 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60854.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:41:08,391 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.851e+02 2.950e+02 3.306e+02 4.033e+02 9.458e+02, threshold=6.612e+02, percent-clipped=5.0 +2023-03-09 09:41:15,038 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1221, 4.2761, 2.6318, 4.0686, 5.2685, 2.4454, 3.9470, 4.0515], + device='cuda:2'), covar=tensor([0.0119, 0.0951, 0.1497, 0.0629, 0.0074, 0.1250, 0.0650, 0.0627], + device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0261, 0.0201, 0.0194, 0.0111, 0.0180, 0.0213, 0.0219], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 09:41:42,542 INFO [train.py:898] (2/4) Epoch 17, batch 2750, loss[loss=0.1702, simple_loss=0.2578, pruned_loss=0.04126, over 18634.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2585, pruned_loss=0.04114, over 3599195.49 frames. 
], batch size: 52, lr: 6.61e-03, grad_scale: 8.0 +2023-03-09 09:41:51,126 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=60902.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:42:39,348 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7415, 5.3433, 5.3023, 5.3018, 4.8214, 5.1744, 4.5844, 5.1775], + device='cuda:2'), covar=tensor([0.0266, 0.0287, 0.0190, 0.0412, 0.0402, 0.0222, 0.1123, 0.0353], + device='cuda:2'), in_proj_covar=tensor([0.0204, 0.0250, 0.0242, 0.0306, 0.0258, 0.0253, 0.0297, 0.0247], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 09:42:41,195 INFO [train.py:898] (2/4) Epoch 17, batch 2800, loss[loss=0.1846, simple_loss=0.2765, pruned_loss=0.04632, over 18296.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2589, pruned_loss=0.04105, over 3599840.25 frames. ], batch size: 54, lr: 6.61e-03, grad_scale: 8.0 +2023-03-09 09:43:06,401 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.038e+02 2.738e+02 3.235e+02 3.720e+02 7.893e+02, threshold=6.471e+02, percent-clipped=2.0 +2023-03-09 09:43:16,085 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-03-09 09:43:40,113 INFO [train.py:898] (2/4) Epoch 17, batch 2850, loss[loss=0.15, simple_loss=0.24, pruned_loss=0.03, over 18530.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2593, pruned_loss=0.04135, over 3587587.69 frames. ], batch size: 49, lr: 6.61e-03, grad_scale: 8.0 +2023-03-09 09:43:53,436 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7512, 3.6296, 4.8305, 4.2134, 3.1939, 2.9317, 4.2842, 5.0909], + device='cuda:2'), covar=tensor([0.0792, 0.1482, 0.0208, 0.0443, 0.0957, 0.1243, 0.0412, 0.0181], + device='cuda:2'), in_proj_covar=tensor([0.0145, 0.0265, 0.0137, 0.0176, 0.0186, 0.0187, 0.0188, 0.0181], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 09:44:27,396 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7345, 3.6187, 3.5260, 3.1587, 3.4058, 2.8116, 2.7518, 3.7039], + device='cuda:2'), covar=tensor([0.0070, 0.0111, 0.0082, 0.0134, 0.0110, 0.0203, 0.0209, 0.0069], + device='cuda:2'), in_proj_covar=tensor([0.0127, 0.0150, 0.0129, 0.0180, 0.0133, 0.0172, 0.0176, 0.0111], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 09:44:39,448 INFO [train.py:898] (2/4) Epoch 17, batch 2900, loss[loss=0.162, simple_loss=0.2521, pruned_loss=0.03598, over 18510.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2585, pruned_loss=0.04113, over 3589266.02 frames. 
], batch size: 47, lr: 6.60e-03, grad_scale: 8.0 +2023-03-09 09:44:58,345 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61061.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:45:04,193 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.725e+02 2.779e+02 3.469e+02 4.268e+02 9.879e+02, threshold=6.938e+02, percent-clipped=3.0 +2023-03-09 09:45:13,448 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61074.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:45:34,721 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6391, 3.5616, 3.4760, 3.0097, 3.3545, 2.7039, 2.6068, 3.6759], + device='cuda:2'), covar=tensor([0.0054, 0.0082, 0.0075, 0.0128, 0.0088, 0.0185, 0.0197, 0.0058], + device='cuda:2'), in_proj_covar=tensor([0.0126, 0.0149, 0.0128, 0.0179, 0.0132, 0.0172, 0.0175, 0.0111], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 09:45:37,767 INFO [train.py:898] (2/4) Epoch 17, batch 2950, loss[loss=0.1737, simple_loss=0.2648, pruned_loss=0.04127, over 18314.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2589, pruned_loss=0.04127, over 3592131.55 frames. ], batch size: 57, lr: 6.60e-03, grad_scale: 4.0 +2023-03-09 09:45:59,979 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9864, 4.5688, 4.2134, 4.3165, 4.0770, 4.7713, 4.5049, 4.1554], + device='cuda:2'), covar=tensor([0.1514, 0.1147, 0.1079, 0.0838, 0.1849, 0.1192, 0.0720, 0.1939], + device='cuda:2'), in_proj_covar=tensor([0.0332, 0.0266, 0.0289, 0.0285, 0.0314, 0.0396, 0.0258, 0.0385], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:2') +2023-03-09 09:46:09,537 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61122.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:46:36,107 INFO [train.py:898] (2/4) Epoch 17, batch 3000, loss[loss=0.1744, simple_loss=0.2635, pruned_loss=0.0426, over 18277.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2591, pruned_loss=0.04104, over 3588751.82 frames. ], batch size: 57, lr: 6.60e-03, grad_scale: 4.0 +2023-03-09 09:46:36,107 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 09:46:44,484 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1587, 4.4202, 2.8780, 4.3739, 5.3349, 3.0282, 4.2717, 4.2384], + device='cuda:2'), covar=tensor([0.0127, 0.0882, 0.1401, 0.0534, 0.0066, 0.1039, 0.0532, 0.0587], + device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0261, 0.0201, 0.0193, 0.0112, 0.0181, 0.0214, 0.0221], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 09:46:48,250 INFO [train.py:932] (2/4) Epoch 17, validation: loss=0.1521, simple_loss=0.2525, pruned_loss=0.02589, over 944034.00 frames. +2023-03-09 09:46:48,251 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 09:47:14,275 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.190e+02 3.156e+02 3.810e+02 4.470e+02 9.676e+02, threshold=7.619e+02, percent-clipped=4.0 +2023-03-09 09:47:46,762 INFO [train.py:898] (2/4) Epoch 17, batch 3050, loss[loss=0.1804, simple_loss=0.2688, pruned_loss=0.04598, over 18563.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2596, pruned_loss=0.04109, over 3589804.55 frames. 
], batch size: 54, lr: 6.60e-03, grad_scale: 4.0 +2023-03-09 09:48:44,416 INFO [train.py:898] (2/4) Epoch 17, batch 3100, loss[loss=0.1712, simple_loss=0.2605, pruned_loss=0.04097, over 18386.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2602, pruned_loss=0.04157, over 3590586.44 frames. ], batch size: 50, lr: 6.59e-03, grad_scale: 4.0 +2023-03-09 09:49:09,874 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.926e+02 2.710e+02 3.175e+02 4.043e+02 1.016e+03, threshold=6.350e+02, percent-clipped=3.0 +2023-03-09 09:49:42,499 INFO [train.py:898] (2/4) Epoch 17, batch 3150, loss[loss=0.1727, simple_loss=0.2658, pruned_loss=0.03978, over 18619.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2589, pruned_loss=0.04127, over 3599814.81 frames. ], batch size: 52, lr: 6.59e-03, grad_scale: 4.0 +2023-03-09 09:49:52,186 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-03-09 09:50:28,967 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1983, 4.0920, 5.2905, 2.9486, 4.6827, 2.7782, 3.1961, 2.0397], + device='cuda:2'), covar=tensor([0.0876, 0.0715, 0.0111, 0.0801, 0.0507, 0.2308, 0.2572, 0.1961], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0236, 0.0164, 0.0187, 0.0247, 0.0262, 0.0313, 0.0227], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 09:50:40,781 INFO [train.py:898] (2/4) Epoch 17, batch 3200, loss[loss=0.1797, simple_loss=0.2712, pruned_loss=0.04412, over 18404.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2589, pruned_loss=0.04149, over 3592885.55 frames. ], batch size: 52, lr: 6.59e-03, grad_scale: 8.0 +2023-03-09 09:51:06,763 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.872e+02 2.721e+02 3.197e+02 3.884e+02 6.601e+02, threshold=6.394e+02, percent-clipped=2.0 +2023-03-09 09:51:14,910 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61374.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:51:21,955 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8949, 3.7549, 5.0428, 2.7036, 4.5010, 2.6694, 3.1326, 1.8869], + device='cuda:2'), covar=tensor([0.1057, 0.0848, 0.0127, 0.0900, 0.0532, 0.2315, 0.2439, 0.1969], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0236, 0.0164, 0.0187, 0.0247, 0.0263, 0.0314, 0.0228], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 09:51:23,363 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-09 09:51:38,942 INFO [train.py:898] (2/4) Epoch 17, batch 3250, loss[loss=0.1618, simple_loss=0.2515, pruned_loss=0.03604, over 18379.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.259, pruned_loss=0.0413, over 3597556.38 frames. 
], batch size: 50, lr: 6.59e-03, grad_scale: 8.0 +2023-03-09 09:51:59,901 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8959, 4.5928, 4.6505, 3.2362, 3.7263, 3.4613, 2.6144, 2.3696], + device='cuda:2'), covar=tensor([0.0178, 0.0109, 0.0052, 0.0312, 0.0283, 0.0187, 0.0687, 0.0867], + device='cuda:2'), in_proj_covar=tensor([0.0066, 0.0055, 0.0058, 0.0065, 0.0086, 0.0062, 0.0076, 0.0081], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 09:52:05,257 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61417.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:52:10,907 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=61422.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:52:24,333 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0531, 4.1119, 2.4708, 4.1399, 5.1963, 2.4744, 3.9094, 4.1433], + device='cuda:2'), covar=tensor([0.0145, 0.1113, 0.1592, 0.0575, 0.0075, 0.1246, 0.0638, 0.0616], + device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0257, 0.0198, 0.0191, 0.0111, 0.0178, 0.0211, 0.0218], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 09:52:37,388 INFO [train.py:898] (2/4) Epoch 17, batch 3300, loss[loss=0.1747, simple_loss=0.2594, pruned_loss=0.04504, over 18494.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2586, pruned_loss=0.0411, over 3608923.28 frames. ], batch size: 51, lr: 6.58e-03, grad_scale: 8.0 +2023-03-09 09:53:01,553 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4233, 2.6577, 2.3915, 2.7674, 3.4469, 3.3559, 2.9323, 2.8680], + device='cuda:2'), covar=tensor([0.0171, 0.0299, 0.0611, 0.0373, 0.0196, 0.0176, 0.0434, 0.0346], + device='cuda:2'), in_proj_covar=tensor([0.0131, 0.0126, 0.0158, 0.0151, 0.0118, 0.0107, 0.0148, 0.0144], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 09:53:02,170 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.987e+02 2.924e+02 3.407e+02 4.178e+02 8.081e+02, threshold=6.814e+02, percent-clipped=6.0 +2023-03-09 09:53:14,795 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-03-09 09:53:35,248 INFO [train.py:898] (2/4) Epoch 17, batch 3350, loss[loss=0.1729, simple_loss=0.2682, pruned_loss=0.03885, over 15919.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2581, pruned_loss=0.04107, over 3598361.78 frames. ], batch size: 94, lr: 6.58e-03, grad_scale: 8.0 +2023-03-09 09:54:33,206 INFO [train.py:898] (2/4) Epoch 17, batch 3400, loss[loss=0.1534, simple_loss=0.2359, pruned_loss=0.03548, over 18499.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2572, pruned_loss=0.04073, over 3593988.74 frames. ], batch size: 47, lr: 6.58e-03, grad_scale: 8.0 +2023-03-09 09:54:58,509 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.971e+02 2.720e+02 3.056e+02 3.431e+02 6.999e+02, threshold=6.113e+02, percent-clipped=1.0 +2023-03-09 09:55:31,067 INFO [train.py:898] (2/4) Epoch 17, batch 3450, loss[loss=0.1401, simple_loss=0.2203, pruned_loss=0.02992, over 18161.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.257, pruned_loss=0.04034, over 3598122.66 frames. 
], batch size: 44, lr: 6.57e-03, grad_scale: 8.0 +2023-03-09 09:56:30,010 INFO [train.py:898] (2/4) Epoch 17, batch 3500, loss[loss=0.1544, simple_loss=0.2342, pruned_loss=0.03728, over 18484.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2571, pruned_loss=0.04044, over 3589981.46 frames. ], batch size: 44, lr: 6.57e-03, grad_scale: 8.0 +2023-03-09 09:56:38,540 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6188, 4.1945, 4.1193, 3.1175, 3.5678, 3.3470, 2.4677, 2.1050], + device='cuda:2'), covar=tensor([0.0232, 0.0128, 0.0094, 0.0315, 0.0335, 0.0202, 0.0735, 0.0888], + device='cuda:2'), in_proj_covar=tensor([0.0066, 0.0055, 0.0058, 0.0065, 0.0085, 0.0062, 0.0075, 0.0081], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 09:56:56,118 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.780e+02 2.828e+02 3.319e+02 4.046e+02 7.174e+02, threshold=6.638e+02, percent-clipped=2.0 +2023-03-09 09:57:25,970 INFO [train.py:898] (2/4) Epoch 17, batch 3550, loss[loss=0.1721, simple_loss=0.2721, pruned_loss=0.03599, over 17261.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2577, pruned_loss=0.04062, over 3585143.47 frames. ], batch size: 78, lr: 6.57e-03, grad_scale: 8.0 +2023-03-09 09:57:27,424 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5344, 2.7263, 2.4959, 2.8936, 3.5279, 3.4213, 3.0113, 2.8809], + device='cuda:2'), covar=tensor([0.0189, 0.0268, 0.0557, 0.0347, 0.0176, 0.0160, 0.0396, 0.0385], + device='cuda:2'), in_proj_covar=tensor([0.0133, 0.0126, 0.0159, 0.0151, 0.0119, 0.0107, 0.0148, 0.0146], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 09:57:36,234 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0977, 3.6921, 5.0400, 2.8800, 4.4948, 2.6309, 3.0656, 1.6523], + device='cuda:2'), covar=tensor([0.1002, 0.0879, 0.0109, 0.0875, 0.0482, 0.2483, 0.2655, 0.2228], + device='cuda:2'), in_proj_covar=tensor([0.0218, 0.0239, 0.0168, 0.0190, 0.0251, 0.0266, 0.0319, 0.0230], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 09:57:50,676 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61717.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:58:07,025 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61732.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:58:18,924 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61743.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:58:20,834 INFO [train.py:898] (2/4) Epoch 17, batch 3600, loss[loss=0.1799, simple_loss=0.2723, pruned_loss=0.04375, over 18304.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2581, pruned_loss=0.04069, over 3591599.78 frames. ], batch size: 57, lr: 6.57e-03, grad_scale: 8.0 +2023-03-09 09:58:42,564 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=61765.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 09:58:44,428 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.882e+02 3.033e+02 3.500e+02 4.086e+02 7.199e+02, threshold=7.000e+02, percent-clipped=1.0 +2023-03-09 09:59:23,039 INFO [train.py:898] (2/4) Epoch 18, batch 0, loss[loss=0.1625, simple_loss=0.2469, pruned_loss=0.03902, over 18152.00 frames. 
], tot_loss[loss=0.1625, simple_loss=0.2469, pruned_loss=0.03902, over 18152.00 frames. ], batch size: 44, lr: 6.38e-03, grad_scale: 8.0 +2023-03-09 09:59:23,039 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 09:59:34,862 INFO [train.py:932] (2/4) Epoch 18, validation: loss=0.1526, simple_loss=0.2531, pruned_loss=0.0261, over 944034.00 frames. +2023-03-09 09:59:34,863 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 09:59:50,896 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61793.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:00:05,799 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61804.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:00:34,744 INFO [train.py:898] (2/4) Epoch 18, batch 50, loss[loss=0.1646, simple_loss=0.2599, pruned_loss=0.03462, over 18566.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.259, pruned_loss=0.04121, over 815170.86 frames. ], batch size: 54, lr: 6.37e-03, grad_scale: 8.0 +2023-03-09 10:01:20,168 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.961e+02 2.892e+02 3.419e+02 4.081e+02 7.756e+02, threshold=6.838e+02, percent-clipped=1.0 +2023-03-09 10:01:33,981 INFO [train.py:898] (2/4) Epoch 18, batch 100, loss[loss=0.1752, simple_loss=0.266, pruned_loss=0.04216, over 18500.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2581, pruned_loss=0.04085, over 1428614.67 frames. ], batch size: 53, lr: 6.37e-03, grad_scale: 8.0 +2023-03-09 10:02:03,137 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-09 10:02:15,398 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61914.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:02:32,586 INFO [train.py:898] (2/4) Epoch 18, batch 150, loss[loss=0.1655, simple_loss=0.2461, pruned_loss=0.04242, over 18278.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2588, pruned_loss=0.04142, over 1905925.98 frames. ], batch size: 49, lr: 6.37e-03, grad_scale: 8.0 +2023-03-09 10:02:49,409 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.95 vs. limit=5.0 +2023-03-09 10:03:17,466 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.774e+02 2.985e+02 3.348e+02 3.997e+02 9.242e+02, threshold=6.695e+02, percent-clipped=1.0 +2023-03-09 10:03:27,659 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61975.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:03:32,048 INFO [train.py:898] (2/4) Epoch 18, batch 200, loss[loss=0.1507, simple_loss=0.2375, pruned_loss=0.03201, over 18494.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2584, pruned_loss=0.0414, over 2276676.90 frames. ], batch size: 44, lr: 6.37e-03, grad_scale: 8.0 +2023-03-09 10:04:35,248 INFO [train.py:898] (2/4) Epoch 18, batch 250, loss[loss=0.2325, simple_loss=0.3067, pruned_loss=0.07917, over 11927.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.26, pruned_loss=0.04209, over 2559626.10 frames. 
], batch size: 130, lr: 6.36e-03, grad_scale: 8.0 +2023-03-09 10:04:38,974 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8989, 5.1207, 2.3569, 4.9537, 4.8964, 5.1554, 4.9330, 2.4683], + device='cuda:2'), covar=tensor([0.0204, 0.0083, 0.0926, 0.0112, 0.0087, 0.0081, 0.0103, 0.1077], + device='cuda:2'), in_proj_covar=tensor([0.0084, 0.0078, 0.0094, 0.0091, 0.0083, 0.0072, 0.0083, 0.0094], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 10:04:40,240 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7991, 3.7010, 5.1249, 4.6111, 3.5165, 3.1364, 4.6275, 5.3131], + device='cuda:2'), covar=tensor([0.0804, 0.1658, 0.0163, 0.0347, 0.0786, 0.1129, 0.0335, 0.0252], + device='cuda:2'), in_proj_covar=tensor([0.0145, 0.0268, 0.0138, 0.0177, 0.0187, 0.0188, 0.0190, 0.0184], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 10:04:45,088 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 10:05:14,875 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4027, 3.1084, 1.9114, 4.2105, 2.9078, 3.8023, 2.0849, 3.5166], + device='cuda:2'), covar=tensor([0.0577, 0.0872, 0.1554, 0.0480, 0.0846, 0.0331, 0.1385, 0.0519], + device='cuda:2'), in_proj_covar=tensor([0.0207, 0.0221, 0.0187, 0.0273, 0.0193, 0.0262, 0.0200, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 10:05:19,648 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.960e+02 2.782e+02 3.298e+02 4.084e+02 9.101e+02, threshold=6.597e+02, percent-clipped=2.0 +2023-03-09 10:05:34,835 INFO [train.py:898] (2/4) Epoch 18, batch 300, loss[loss=0.1796, simple_loss=0.2655, pruned_loss=0.04685, over 18363.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2598, pruned_loss=0.04206, over 2783294.81 frames. ], batch size: 56, lr: 6.36e-03, grad_scale: 8.0 +2023-03-09 10:05:45,148 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62088.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:05:57,587 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62099.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:06:24,874 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62122.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:06:33,288 INFO [train.py:898] (2/4) Epoch 18, batch 350, loss[loss=0.1605, simple_loss=0.2513, pruned_loss=0.03487, over 18414.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2594, pruned_loss=0.04154, over 2953905.84 frames. ], batch size: 48, lr: 6.36e-03, grad_scale: 8.0 +2023-03-09 10:06:35,400 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-09 10:07:17,198 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.168e+02 2.722e+02 3.288e+02 4.143e+02 7.377e+02, threshold=6.576e+02, percent-clipped=3.0 +2023-03-09 10:07:32,440 INFO [train.py:898] (2/4) Epoch 18, batch 400, loss[loss=0.147, simple_loss=0.2277, pruned_loss=0.03313, over 18241.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2592, pruned_loss=0.04124, over 3106525.12 frames. 
], batch size: 45, lr: 6.36e-03, grad_scale: 8.0 +2023-03-09 10:07:37,253 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62183.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:08:26,285 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.13 vs. limit=5.0 +2023-03-09 10:08:30,233 INFO [train.py:898] (2/4) Epoch 18, batch 450, loss[loss=0.1487, simple_loss=0.2414, pruned_loss=0.02799, over 18245.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2592, pruned_loss=0.04101, over 3224488.20 frames. ], batch size: 47, lr: 6.35e-03, grad_scale: 8.0 +2023-03-09 10:08:32,226 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-09 10:09:15,165 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.738e+02 2.782e+02 3.170e+02 3.804e+02 6.822e+02, threshold=6.341e+02, percent-clipped=2.0 +2023-03-09 10:09:18,887 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62270.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:09:22,430 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62273.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:09:26,324 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8917, 4.5981, 4.7524, 3.6083, 3.8165, 3.5707, 2.7444, 2.5755], + device='cuda:2'), covar=tensor([0.0225, 0.0187, 0.0060, 0.0270, 0.0325, 0.0220, 0.0712, 0.0829], + device='cuda:2'), in_proj_covar=tensor([0.0067, 0.0056, 0.0058, 0.0066, 0.0087, 0.0064, 0.0076, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 10:09:29,825 INFO [train.py:898] (2/4) Epoch 18, batch 500, loss[loss=0.1604, simple_loss=0.2507, pruned_loss=0.03508, over 18282.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2591, pruned_loss=0.04122, over 3292360.20 frames. ], batch size: 47, lr: 6.35e-03, grad_scale: 8.0 +2023-03-09 10:10:28,257 INFO [train.py:898] (2/4) Epoch 18, batch 550, loss[loss=0.1769, simple_loss=0.2746, pruned_loss=0.0396, over 17798.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2587, pruned_loss=0.0409, over 3357680.04 frames. ], batch size: 70, lr: 6.35e-03, grad_scale: 8.0 +2023-03-09 10:10:34,889 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62334.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:11:13,352 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.182e+02 2.712e+02 3.173e+02 3.739e+02 5.690e+02, threshold=6.346e+02, percent-clipped=0.0 +2023-03-09 10:11:27,637 INFO [train.py:898] (2/4) Epoch 18, batch 600, loss[loss=0.1464, simple_loss=0.228, pruned_loss=0.03239, over 18243.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2582, pruned_loss=0.04061, over 3402273.15 frames. ], batch size: 45, lr: 6.35e-03, grad_scale: 8.0 +2023-03-09 10:11:38,908 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62388.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:11:43,038 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62391.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:11:52,687 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62399.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:12:26,539 INFO [train.py:898] (2/4) Epoch 18, batch 650, loss[loss=0.1578, simple_loss=0.2424, pruned_loss=0.03659, over 18355.00 frames. 
], tot_loss[loss=0.1699, simple_loss=0.2586, pruned_loss=0.04062, over 3450578.83 frames. ], batch size: 46, lr: 6.34e-03, grad_scale: 8.0 +2023-03-09 10:12:35,771 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62436.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:12:48,621 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62447.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:12:55,114 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62452.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:13:12,080 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.651e+02 2.765e+02 3.266e+02 3.962e+02 6.380e+02, threshold=6.532e+02, percent-clipped=1.0 +2023-03-09 10:13:24,732 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62478.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:13:25,698 INFO [train.py:898] (2/4) Epoch 18, batch 700, loss[loss=0.1638, simple_loss=0.259, pruned_loss=0.03427, over 16159.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2588, pruned_loss=0.04061, over 3474378.90 frames. ], batch size: 94, lr: 6.34e-03, grad_scale: 8.0 +2023-03-09 10:13:47,730 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-03-09 10:14:21,566 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3605, 5.8585, 5.4687, 5.6712, 5.4502, 5.4118, 5.9590, 5.8866], + device='cuda:2'), covar=tensor([0.1118, 0.0849, 0.0519, 0.0710, 0.1401, 0.0648, 0.0529, 0.0691], + device='cuda:2'), in_proj_covar=tensor([0.0583, 0.0499, 0.0366, 0.0525, 0.0721, 0.0522, 0.0706, 0.0535], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 10:14:24,853 INFO [train.py:898] (2/4) Epoch 18, batch 750, loss[loss=0.1401, simple_loss=0.2277, pruned_loss=0.02621, over 18400.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2595, pruned_loss=0.04077, over 3505792.64 frames. ], batch size: 48, lr: 6.34e-03, grad_scale: 4.0 +2023-03-09 10:14:44,183 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62545.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:14:57,292 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.3669, 3.6857, 2.5174, 3.6156, 4.5743, 2.4666, 3.5297, 3.5536], + device='cuda:2'), covar=tensor([0.0192, 0.1134, 0.1466, 0.0627, 0.0089, 0.1184, 0.0666, 0.0749], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0256, 0.0198, 0.0192, 0.0111, 0.0177, 0.0209, 0.0218], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 10:15:10,951 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.733e+02 2.727e+02 3.281e+02 3.903e+02 8.552e+02, threshold=6.561e+02, percent-clipped=5.0 +2023-03-09 10:15:13,487 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62570.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:15:23,441 INFO [train.py:898] (2/4) Epoch 18, batch 800, loss[loss=0.173, simple_loss=0.265, pruned_loss=0.04053, over 18002.00 frames. ], tot_loss[loss=0.17, simple_loss=0.259, pruned_loss=0.04049, over 3520571.84 frames. 
], batch size: 65, lr: 6.34e-03, grad_scale: 8.0 +2023-03-09 10:15:30,658 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62585.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:15:57,042 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62606.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:16:11,027 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62618.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:16:23,390 INFO [train.py:898] (2/4) Epoch 18, batch 850, loss[loss=0.1734, simple_loss=0.2682, pruned_loss=0.03928, over 17884.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2587, pruned_loss=0.04072, over 3531954.36 frames. ], batch size: 70, lr: 6.33e-03, grad_scale: 8.0 +2023-03-09 10:16:23,596 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62629.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:16:44,073 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62646.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:16:51,459 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6942, 4.7168, 4.8343, 4.5064, 4.5134, 4.4783, 4.8709, 4.8405], + device='cuda:2'), covar=tensor([0.0066, 0.0064, 0.0053, 0.0106, 0.0059, 0.0141, 0.0070, 0.0089], + device='cuda:2'), in_proj_covar=tensor([0.0088, 0.0064, 0.0070, 0.0087, 0.0070, 0.0097, 0.0082, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003], + device='cuda:2') +2023-03-09 10:17:08,011 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62666.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:17:09,784 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.769e+02 2.717e+02 3.249e+02 3.987e+02 1.142e+03, threshold=6.498e+02, percent-clipped=2.0 +2023-03-09 10:17:22,616 INFO [train.py:898] (2/4) Epoch 18, batch 900, loss[loss=0.1553, simple_loss=0.2398, pruned_loss=0.03544, over 18257.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2589, pruned_loss=0.04054, over 3548152.95 frames. ], batch size: 45, lr: 6.33e-03, grad_scale: 8.0 +2023-03-09 10:17:46,559 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8553, 3.6144, 5.0941, 4.6321, 3.3825, 3.2652, 4.4696, 5.2863], + device='cuda:2'), covar=tensor([0.0778, 0.1671, 0.0160, 0.0311, 0.0870, 0.0995, 0.0366, 0.0183], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0273, 0.0140, 0.0179, 0.0188, 0.0189, 0.0191, 0.0187], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 10:17:56,820 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. 
limit=2.0 +2023-03-09 10:17:58,522 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3220, 3.1895, 2.2205, 4.0332, 2.8019, 3.7906, 2.1684, 3.4737], + device='cuda:2'), covar=tensor([0.0568, 0.0790, 0.1349, 0.0559, 0.0932, 0.0355, 0.1271, 0.0425], + device='cuda:2'), in_proj_covar=tensor([0.0207, 0.0220, 0.0185, 0.0271, 0.0191, 0.0262, 0.0199, 0.0196], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 10:18:19,888 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62727.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:18:21,833 INFO [train.py:898] (2/4) Epoch 18, batch 950, loss[loss=0.1536, simple_loss=0.2472, pruned_loss=0.03005, over 16117.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2583, pruned_loss=0.04046, over 3549357.34 frames. ], batch size: 94, lr: 6.33e-03, grad_scale: 8.0 +2023-03-09 10:18:22,300 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6666, 3.4188, 2.4503, 4.4439, 3.1147, 4.3112, 2.6269, 3.9978], + device='cuda:2'), covar=tensor([0.0595, 0.0811, 0.1340, 0.0484, 0.0844, 0.0356, 0.1087, 0.0386], + device='cuda:2'), in_proj_covar=tensor([0.0207, 0.0220, 0.0185, 0.0271, 0.0191, 0.0262, 0.0199, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 10:18:43,113 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62747.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:19:07,977 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.629e+02 2.750e+02 3.313e+02 3.832e+02 8.841e+02, threshold=6.625e+02, percent-clipped=2.0 +2023-03-09 10:19:14,018 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62773.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:19:20,190 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62778.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:19:21,047 INFO [train.py:898] (2/4) Epoch 18, batch 1000, loss[loss=0.1724, simple_loss=0.2662, pruned_loss=0.0393, over 18286.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2588, pruned_loss=0.04068, over 3556107.79 frames. ], batch size: 57, lr: 6.33e-03, grad_scale: 8.0 +2023-03-09 10:20:16,448 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62826.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:20:19,801 INFO [train.py:898] (2/4) Epoch 18, batch 1050, loss[loss=0.1547, simple_loss=0.2371, pruned_loss=0.03615, over 18555.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2589, pruned_loss=0.04056, over 3560312.98 frames. 
], batch size: 45, lr: 6.32e-03, grad_scale: 8.0 +2023-03-09 10:20:26,336 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62834.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:21:05,604 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.820e+02 2.730e+02 3.063e+02 3.769e+02 8.307e+02, threshold=6.126e+02, percent-clipped=1.0 +2023-03-09 10:21:11,546 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0525, 5.1403, 5.3372, 5.3959, 4.9953, 5.8656, 5.5111, 5.0632], + device='cuda:2'), covar=tensor([0.1118, 0.0591, 0.0673, 0.0704, 0.1453, 0.0774, 0.0597, 0.1632], + device='cuda:2'), in_proj_covar=tensor([0.0341, 0.0273, 0.0296, 0.0294, 0.0322, 0.0404, 0.0264, 0.0400], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:2') +2023-03-09 10:21:18,727 INFO [train.py:898] (2/4) Epoch 18, batch 1100, loss[loss=0.1946, simple_loss=0.2744, pruned_loss=0.05741, over 13051.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2596, pruned_loss=0.04099, over 3559010.27 frames. ], batch size: 129, lr: 6.32e-03, grad_scale: 8.0 +2023-03-09 10:21:44,747 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62901.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:22:17,699 INFO [train.py:898] (2/4) Epoch 18, batch 1150, loss[loss=0.1721, simple_loss=0.2627, pruned_loss=0.04074, over 18257.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2594, pruned_loss=0.04078, over 3571932.27 frames. ], batch size: 60, lr: 6.32e-03, grad_scale: 8.0 +2023-03-09 10:22:17,943 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62929.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:22:32,069 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62941.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:22:40,525 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3977, 5.3251, 5.5744, 5.6425, 5.3306, 6.1463, 5.7775, 5.3688], + device='cuda:2'), covar=tensor([0.0989, 0.0587, 0.0655, 0.0625, 0.1304, 0.0674, 0.0548, 0.1628], + device='cuda:2'), in_proj_covar=tensor([0.0344, 0.0275, 0.0298, 0.0297, 0.0324, 0.0407, 0.0267, 0.0401], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:2') +2023-03-09 10:23:03,232 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.305e+02 2.742e+02 3.290e+02 3.886e+02 6.757e+02, threshold=6.580e+02, percent-clipped=2.0 +2023-03-09 10:23:07,195 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.87 vs. limit=5.0 +2023-03-09 10:23:12,988 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5036, 3.8555, 2.2892, 3.7157, 4.7114, 2.3815, 3.5954, 3.6235], + device='cuda:2'), covar=tensor([0.0192, 0.1079, 0.1845, 0.0706, 0.0107, 0.1479, 0.0763, 0.0719], + device='cuda:2'), in_proj_covar=tensor([0.0154, 0.0258, 0.0200, 0.0194, 0.0113, 0.0178, 0.0211, 0.0219], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 10:23:13,912 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=62977.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:23:15,931 INFO [train.py:898] (2/4) Epoch 18, batch 1200, loss[loss=0.1493, simple_loss=0.2323, pruned_loss=0.03316, over 18570.00 frames. 
], tot_loss[loss=0.1704, simple_loss=0.2594, pruned_loss=0.04073, over 3571729.09 frames. ], batch size: 49, lr: 6.32e-03, grad_scale: 8.0 +2023-03-09 10:23:48,136 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0061, 3.6731, 5.0515, 2.9472, 4.3516, 2.7112, 3.0460, 1.8612], + device='cuda:2'), covar=tensor([0.1025, 0.0906, 0.0139, 0.0909, 0.0574, 0.2447, 0.2671, 0.2123], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0239, 0.0168, 0.0189, 0.0249, 0.0265, 0.0316, 0.0227], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 10:24:06,509 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63022.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:24:14,862 INFO [train.py:898] (2/4) Epoch 18, batch 1250, loss[loss=0.1504, simple_loss=0.2331, pruned_loss=0.03387, over 18582.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2588, pruned_loss=0.04068, over 3580473.00 frames. ], batch size: 45, lr: 6.31e-03, grad_scale: 8.0 +2023-03-09 10:24:36,012 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63047.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:24:44,732 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7227, 3.6939, 4.9331, 4.4315, 3.2123, 3.1660, 4.4331, 5.1874], + device='cuda:2'), covar=tensor([0.0818, 0.1671, 0.0207, 0.0397, 0.1013, 0.1079, 0.0372, 0.0344], + device='cuda:2'), in_proj_covar=tensor([0.0146, 0.0269, 0.0139, 0.0177, 0.0186, 0.0186, 0.0189, 0.0186], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 10:25:00,073 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.748e+02 2.862e+02 3.402e+02 4.233e+02 9.717e+02, threshold=6.805e+02, percent-clipped=8.0 +2023-03-09 10:25:13,512 INFO [train.py:898] (2/4) Epoch 18, batch 1300, loss[loss=0.1703, simple_loss=0.2591, pruned_loss=0.04074, over 18357.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2587, pruned_loss=0.04073, over 3586129.82 frames. ], batch size: 55, lr: 6.31e-03, grad_scale: 8.0 +2023-03-09 10:25:32,655 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63095.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:26:12,336 INFO [train.py:898] (2/4) Epoch 18, batch 1350, loss[loss=0.1876, simple_loss=0.2672, pruned_loss=0.05401, over 12987.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2572, pruned_loss=0.04032, over 3582781.62 frames. 
], batch size: 129, lr: 6.31e-03, grad_scale: 8.0 +2023-03-09 10:26:13,177 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63129.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:26:19,042 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7345, 3.8944, 5.0546, 4.5481, 3.4481, 3.1626, 4.5940, 5.3199], + device='cuda:2'), covar=tensor([0.0827, 0.1363, 0.0172, 0.0370, 0.0868, 0.1108, 0.0346, 0.0220], + device='cuda:2'), in_proj_covar=tensor([0.0146, 0.0269, 0.0139, 0.0177, 0.0186, 0.0187, 0.0190, 0.0186], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 10:26:58,487 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.671e+02 2.877e+02 3.417e+02 4.263e+02 7.577e+02, threshold=6.833e+02, percent-clipped=1.0 +2023-03-09 10:27:11,797 INFO [train.py:898] (2/4) Epoch 18, batch 1400, loss[loss=0.1654, simple_loss=0.2542, pruned_loss=0.03824, over 18280.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2569, pruned_loss=0.03995, over 3596999.75 frames. ], batch size: 49, lr: 6.31e-03, grad_scale: 8.0 +2023-03-09 10:27:38,956 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63201.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:28:02,594 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63221.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:28:11,965 INFO [train.py:898] (2/4) Epoch 18, batch 1450, loss[loss=0.1621, simple_loss=0.2394, pruned_loss=0.04238, over 17637.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2562, pruned_loss=0.03986, over 3595277.43 frames. ], batch size: 39, lr: 6.30e-03, grad_scale: 8.0 +2023-03-09 10:28:21,011 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.39 vs. limit=5.0 +2023-03-09 10:28:26,207 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63241.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:28:35,861 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63249.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:28:57,803 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.907e+02 2.745e+02 3.338e+02 4.107e+02 1.231e+03, threshold=6.676e+02, percent-clipped=2.0 +2023-03-09 10:29:10,045 INFO [train.py:898] (2/4) Epoch 18, batch 1500, loss[loss=0.1649, simple_loss=0.2513, pruned_loss=0.03929, over 18382.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2567, pruned_loss=0.03988, over 3603204.23 frames. ], batch size: 50, lr: 6.30e-03, grad_scale: 8.0 +2023-03-09 10:29:14,245 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63282.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:29:22,577 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63289.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:30:01,283 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63322.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:30:09,223 INFO [train.py:898] (2/4) Epoch 18, batch 1550, loss[loss=0.1542, simple_loss=0.2488, pruned_loss=0.02983, over 18498.00 frames. ], tot_loss[loss=0.1675, simple_loss=0.2558, pruned_loss=0.03956, over 3600737.91 frames. 
], batch size: 51, lr: 6.30e-03, grad_scale: 8.0 +2023-03-09 10:30:55,758 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.939e+02 2.883e+02 3.563e+02 4.171e+02 7.864e+02, threshold=7.125e+02, percent-clipped=2.0 +2023-03-09 10:30:58,245 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63370.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:31:08,379 INFO [train.py:898] (2/4) Epoch 18, batch 1600, loss[loss=0.1938, simple_loss=0.2784, pruned_loss=0.05464, over 18248.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2559, pruned_loss=0.03974, over 3594718.93 frames. ], batch size: 60, lr: 6.30e-03, grad_scale: 8.0 +2023-03-09 10:31:58,091 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63420.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:32:00,387 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63422.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:32:08,059 INFO [train.py:898] (2/4) Epoch 18, batch 1650, loss[loss=0.1673, simple_loss=0.2551, pruned_loss=0.03973, over 18587.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.257, pruned_loss=0.03995, over 3586350.25 frames. ], batch size: 54, lr: 6.29e-03, grad_scale: 8.0 +2023-03-09 10:32:08,349 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63429.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:32:54,723 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.082e+02 2.837e+02 3.509e+02 4.099e+02 8.239e+02, threshold=7.017e+02, percent-clipped=1.0 +2023-03-09 10:32:57,806 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-09 10:33:05,403 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63477.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:33:07,400 INFO [train.py:898] (2/4) Epoch 18, batch 1700, loss[loss=0.2384, simple_loss=0.3086, pruned_loss=0.08412, over 12220.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2574, pruned_loss=0.03998, over 3581648.92 frames. ], batch size: 130, lr: 6.29e-03, grad_scale: 8.0 +2023-03-09 10:33:10,132 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63481.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:33:12,262 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63483.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:34:06,942 INFO [train.py:898] (2/4) Epoch 18, batch 1750, loss[loss=0.1468, simple_loss=0.2322, pruned_loss=0.03068, over 18255.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.2571, pruned_loss=0.04001, over 3588484.13 frames. ], batch size: 47, lr: 6.29e-03, grad_scale: 8.0 +2023-03-09 10:34:26,220 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.62 vs. limit=2.0 +2023-03-09 10:34:52,622 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.976e+02 2.714e+02 3.242e+02 3.831e+02 6.997e+02, threshold=6.484e+02, percent-clipped=0.0 +2023-03-09 10:34:54,110 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63569.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:35:03,625 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63577.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:35:05,723 INFO [train.py:898] (2/4) Epoch 18, batch 1800, loss[loss=0.2099, simple_loss=0.2887, pruned_loss=0.06554, over 12050.00 frames. 
], tot_loss[loss=0.168, simple_loss=0.2565, pruned_loss=0.03976, over 3575674.28 frames. ], batch size: 130, lr: 6.29e-03, grad_scale: 8.0 +2023-03-09 10:35:13,939 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63586.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:36:05,206 INFO [train.py:898] (2/4) Epoch 18, batch 1850, loss[loss=0.1736, simple_loss=0.272, pruned_loss=0.03757, over 17774.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2563, pruned_loss=0.03949, over 3582969.85 frames. ], batch size: 70, lr: 6.28e-03, grad_scale: 8.0 +2023-03-09 10:36:06,711 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63630.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:36:14,575 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8894, 4.1394, 4.1280, 4.2096, 3.7863, 4.0768, 3.7787, 4.0783], + device='cuda:2'), covar=tensor([0.0249, 0.0380, 0.0254, 0.0471, 0.0355, 0.0241, 0.0849, 0.0342], + device='cuda:2'), in_proj_covar=tensor([0.0207, 0.0251, 0.0242, 0.0309, 0.0262, 0.0257, 0.0298, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 10:36:26,788 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63647.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:36:51,538 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.913e+02 2.867e+02 3.371e+02 3.791e+02 7.715e+02, threshold=6.743e+02, percent-clipped=3.0 +2023-03-09 10:37:04,239 INFO [train.py:898] (2/4) Epoch 18, batch 1900, loss[loss=0.1664, simple_loss=0.2604, pruned_loss=0.03616, over 18118.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2564, pruned_loss=0.03968, over 3585226.08 frames. ], batch size: 62, lr: 6.28e-03, grad_scale: 8.0 +2023-03-09 10:37:19,041 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8570, 3.6559, 5.3434, 3.0916, 4.7588, 2.6708, 3.1298, 2.0428], + device='cuda:2'), covar=tensor([0.1111, 0.0930, 0.0112, 0.0699, 0.0404, 0.2443, 0.2639, 0.1966], + device='cuda:2'), in_proj_covar=tensor([0.0216, 0.0239, 0.0170, 0.0191, 0.0251, 0.0265, 0.0317, 0.0227], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 10:37:32,356 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.56 vs. limit=5.0 +2023-03-09 10:38:02,783 INFO [train.py:898] (2/4) Epoch 18, batch 1950, loss[loss=0.2184, simple_loss=0.297, pruned_loss=0.06989, over 16346.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2574, pruned_loss=0.04012, over 3586921.40 frames. 
], batch size: 94, lr: 6.28e-03, grad_scale: 8.0 +2023-03-09 10:38:19,612 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9985, 4.6333, 4.7393, 3.6061, 3.8818, 3.5730, 2.8901, 2.4273], + device='cuda:2'), covar=tensor([0.0197, 0.0157, 0.0069, 0.0263, 0.0315, 0.0227, 0.0612, 0.0878], + device='cuda:2'), in_proj_covar=tensor([0.0067, 0.0056, 0.0059, 0.0066, 0.0086, 0.0064, 0.0075, 0.0082], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 10:38:50,309 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.821e+02 2.688e+02 3.189e+02 3.862e+02 9.985e+02, threshold=6.379e+02, percent-clipped=3.0 +2023-03-09 10:38:59,655 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63776.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:39:02,029 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63778.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:39:02,984 INFO [train.py:898] (2/4) Epoch 18, batch 2000, loss[loss=0.157, simple_loss=0.2443, pruned_loss=0.03484, over 18502.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2571, pruned_loss=0.03991, over 3585490.34 frames. ], batch size: 47, lr: 6.28e-03, grad_scale: 8.0 +2023-03-09 10:39:40,075 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63810.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:40:02,104 INFO [train.py:898] (2/4) Epoch 18, batch 2050, loss[loss=0.1893, simple_loss=0.2793, pruned_loss=0.0497, over 12282.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2582, pruned_loss=0.04051, over 3577476.97 frames. ], batch size: 129, lr: 6.27e-03, grad_scale: 8.0 +2023-03-09 10:40:14,304 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6238, 2.5987, 2.5958, 2.4468, 2.5327, 2.1553, 2.2540, 2.6469], + device='cuda:2'), covar=tensor([0.0074, 0.0083, 0.0066, 0.0101, 0.0091, 0.0163, 0.0169, 0.0061], + device='cuda:2'), in_proj_covar=tensor([0.0127, 0.0151, 0.0127, 0.0179, 0.0133, 0.0171, 0.0176, 0.0112], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 10:40:21,097 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63845.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:40:48,571 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.076e+02 2.804e+02 3.273e+02 4.132e+02 7.162e+02, threshold=6.546e+02, percent-clipped=3.0 +2023-03-09 10:40:53,083 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63871.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:40:59,898 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63877.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:41:01,889 INFO [train.py:898] (2/4) Epoch 18, batch 2100, loss[loss=0.1839, simple_loss=0.2811, pruned_loss=0.04328, over 18249.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2583, pruned_loss=0.04041, over 3578739.16 frames. 
], batch size: 60, lr: 6.27e-03, grad_scale: 8.0 +2023-03-09 10:41:26,456 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5195, 3.3577, 2.1803, 4.3576, 3.1741, 4.3183, 2.2669, 3.8312], + device='cuda:2'), covar=tensor([0.0558, 0.0863, 0.1391, 0.0471, 0.0817, 0.0387, 0.1260, 0.0429], + device='cuda:2'), in_proj_covar=tensor([0.0205, 0.0219, 0.0183, 0.0271, 0.0188, 0.0258, 0.0197, 0.0194], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 10:41:33,644 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63906.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:41:45,704 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63916.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:41:56,473 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=63925.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:41:56,505 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63925.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:42:00,982 INFO [train.py:898] (2/4) Epoch 18, batch 2150, loss[loss=0.1681, simple_loss=0.2614, pruned_loss=0.03735, over 18275.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2581, pruned_loss=0.04041, over 3585674.46 frames. ], batch size: 57, lr: 6.27e-03, grad_scale: 8.0 +2023-03-09 10:42:13,836 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8900, 4.9213, 5.0739, 4.7514, 4.7493, 4.7887, 5.1424, 5.0900], + device='cuda:2'), covar=tensor([0.0076, 0.0084, 0.0062, 0.0138, 0.0070, 0.0177, 0.0099, 0.0121], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0066, 0.0071, 0.0090, 0.0072, 0.0100, 0.0083, 0.0084], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 10:42:16,531 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63942.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:42:39,236 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. 
limit=2.0 +2023-03-09 10:42:47,029 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.000e+02 2.608e+02 3.106e+02 3.507e+02 7.149e+02, threshold=6.212e+02, percent-clipped=2.0 +2023-03-09 10:42:53,090 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4288, 3.2874, 2.3822, 4.3309, 3.0965, 4.2554, 2.3386, 3.9548], + device='cuda:2'), covar=tensor([0.0687, 0.0923, 0.1345, 0.0472, 0.0825, 0.0313, 0.1304, 0.0385], + device='cuda:2'), in_proj_covar=tensor([0.0207, 0.0220, 0.0184, 0.0272, 0.0188, 0.0258, 0.0198, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 10:42:57,356 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0393, 4.5243, 4.1717, 4.2648, 4.0802, 4.7338, 4.4248, 4.2874], + device='cuda:2'), covar=tensor([0.1302, 0.1060, 0.1056, 0.0888, 0.1516, 0.1182, 0.0769, 0.1847], + device='cuda:2'), in_proj_covar=tensor([0.0343, 0.0270, 0.0294, 0.0292, 0.0318, 0.0402, 0.0267, 0.0396], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:2') +2023-03-09 10:42:57,499 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63977.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:42:59,823 INFO [train.py:898] (2/4) Epoch 18, batch 2200, loss[loss=0.1769, simple_loss=0.2699, pruned_loss=0.04196, over 18599.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2586, pruned_loss=0.04048, over 3582416.51 frames. ], batch size: 52, lr: 6.27e-03, grad_scale: 8.0 +2023-03-09 10:43:44,395 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8713, 3.8244, 5.0202, 4.4863, 3.3628, 3.2924, 4.6571, 5.3755], + device='cuda:2'), covar=tensor([0.0742, 0.1437, 0.0223, 0.0394, 0.0879, 0.1021, 0.0319, 0.0214], + device='cuda:2'), in_proj_covar=tensor([0.0146, 0.0270, 0.0141, 0.0180, 0.0187, 0.0186, 0.0191, 0.0186], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 10:43:51,593 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5589, 6.0344, 5.5397, 5.8525, 5.6373, 5.4814, 6.1076, 6.0388], + device='cuda:2'), covar=tensor([0.1131, 0.0733, 0.0486, 0.0702, 0.1439, 0.0668, 0.0555, 0.0675], + device='cuda:2'), in_proj_covar=tensor([0.0598, 0.0506, 0.0369, 0.0538, 0.0734, 0.0535, 0.0724, 0.0551], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 10:43:53,831 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4615, 5.9855, 5.4808, 5.7743, 5.6072, 5.4030, 6.0423, 5.9925], + device='cuda:2'), covar=tensor([0.1142, 0.0761, 0.0492, 0.0709, 0.1332, 0.0657, 0.0532, 0.0701], + device='cuda:2'), in_proj_covar=tensor([0.0597, 0.0506, 0.0369, 0.0537, 0.0734, 0.0534, 0.0723, 0.0551], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 10:43:56,215 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64023.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:44:02,723 INFO [train.py:898] (2/4) Epoch 18, batch 2250, loss[loss=0.1507, simple_loss=0.234, pruned_loss=0.03366, over 18245.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2584, pruned_loss=0.04033, over 3585975.26 frames. 
], batch size: 45, lr: 6.26e-03, grad_scale: 8.0 +2023-03-09 10:44:15,022 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9453, 5.3568, 2.9843, 5.1607, 5.0619, 5.3602, 5.1413, 2.8636], + device='cuda:2'), covar=tensor([0.0195, 0.0059, 0.0675, 0.0076, 0.0068, 0.0073, 0.0082, 0.0891], + device='cuda:2'), in_proj_covar=tensor([0.0085, 0.0078, 0.0094, 0.0091, 0.0083, 0.0073, 0.0083, 0.0095], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 10:44:47,978 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.732e+02 2.740e+02 3.229e+02 3.665e+02 1.396e+03, threshold=6.458e+02, percent-clipped=4.0 +2023-03-09 10:44:58,015 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64076.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:45:00,325 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64078.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:45:01,194 INFO [train.py:898] (2/4) Epoch 18, batch 2300, loss[loss=0.1404, simple_loss=0.227, pruned_loss=0.02697, over 18367.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2583, pruned_loss=0.04054, over 3581529.31 frames. ], batch size: 42, lr: 6.26e-03, grad_scale: 8.0 +2023-03-09 10:45:07,213 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64084.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:45:11,331 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4587, 3.2667, 2.0048, 4.2784, 3.0362, 4.1861, 2.3092, 3.8429], + device='cuda:2'), covar=tensor([0.0683, 0.0952, 0.1578, 0.0537, 0.0900, 0.0357, 0.1307, 0.0414], + device='cuda:2'), in_proj_covar=tensor([0.0207, 0.0219, 0.0185, 0.0272, 0.0188, 0.0259, 0.0198, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 10:45:20,095 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5512, 5.5376, 5.1530, 5.4595, 5.4217, 4.8189, 5.3875, 5.0726], + device='cuda:2'), covar=tensor([0.0413, 0.0403, 0.1315, 0.0787, 0.0624, 0.0399, 0.0391, 0.1052], + device='cuda:2'), in_proj_covar=tensor([0.0466, 0.0527, 0.0680, 0.0418, 0.0427, 0.0487, 0.0518, 0.0649], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 10:45:54,298 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64124.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:45:56,502 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64126.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:45:59,693 INFO [train.py:898] (2/4) Epoch 18, batch 2350, loss[loss=0.1529, simple_loss=0.246, pruned_loss=0.02983, over 18254.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2584, pruned_loss=0.0402, over 3595070.73 frames. ], batch size: 45, lr: 6.26e-03, grad_scale: 8.0 +2023-03-09 10:46:04,987 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. 
limit=2.0 +2023-03-09 10:46:43,653 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64166.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:46:45,504 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.074e+02 2.798e+02 3.262e+02 4.005e+02 1.022e+03, threshold=6.524e+02, percent-clipped=3.0 +2023-03-09 10:46:58,411 INFO [train.py:898] (2/4) Epoch 18, batch 2400, loss[loss=0.1695, simple_loss=0.2664, pruned_loss=0.03635, over 17850.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2579, pruned_loss=0.04023, over 3594628.45 frames. ], batch size: 70, lr: 6.26e-03, grad_scale: 8.0 +2023-03-09 10:47:24,709 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64201.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:47:41,729 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0055, 4.2415, 2.5954, 4.1115, 5.2800, 2.7773, 3.8588, 4.1857], + device='cuda:2'), covar=tensor([0.0145, 0.1125, 0.1531, 0.0616, 0.0059, 0.1123, 0.0655, 0.0612], + device='cuda:2'), in_proj_covar=tensor([0.0153, 0.0259, 0.0199, 0.0193, 0.0113, 0.0179, 0.0211, 0.0219], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0001, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 10:47:53,625 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64225.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:47:58,504 INFO [train.py:898] (2/4) Epoch 18, batch 2450, loss[loss=0.1747, simple_loss=0.264, pruned_loss=0.04267, over 18545.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.258, pruned_loss=0.04017, over 3593515.73 frames. ], batch size: 49, lr: 6.26e-03, grad_scale: 8.0 +2023-03-09 10:48:02,348 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64232.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:48:11,390 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9840, 5.5261, 5.5164, 5.4939, 5.0245, 5.4476, 4.8174, 5.4221], + device='cuda:2'), covar=tensor([0.0239, 0.0247, 0.0172, 0.0359, 0.0393, 0.0196, 0.1054, 0.0235], + device='cuda:2'), in_proj_covar=tensor([0.0208, 0.0253, 0.0243, 0.0310, 0.0262, 0.0256, 0.0302, 0.0250], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 10:48:14,157 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64242.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:48:15,511 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2941, 3.2306, 2.1140, 3.9835, 2.8788, 3.6828, 2.2527, 3.3905], + device='cuda:2'), covar=tensor([0.0553, 0.0797, 0.1333, 0.0518, 0.0782, 0.0302, 0.1289, 0.0456], + device='cuda:2'), in_proj_covar=tensor([0.0207, 0.0219, 0.0184, 0.0273, 0.0188, 0.0259, 0.0198, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 10:48:25,548 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. 
limit=2.0 +2023-03-09 10:48:44,850 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.830e+02 2.733e+02 3.243e+02 4.040e+02 1.212e+03, threshold=6.487e+02, percent-clipped=5.0 +2023-03-09 10:48:49,183 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64272.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:48:50,284 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64273.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:48:57,009 INFO [train.py:898] (2/4) Epoch 18, batch 2500, loss[loss=0.1729, simple_loss=0.2569, pruned_loss=0.04446, over 18534.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2579, pruned_loss=0.04006, over 3593413.51 frames. ], batch size: 49, lr: 6.25e-03, grad_scale: 4.0 +2023-03-09 10:49:10,384 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64290.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:49:14,750 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64293.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:49:56,124 INFO [train.py:898] (2/4) Epoch 18, batch 2550, loss[loss=0.1363, simple_loss=0.2204, pruned_loss=0.02612, over 18425.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2579, pruned_loss=0.0403, over 3589685.20 frames. ], batch size: 43, lr: 6.25e-03, grad_scale: 4.0 +2023-03-09 10:50:01,001 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-09 10:50:03,986 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8923, 4.6664, 4.7881, 3.5833, 3.8660, 3.6036, 2.8338, 2.7569], + device='cuda:2'), covar=tensor([0.0215, 0.0149, 0.0072, 0.0285, 0.0345, 0.0202, 0.0688, 0.0807], + device='cuda:2'), in_proj_covar=tensor([0.0068, 0.0057, 0.0060, 0.0067, 0.0087, 0.0065, 0.0076, 0.0084], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 10:50:15,825 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64345.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:50:43,210 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.795e+02 2.856e+02 3.469e+02 4.229e+02 8.675e+02, threshold=6.938e+02, percent-clipped=2.0 +2023-03-09 10:50:54,972 INFO [train.py:898] (2/4) Epoch 18, batch 2600, loss[loss=0.1871, simple_loss=0.2734, pruned_loss=0.05035, over 17723.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2571, pruned_loss=0.03995, over 3596192.38 frames. ], batch size: 70, lr: 6.25e-03, grad_scale: 4.0 +2023-03-09 10:50:55,201 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64379.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:50:59,803 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-09 10:51:27,122 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64406.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:51:53,005 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-09 10:51:53,362 INFO [train.py:898] (2/4) Epoch 18, batch 2650, loss[loss=0.1567, simple_loss=0.2417, pruned_loss=0.03582, over 18241.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2575, pruned_loss=0.04, over 3590237.61 frames. 
], batch size: 45, lr: 6.25e-03, grad_scale: 4.0 +2023-03-09 10:52:16,306 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8813, 4.5917, 4.6573, 3.5614, 3.7633, 3.5488, 2.7073, 2.5007], + device='cuda:2'), covar=tensor([0.0214, 0.0134, 0.0070, 0.0279, 0.0389, 0.0213, 0.0699, 0.0852], + device='cuda:2'), in_proj_covar=tensor([0.0068, 0.0057, 0.0060, 0.0067, 0.0088, 0.0066, 0.0076, 0.0084], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 10:52:17,399 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5647, 6.0783, 5.5732, 5.8945, 5.6818, 5.5472, 6.1619, 6.0953], + device='cuda:2'), covar=tensor([0.1061, 0.0750, 0.0439, 0.0624, 0.1309, 0.0629, 0.0481, 0.0641], + device='cuda:2'), in_proj_covar=tensor([0.0596, 0.0508, 0.0368, 0.0529, 0.0730, 0.0530, 0.0719, 0.0547], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 10:52:22,064 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7252, 5.2383, 5.1733, 5.2159, 4.7582, 5.0900, 4.5513, 5.0992], + device='cuda:2'), covar=tensor([0.0241, 0.0280, 0.0192, 0.0408, 0.0350, 0.0241, 0.1064, 0.0295], + device='cuda:2'), in_proj_covar=tensor([0.0207, 0.0253, 0.0243, 0.0309, 0.0260, 0.0256, 0.0298, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 10:52:35,709 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64465.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:52:36,784 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64466.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:52:39,841 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.908e+02 2.670e+02 3.183e+02 3.846e+02 7.889e+02, threshold=6.366e+02, percent-clipped=2.0 +2023-03-09 10:52:52,153 INFO [train.py:898] (2/4) Epoch 18, batch 2700, loss[loss=0.1453, simple_loss=0.2303, pruned_loss=0.0301, over 18355.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.257, pruned_loss=0.03979, over 3593489.32 frames. ], batch size: 46, lr: 6.24e-03, grad_scale: 4.0 +2023-03-09 10:52:58,728 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.79 vs. limit=5.0 +2023-03-09 10:53:18,555 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64501.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:53:18,690 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9250, 3.9605, 5.1018, 2.7060, 4.5607, 2.6890, 3.1442, 1.8584], + device='cuda:2'), covar=tensor([0.1123, 0.0825, 0.0155, 0.0948, 0.0527, 0.2457, 0.2594, 0.2118], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0237, 0.0170, 0.0190, 0.0251, 0.0265, 0.0314, 0.0227], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 10:53:32,924 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64514.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:53:47,603 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64526.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:53:50,709 INFO [train.py:898] (2/4) Epoch 18, batch 2750, loss[loss=0.1705, simple_loss=0.2614, pruned_loss=0.03979, over 18315.00 frames. 
], tot_loss[loss=0.1679, simple_loss=0.2564, pruned_loss=0.03973, over 3590628.32 frames. ], batch size: 54, lr: 6.24e-03, grad_scale: 4.0 +2023-03-09 10:54:14,692 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64549.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:54:17,056 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5189, 5.1060, 5.0336, 5.0570, 4.5982, 4.9341, 4.4257, 4.9636], + device='cuda:2'), covar=tensor([0.0275, 0.0298, 0.0232, 0.0399, 0.0406, 0.0255, 0.1129, 0.0339], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0256, 0.0245, 0.0314, 0.0265, 0.0259, 0.0301, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 10:54:37,412 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.946e+02 2.775e+02 3.277e+02 4.138e+02 9.183e+02, threshold=6.554e+02, percent-clipped=4.0 +2023-03-09 10:54:41,168 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64572.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:54:49,210 INFO [train.py:898] (2/4) Epoch 18, batch 2800, loss[loss=0.1472, simple_loss=0.2258, pruned_loss=0.03424, over 18442.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2574, pruned_loss=0.03978, over 3591239.68 frames. ], batch size: 43, lr: 6.24e-03, grad_scale: 8.0 +2023-03-09 10:55:00,304 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64588.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:55:00,428 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8651, 5.4383, 5.4356, 5.3618, 4.9511, 5.3331, 4.8330, 5.3418], + device='cuda:2'), covar=tensor([0.0275, 0.0254, 0.0164, 0.0368, 0.0375, 0.0217, 0.0871, 0.0293], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0253, 0.0242, 0.0311, 0.0262, 0.0257, 0.0297, 0.0250], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 10:55:01,591 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64589.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:55:37,454 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64620.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:55:47,452 INFO [train.py:898] (2/4) Epoch 18, batch 2850, loss[loss=0.1835, simple_loss=0.2725, pruned_loss=0.04723, over 18243.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2576, pruned_loss=0.03954, over 3597279.97 frames. 
], batch size: 60, lr: 6.24e-03, grad_scale: 8.0 +2023-03-09 10:55:50,627 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0522, 3.9415, 5.2929, 3.1799, 4.6792, 2.7891, 3.2076, 2.0224], + device='cuda:2'), covar=tensor([0.0971, 0.0797, 0.0133, 0.0748, 0.0477, 0.2418, 0.2521, 0.1887], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0236, 0.0170, 0.0189, 0.0251, 0.0265, 0.0313, 0.0227], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 10:56:00,228 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2482, 2.6742, 2.3256, 2.6581, 3.3309, 3.2559, 2.8369, 2.6385], + device='cuda:2'), covar=tensor([0.0207, 0.0262, 0.0610, 0.0419, 0.0200, 0.0178, 0.0411, 0.0403], + device='cuda:2'), in_proj_covar=tensor([0.0135, 0.0132, 0.0163, 0.0154, 0.0126, 0.0110, 0.0151, 0.0152], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 10:56:10,998 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64648.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:56:13,235 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64650.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:56:23,548 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-03-09 10:56:34,846 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.051e+02 2.756e+02 3.294e+02 3.793e+02 6.493e+02, threshold=6.588e+02, percent-clipped=0.0 +2023-03-09 10:56:46,013 INFO [train.py:898] (2/4) Epoch 18, batch 2900, loss[loss=0.1705, simple_loss=0.2629, pruned_loss=0.03902, over 17112.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.2571, pruned_loss=0.03925, over 3597714.27 frames. ], batch size: 78, lr: 6.23e-03, grad_scale: 8.0 +2023-03-09 10:56:46,346 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64679.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:57:01,706 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4743, 2.7726, 4.3448, 3.7118, 2.4573, 4.6364, 3.9825, 2.7621], + device='cuda:2'), covar=tensor([0.0549, 0.1536, 0.0244, 0.0392, 0.1698, 0.0173, 0.0503, 0.0996], + device='cuda:2'), in_proj_covar=tensor([0.0204, 0.0234, 0.0195, 0.0154, 0.0222, 0.0205, 0.0238, 0.0196], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 10:57:12,601 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64701.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:57:14,997 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64703.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:57:21,977 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64709.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:57:42,565 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64727.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:57:44,566 INFO [train.py:898] (2/4) Epoch 18, batch 2950, loss[loss=0.1924, simple_loss=0.2835, pruned_loss=0.05065, over 18403.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2568, pruned_loss=0.03966, over 3598420.16 frames. 
], batch size: 52, lr: 6.23e-03, grad_scale: 8.0 +2023-03-09 10:58:04,182 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64745.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:58:12,626 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64752.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:58:26,559 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64764.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:58:31,869 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.931e+02 2.790e+02 3.324e+02 3.890e+02 1.048e+03, threshold=6.648e+02, percent-clipped=3.0 +2023-03-09 10:58:43,053 INFO [train.py:898] (2/4) Epoch 18, batch 3000, loss[loss=0.1543, simple_loss=0.2389, pruned_loss=0.03485, over 17753.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2566, pruned_loss=0.03962, over 3594196.78 frames. ], batch size: 39, lr: 6.23e-03, grad_scale: 8.0 +2023-03-09 10:58:43,054 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 10:58:55,123 INFO [train.py:932] (2/4) Epoch 18, validation: loss=0.1513, simple_loss=0.2515, pruned_loss=0.02557, over 944034.00 frames. +2023-03-09 10:58:55,124 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 10:59:19,074 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-03-09 10:59:28,047 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64806.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:59:36,032 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64813.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:59:44,855 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64821.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 10:59:53,920 INFO [train.py:898] (2/4) Epoch 18, batch 3050, loss[loss=0.1614, simple_loss=0.2373, pruned_loss=0.04273, over 18149.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2569, pruned_loss=0.03993, over 3589549.26 frames. ], batch size: 44, lr: 6.23e-03, grad_scale: 8.0 +2023-03-09 11:00:41,795 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.847e+02 2.582e+02 2.978e+02 3.625e+02 9.287e+02, threshold=5.956e+02, percent-clipped=2.0 +2023-03-09 11:00:52,628 INFO [train.py:898] (2/4) Epoch 18, batch 3100, loss[loss=0.1868, simple_loss=0.2737, pruned_loss=0.04999, over 16867.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2576, pruned_loss=0.04003, over 3590375.94 frames. ], batch size: 78, lr: 6.22e-03, grad_scale: 8.0 +2023-03-09 11:01:04,247 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64888.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:01:25,439 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-03-09 11:01:51,081 INFO [train.py:898] (2/4) Epoch 18, batch 3150, loss[loss=0.1696, simple_loss=0.2589, pruned_loss=0.04016, over 18493.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2574, pruned_loss=0.03982, over 3581505.76 frames. 
], batch size: 51, lr: 6.22e-03, grad_scale: 8.0 +2023-03-09 11:01:59,102 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=64936.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:02:10,209 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64945.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:02:38,246 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.946e+02 2.751e+02 3.227e+02 4.235e+02 1.394e+03, threshold=6.453e+02, percent-clipped=7.0 +2023-03-09 11:02:46,561 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64976.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:02:49,636 INFO [train.py:898] (2/4) Epoch 18, batch 3200, loss[loss=0.1997, simple_loss=0.2979, pruned_loss=0.05075, over 18170.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2575, pruned_loss=0.0401, over 3578270.12 frames. ], batch size: 62, lr: 6.22e-03, grad_scale: 8.0 +2023-03-09 11:03:16,779 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65001.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:03:20,124 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65004.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:03:48,369 INFO [train.py:898] (2/4) Epoch 18, batch 3250, loss[loss=0.1811, simple_loss=0.275, pruned_loss=0.04358, over 17984.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2577, pruned_loss=0.04005, over 3590925.73 frames. ], batch size: 65, lr: 6.22e-03, grad_scale: 8.0 +2023-03-09 11:03:58,061 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65037.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:04:12,034 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65049.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:04:18,523 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6218, 3.9157, 5.0002, 4.2156, 2.5544, 2.6190, 4.0998, 5.2009], + device='cuda:2'), covar=tensor([0.0814, 0.1246, 0.0173, 0.0417, 0.1228, 0.1333, 0.0490, 0.0187], + device='cuda:2'), in_proj_covar=tensor([0.0146, 0.0271, 0.0140, 0.0179, 0.0188, 0.0188, 0.0191, 0.0186], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 11:04:19,625 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65055.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:04:24,048 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65059.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:04:28,179 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-09 11:04:35,618 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.699e+02 2.719e+02 3.291e+02 4.073e+02 7.543e+02, threshold=6.581e+02, percent-clipped=1.0 +2023-03-09 11:04:46,977 INFO [train.py:898] (2/4) Epoch 18, batch 3300, loss[loss=0.1609, simple_loss=0.2431, pruned_loss=0.03935, over 18388.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2577, pruned_loss=0.04031, over 3575974.06 frames. 
], batch size: 46, lr: 6.21e-03, grad_scale: 4.0 +2023-03-09 11:05:13,386 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65101.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:05:21,864 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65108.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:05:31,467 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65116.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:05:32,748 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8540, 4.5783, 4.6203, 3.4186, 3.8392, 3.4065, 2.6129, 2.6275], + device='cuda:2'), covar=tensor([0.0213, 0.0173, 0.0079, 0.0305, 0.0302, 0.0235, 0.0739, 0.0803], + device='cuda:2'), in_proj_covar=tensor([0.0068, 0.0057, 0.0060, 0.0067, 0.0087, 0.0066, 0.0076, 0.0083], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 11:05:37,671 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65121.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:05:46,559 INFO [train.py:898] (2/4) Epoch 18, batch 3350, loss[loss=0.1579, simple_loss=0.2452, pruned_loss=0.0353, over 18525.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2579, pruned_loss=0.04043, over 3575075.27 frames. ], batch size: 49, lr: 6.21e-03, grad_scale: 4.0 +2023-03-09 11:06:33,115 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65169.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:06:34,018 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.758e+02 2.720e+02 3.418e+02 4.125e+02 2.086e+03, threshold=6.835e+02, percent-clipped=5.0 +2023-03-09 11:06:44,722 INFO [train.py:898] (2/4) Epoch 18, batch 3400, loss[loss=0.1753, simple_loss=0.268, pruned_loss=0.04131, over 17992.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2571, pruned_loss=0.0401, over 3583568.39 frames. ], batch size: 65, lr: 6.21e-03, grad_scale: 4.0 +2023-03-09 11:07:43,173 INFO [train.py:898] (2/4) Epoch 18, batch 3450, loss[loss=0.1575, simple_loss=0.2441, pruned_loss=0.03546, over 18378.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2573, pruned_loss=0.04029, over 3591417.15 frames. ], batch size: 50, lr: 6.21e-03, grad_scale: 4.0 +2023-03-09 11:07:44,020 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.39 vs. 
limit=5.0 +2023-03-09 11:08:01,671 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65245.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:08:24,798 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2081, 5.2052, 5.3064, 5.0423, 5.0016, 5.0633, 5.3910, 5.3549], + device='cuda:2'), covar=tensor([0.0061, 0.0052, 0.0047, 0.0083, 0.0063, 0.0128, 0.0054, 0.0075], + device='cuda:2'), in_proj_covar=tensor([0.0091, 0.0067, 0.0071, 0.0090, 0.0073, 0.0101, 0.0085, 0.0085], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 11:08:31,048 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.103e+02 2.885e+02 3.344e+02 3.937e+02 8.073e+02, threshold=6.688e+02, percent-clipped=3.0 +2023-03-09 11:08:37,256 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8388, 3.6651, 5.0743, 4.4866, 3.3187, 3.0964, 4.4365, 5.2851], + device='cuda:2'), covar=tensor([0.0798, 0.1561, 0.0161, 0.0357, 0.0944, 0.1135, 0.0381, 0.0187], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0273, 0.0142, 0.0181, 0.0191, 0.0190, 0.0193, 0.0189], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 11:08:41,987 INFO [train.py:898] (2/4) Epoch 18, batch 3500, loss[loss=0.1525, simple_loss=0.2442, pruned_loss=0.03043, over 18479.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2561, pruned_loss=0.03967, over 3593510.03 frames. ], batch size: 53, lr: 6.20e-03, grad_scale: 4.0 +2023-03-09 11:08:49,029 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65285.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:08:58,191 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65293.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:09:10,769 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65304.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:09:28,612 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.03 vs. limit=5.0 +2023-03-09 11:09:30,279 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7602, 3.0281, 4.4099, 3.7419, 2.7110, 4.7387, 4.0227, 2.9610], + device='cuda:2'), covar=tensor([0.0497, 0.1218, 0.0244, 0.0433, 0.1379, 0.0156, 0.0505, 0.0864], + device='cuda:2'), in_proj_covar=tensor([0.0200, 0.0228, 0.0195, 0.0152, 0.0217, 0.0199, 0.0234, 0.0192], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 11:09:37,554 INFO [train.py:898] (2/4) Epoch 18, batch 3550, loss[loss=0.1791, simple_loss=0.2752, pruned_loss=0.04151, over 18188.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2562, pruned_loss=0.03946, over 3580434.42 frames. 
], batch size: 60, lr: 6.20e-03, grad_scale: 4.0 +2023-03-09 11:09:41,608 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65332.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:09:45,760 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4730, 5.3019, 5.7220, 5.6729, 5.3977, 6.2405, 5.8981, 5.5465], + device='cuda:2'), covar=tensor([0.1014, 0.0501, 0.0606, 0.0634, 0.1267, 0.0598, 0.0535, 0.1452], + device='cuda:2'), in_proj_covar=tensor([0.0350, 0.0274, 0.0300, 0.0299, 0.0325, 0.0411, 0.0273, 0.0400], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:2') +2023-03-09 11:09:57,300 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65346.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:10:03,348 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65352.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:10:10,848 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65359.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:10:23,174 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.008e+02 2.567e+02 2.931e+02 3.482e+02 6.619e+02, threshold=5.862e+02, percent-clipped=0.0 +2023-03-09 11:10:31,767 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-03-09 11:10:33,186 INFO [train.py:898] (2/4) Epoch 18, batch 3600, loss[loss=0.1664, simple_loss=0.2537, pruned_loss=0.03959, over 18407.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.256, pruned_loss=0.03942, over 3589044.06 frames. ], batch size: 48, lr: 6.20e-03, grad_scale: 8.0 +2023-03-09 11:10:53,597 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 11:10:57,562 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65401.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:11:03,660 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65407.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:11:04,727 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65408.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:11:07,306 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65411.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:11:38,115 INFO [train.py:898] (2/4) Epoch 19, batch 0, loss[loss=0.175, simple_loss=0.262, pruned_loss=0.04393, over 18253.00 frames. ], tot_loss[loss=0.175, simple_loss=0.262, pruned_loss=0.04393, over 18253.00 frames. ], batch size: 47, lr: 6.03e-03, grad_scale: 8.0 +2023-03-09 11:11:38,115 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 11:11:49,827 INFO [train.py:932] (2/4) Epoch 19, validation: loss=0.1513, simple_loss=0.2518, pruned_loss=0.02538, over 944034.00 frames. 
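[Editor's sketch] The recurring optim.py:369 entries throughout this log report gradient-clipping statistics: five grad-norm values (which appear to be the minimum, 25th/50th/75th percentiles, and maximum over a recent window of batches), the clipping threshold in effect, and the percentage of batches clipped. Consistent with Clipping_scale=2.0, the logged threshold tracks twice the median (e.g. 2 x 3.745e+02 = 7.490e+02 in the report just below). The following is a minimal, hypothetical sketch of such median-based clipping; the class name GradNormTracker, the window size, and the cumulative percent-clipped counter are illustrative assumptions, not icefall's actual optim.py code.

    import torch
    from collections import deque

    class GradNormTracker:
        # Hypothetical sketch -- NOT icefall's optim.py. It mimics the
        # "grad-norm quartiles ... threshold=... percent-clipped=..." lines
        # above: the threshold is taken as clipping_scale * median of the
        # global grad norms seen over a recent window.
        def __init__(self, clipping_scale=2.0, window=512):
            self.clipping_scale = clipping_scale
            self.norms = deque(maxlen=window)  # recent global grad norms
            self.num_steps = 0
            self.num_clipped = 0

        def clip_(self, parameters):
            params = [p for p in parameters if p.grad is not None]
            # Global grad norm = l2 norm of the per-parameter grad norms.
            total = torch.norm(
                torch.stack([p.grad.detach().norm() for p in params])
            ).item()
            self.norms.append(total)
            # min, q1, median, q3, max over the window.
            qs = torch.quantile(
                torch.tensor(list(self.norms)),
                torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]),
            ).tolist()
            threshold = self.clipping_scale * qs[2]  # 2.0 * median, as logged
            self.num_steps += 1
            if total > threshold:
                self.num_clipped += 1
                for p in params:
                    p.grad.mul_(threshold / total)  # rescale grads in place
            pct = 100.0 * self.num_clipped / self.num_steps
            print(f"grad-norm quartiles {qs[0]:.3e} {qs[1]:.3e} {qs[2]:.3e} "
                  f"{qs[3]:.3e} {qs[4]:.3e}, threshold={threshold:.3e}, "
                  f"percent-clipped={pct:.1f}")

A real implementation would presumably report only at intervals and reset the clipped counter between reports (the logged percent-clipped varies per line); the sketch only shows where the five quartile values and the 2x-median threshold could come from.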
+2023-03-09 11:11:49,828 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 11:11:55,723 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1709, 5.1399, 5.4167, 5.3858, 5.1619, 5.9335, 5.5498, 5.2845], + device='cuda:2'), covar=tensor([0.1176, 0.0643, 0.0687, 0.0702, 0.1369, 0.0677, 0.0709, 0.1643], + device='cuda:2'), in_proj_covar=tensor([0.0348, 0.0273, 0.0297, 0.0297, 0.0323, 0.0408, 0.0271, 0.0397], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:2') +2023-03-09 11:12:32,063 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65449.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:12:40,037 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65456.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:12:47,634 INFO [train.py:898] (2/4) Epoch 19, batch 50, loss[loss=0.1905, simple_loss=0.2777, pruned_loss=0.05167, over 18465.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.256, pruned_loss=0.0392, over 823781.84 frames. ], batch size: 59, lr: 6.03e-03, grad_scale: 8.0 +2023-03-09 11:12:55,633 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.183e+02 3.168e+02 3.745e+02 4.486e+02 9.575e+02, threshold=7.490e+02, percent-clipped=6.0 +2023-03-09 11:12:58,310 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8313, 4.6016, 4.6502, 3.4894, 3.8291, 3.5361, 2.8775, 2.4391], + device='cuda:2'), covar=tensor([0.0213, 0.0117, 0.0076, 0.0299, 0.0312, 0.0207, 0.0661, 0.0863], + device='cuda:2'), in_proj_covar=tensor([0.0067, 0.0057, 0.0060, 0.0067, 0.0086, 0.0065, 0.0075, 0.0083], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 11:13:46,499 INFO [train.py:898] (2/4) Epoch 19, batch 100, loss[loss=0.1808, simple_loss=0.265, pruned_loss=0.04833, over 17039.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2583, pruned_loss=0.04055, over 1448467.20 frames. ], batch size: 78, lr: 6.03e-03, grad_scale: 8.0 +2023-03-09 11:14:08,287 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65532.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:14:44,739 INFO [train.py:898] (2/4) Epoch 19, batch 150, loss[loss=0.1736, simple_loss=0.2711, pruned_loss=0.0381, over 18494.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.2573, pruned_loss=0.03999, over 1927162.21 frames. ], batch size: 51, lr: 6.02e-03, grad_scale: 4.0 +2023-03-09 11:14:53,876 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.910e+02 2.761e+02 3.150e+02 3.838e+02 8.694e+02, threshold=6.300e+02, percent-clipped=3.0 +2023-03-09 11:15:20,716 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65593.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:15:44,540 INFO [train.py:898] (2/4) Epoch 19, batch 200, loss[loss=0.1718, simple_loss=0.2622, pruned_loss=0.04075, over 18469.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2567, pruned_loss=0.03976, over 2292673.12 frames. 
], batch size: 59, lr: 6.02e-03, grad_scale: 4.0 +2023-03-09 11:15:47,239 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7198, 5.2333, 5.1619, 5.2743, 4.7383, 5.1477, 4.5567, 5.0453], + device='cuda:2'), covar=tensor([0.0254, 0.0285, 0.0213, 0.0415, 0.0399, 0.0266, 0.1051, 0.0349], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0256, 0.0245, 0.0316, 0.0263, 0.0262, 0.0302, 0.0253], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 11:16:04,441 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7152, 3.4999, 4.7423, 4.2792, 3.0674, 2.7780, 4.2184, 5.0404], + device='cuda:2'), covar=tensor([0.0765, 0.1528, 0.0199, 0.0377, 0.1001, 0.1250, 0.0406, 0.0211], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0271, 0.0143, 0.0179, 0.0191, 0.0189, 0.0192, 0.0189], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 11:16:06,570 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65632.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:16:17,261 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65641.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:16:43,291 INFO [train.py:898] (2/4) Epoch 19, batch 250, loss[loss=0.1491, simple_loss=0.236, pruned_loss=0.03114, over 18516.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.2564, pruned_loss=0.03955, over 2583296.47 frames. ], batch size: 44, lr: 6.02e-03, grad_scale: 4.0 +2023-03-09 11:16:52,482 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.860e+02 2.820e+02 3.379e+02 4.029e+02 8.035e+02, threshold=6.759e+02, percent-clipped=4.0 +2023-03-09 11:17:02,987 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65680.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:17:31,866 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0789, 4.3070, 2.4940, 4.1889, 5.3391, 2.6386, 3.9236, 4.1289], + device='cuda:2'), covar=tensor([0.0163, 0.1057, 0.1667, 0.0666, 0.0071, 0.1322, 0.0690, 0.0687], + device='cuda:2'), in_proj_covar=tensor([0.0156, 0.0262, 0.0200, 0.0194, 0.0116, 0.0179, 0.0212, 0.0220], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 11:17:40,402 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65711.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:17:42,352 INFO [train.py:898] (2/4) Epoch 19, batch 300, loss[loss=0.1844, simple_loss=0.2725, pruned_loss=0.04814, over 18359.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2552, pruned_loss=0.03913, over 2805834.66 frames. ], batch size: 56, lr: 6.02e-03, grad_scale: 4.0 +2023-03-09 11:18:24,119 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-09 11:18:35,672 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65759.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:18:40,556 INFO [train.py:898] (2/4) Epoch 19, batch 350, loss[loss=0.1717, simple_loss=0.2615, pruned_loss=0.04093, over 17952.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2558, pruned_loss=0.03926, over 2987892.39 frames. 
], batch size: 65, lr: 6.01e-03, grad_scale: 4.0 +2023-03-09 11:18:49,496 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.996e+02 2.649e+02 2.976e+02 3.551e+02 5.920e+02, threshold=5.952e+02, percent-clipped=0.0 +2023-03-09 11:18:51,316 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.01 vs. limit=5.0 +2023-03-09 11:19:38,386 INFO [train.py:898] (2/4) Epoch 19, batch 400, loss[loss=0.1718, simple_loss=0.2608, pruned_loss=0.0414, over 18376.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2567, pruned_loss=0.03974, over 3110983.34 frames. ], batch size: 55, lr: 6.01e-03, grad_scale: 8.0 +2023-03-09 11:20:07,711 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65838.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:20:13,178 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3201, 5.3108, 4.8936, 5.2005, 5.2108, 4.6075, 5.1240, 4.9177], + device='cuda:2'), covar=tensor([0.0400, 0.0428, 0.1246, 0.0839, 0.0592, 0.0455, 0.0435, 0.0995], + device='cuda:2'), in_proj_covar=tensor([0.0464, 0.0534, 0.0686, 0.0422, 0.0431, 0.0490, 0.0526, 0.0659], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 11:20:37,147 INFO [train.py:898] (2/4) Epoch 19, batch 450, loss[loss=0.1824, simple_loss=0.2675, pruned_loss=0.04862, over 18383.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2569, pruned_loss=0.03981, over 3230369.40 frames. ], batch size: 50, lr: 6.01e-03, grad_scale: 8.0 +2023-03-09 11:20:46,704 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.864e+02 3.029e+02 3.525e+02 4.091e+02 6.836e+02, threshold=7.049e+02, percent-clipped=6.0 +2023-03-09 11:21:05,984 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65888.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:21:18,730 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65899.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 11:21:22,717 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-03-09 11:21:35,174 INFO [train.py:898] (2/4) Epoch 19, batch 500, loss[loss=0.1647, simple_loss=0.2594, pruned_loss=0.03504, over 18497.00 frames. ], tot_loss[loss=0.1673, simple_loss=0.2559, pruned_loss=0.03936, over 3328072.61 frames. ], batch size: 53, lr: 6.01e-03, grad_scale: 8.0 +2023-03-09 11:22:01,543 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3747, 3.9159, 3.8712, 3.0777, 3.4079, 3.1226, 2.3757, 2.1957], + device='cuda:2'), covar=tensor([0.0247, 0.0159, 0.0099, 0.0324, 0.0340, 0.0238, 0.0743, 0.0824], + device='cuda:2'), in_proj_covar=tensor([0.0068, 0.0057, 0.0060, 0.0067, 0.0087, 0.0065, 0.0075, 0.0083], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 11:22:08,470 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65941.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:22:24,503 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 11:22:33,539 INFO [train.py:898] (2/4) Epoch 19, batch 550, loss[loss=0.1772, simple_loss=0.2676, pruned_loss=0.04345, over 18184.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2555, pruned_loss=0.03918, over 3388894.26 frames. 
], batch size: 60, lr: 6.01e-03, grad_scale: 8.0 +2023-03-09 11:22:42,915 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.825e+02 2.542e+02 3.090e+02 3.456e+02 5.520e+02, threshold=6.179e+02, percent-clipped=0.0 +2023-03-09 11:23:04,307 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=65989.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:23:13,298 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7003, 4.3160, 4.3043, 3.1978, 3.6272, 3.3235, 2.4013, 2.2370], + device='cuda:2'), covar=tensor([0.0223, 0.0168, 0.0100, 0.0357, 0.0340, 0.0250, 0.0810, 0.0943], + device='cuda:2'), in_proj_covar=tensor([0.0068, 0.0057, 0.0059, 0.0067, 0.0086, 0.0065, 0.0075, 0.0083], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 11:23:35,295 INFO [train.py:898] (2/4) Epoch 19, batch 600, loss[loss=0.1674, simple_loss=0.2484, pruned_loss=0.04319, over 18398.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2557, pruned_loss=0.03931, over 3438809.85 frames. ], batch size: 48, lr: 6.00e-03, grad_scale: 8.0 +2023-03-09 11:23:49,580 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8685, 3.1064, 2.8554, 3.1158, 3.8397, 3.6823, 3.4090, 3.1666], + device='cuda:2'), covar=tensor([0.0173, 0.0268, 0.0518, 0.0371, 0.0182, 0.0151, 0.0307, 0.0389], + device='cuda:2'), in_proj_covar=tensor([0.0134, 0.0130, 0.0161, 0.0153, 0.0124, 0.0111, 0.0149, 0.0150], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 11:24:06,186 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5056, 3.4676, 2.4390, 4.4183, 3.0402, 4.1775, 2.5198, 4.1613], + device='cuda:2'), covar=tensor([0.0622, 0.0795, 0.1371, 0.0417, 0.0890, 0.0310, 0.1179, 0.0318], + device='cuda:2'), in_proj_covar=tensor([0.0208, 0.0221, 0.0188, 0.0272, 0.0188, 0.0259, 0.0198, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 11:24:17,652 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0481, 4.3851, 2.5917, 4.0553, 5.3443, 2.6041, 3.9495, 4.0425], + device='cuda:2'), covar=tensor([0.0202, 0.0991, 0.1610, 0.0726, 0.0072, 0.1363, 0.0671, 0.0688], + device='cuda:2'), in_proj_covar=tensor([0.0158, 0.0263, 0.0202, 0.0196, 0.0117, 0.0180, 0.0214, 0.0222], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 11:24:34,014 INFO [train.py:898] (2/4) Epoch 19, batch 650, loss[loss=0.1813, simple_loss=0.2752, pruned_loss=0.04372, over 18095.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2552, pruned_loss=0.03914, over 3481624.24 frames. 
], batch size: 62, lr: 6.00e-03, grad_scale: 8.0 +2023-03-09 11:24:42,669 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.920e+02 2.591e+02 2.961e+02 3.650e+02 8.631e+02, threshold=5.923e+02, percent-clipped=2.0 +2023-03-09 11:24:59,032 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3693, 5.9128, 5.4605, 5.6752, 5.4297, 5.3383, 5.9410, 5.8837], + device='cuda:2'), covar=tensor([0.1125, 0.0681, 0.0442, 0.0679, 0.1502, 0.0687, 0.0558, 0.0676], + device='cuda:2'), in_proj_covar=tensor([0.0601, 0.0517, 0.0372, 0.0541, 0.0737, 0.0538, 0.0733, 0.0551], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 11:25:32,785 INFO [train.py:898] (2/4) Epoch 19, batch 700, loss[loss=0.1898, simple_loss=0.2912, pruned_loss=0.04417, over 18476.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2553, pruned_loss=0.03891, over 3508036.08 frames. ], batch size: 59, lr: 6.00e-03, grad_scale: 8.0 +2023-03-09 11:26:04,249 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5189, 5.4625, 5.1246, 5.4255, 5.3784, 4.8317, 5.3187, 5.0859], + device='cuda:2'), covar=tensor([0.0313, 0.0400, 0.1041, 0.0648, 0.0546, 0.0375, 0.0357, 0.0910], + device='cuda:2'), in_proj_covar=tensor([0.0460, 0.0533, 0.0680, 0.0417, 0.0427, 0.0486, 0.0523, 0.0655], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 11:26:09,914 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66144.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:26:27,972 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66160.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:26:31,037 INFO [train.py:898] (2/4) Epoch 19, batch 750, loss[loss=0.1802, simple_loss=0.2746, pruned_loss=0.04297, over 15715.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2557, pruned_loss=0.03915, over 3524917.10 frames. ], batch size: 94, lr: 6.00e-03, grad_scale: 8.0 +2023-03-09 11:26:40,055 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.858e+02 2.715e+02 3.373e+02 4.119e+02 8.892e+02, threshold=6.747e+02, percent-clipped=6.0 +2023-03-09 11:27:02,119 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66188.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:27:08,743 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66194.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 11:27:21,511 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66205.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:27:21,604 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5544, 2.2071, 2.5990, 2.6210, 2.9635, 4.4892, 4.2615, 3.6197], + device='cuda:2'), covar=tensor([0.1638, 0.2300, 0.2749, 0.1735, 0.2348, 0.0318, 0.0418, 0.0633], + device='cuda:2'), in_proj_covar=tensor([0.0293, 0.0343, 0.0373, 0.0273, 0.0390, 0.0234, 0.0296, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 11:27:30,209 INFO [train.py:898] (2/4) Epoch 19, batch 800, loss[loss=0.1689, simple_loss=0.2687, pruned_loss=0.03457, over 17134.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2562, pruned_loss=0.03959, over 3529425.90 frames. 
], batch size: 78, lr: 5.99e-03, grad_scale: 8.0 +2023-03-09 11:27:39,585 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66221.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 11:27:58,060 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66236.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:28:23,802 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66258.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:28:29,026 INFO [train.py:898] (2/4) Epoch 19, batch 850, loss[loss=0.1559, simple_loss=0.2393, pruned_loss=0.03629, over 18422.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2573, pruned_loss=0.03969, over 3538304.15 frames. ], batch size: 43, lr: 5.99e-03, grad_scale: 8.0 +2023-03-09 11:28:34,529 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-09 11:28:37,706 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8682, 3.8296, 4.9991, 4.4701, 3.1821, 3.0358, 4.4995, 5.2843], + device='cuda:2'), covar=tensor([0.0750, 0.1335, 0.0189, 0.0336, 0.0878, 0.1072, 0.0333, 0.0191], + device='cuda:2'), in_proj_covar=tensor([0.0145, 0.0269, 0.0141, 0.0178, 0.0188, 0.0186, 0.0190, 0.0186], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 11:28:38,315 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.829e+02 2.659e+02 3.037e+02 3.502e+02 7.094e+02, threshold=6.073e+02, percent-clipped=1.0 +2023-03-09 11:29:27,624 INFO [train.py:898] (2/4) Epoch 19, batch 900, loss[loss=0.1815, simple_loss=0.2679, pruned_loss=0.04757, over 18369.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.2564, pruned_loss=0.0396, over 3545439.05 frames. ], batch size: 56, lr: 5.99e-03, grad_scale: 8.0 +2023-03-09 11:29:34,951 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66319.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:29:52,710 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9528, 4.1546, 2.2869, 4.0521, 5.2626, 2.4807, 4.0002, 4.1620], + device='cuda:2'), covar=tensor([0.0162, 0.0935, 0.1555, 0.0575, 0.0057, 0.1145, 0.0551, 0.0552], + device='cuda:2'), in_proj_covar=tensor([0.0155, 0.0262, 0.0199, 0.0193, 0.0117, 0.0179, 0.0212, 0.0220], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 11:30:26,352 INFO [train.py:898] (2/4) Epoch 19, batch 950, loss[loss=0.1791, simple_loss=0.2733, pruned_loss=0.04245, over 18372.00 frames. ], tot_loss[loss=0.1675, simple_loss=0.2563, pruned_loss=0.03935, over 3560245.65 frames. 
], batch size: 56, lr: 5.99e-03, grad_scale: 8.0 +2023-03-09 11:30:35,533 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.870e+02 2.786e+02 3.276e+02 3.823e+02 6.453e+02, threshold=6.553e+02, percent-clipped=1.0 +2023-03-09 11:31:02,045 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7621, 4.5019, 4.5100, 3.3728, 3.7873, 3.4457, 2.6601, 2.4536], + device='cuda:2'), covar=tensor([0.0205, 0.0127, 0.0095, 0.0325, 0.0322, 0.0220, 0.0767, 0.0866], + device='cuda:2'), in_proj_covar=tensor([0.0069, 0.0058, 0.0060, 0.0068, 0.0087, 0.0066, 0.0076, 0.0084], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 11:31:15,201 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.47 vs. limit=5.0 +2023-03-09 11:31:24,778 INFO [train.py:898] (2/4) Epoch 19, batch 1000, loss[loss=0.1692, simple_loss=0.2605, pruned_loss=0.03897, over 18484.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2553, pruned_loss=0.03932, over 3562469.62 frames. ], batch size: 53, lr: 5.99e-03, grad_scale: 8.0 +2023-03-09 11:31:49,944 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9226, 4.5550, 4.6321, 3.4179, 3.8304, 3.5124, 2.8599, 2.4989], + device='cuda:2'), covar=tensor([0.0192, 0.0130, 0.0066, 0.0301, 0.0305, 0.0214, 0.0635, 0.0805], + device='cuda:2'), in_proj_covar=tensor([0.0069, 0.0058, 0.0060, 0.0068, 0.0087, 0.0066, 0.0076, 0.0084], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 11:32:06,527 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6496, 2.8906, 2.6041, 3.0269, 3.7198, 3.5371, 3.1072, 2.9829], + device='cuda:2'), covar=tensor([0.0159, 0.0263, 0.0548, 0.0339, 0.0167, 0.0155, 0.0353, 0.0351], + device='cuda:2'), in_proj_covar=tensor([0.0135, 0.0131, 0.0163, 0.0153, 0.0126, 0.0111, 0.0151, 0.0152], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 11:32:16,134 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.95 vs. limit=5.0 +2023-03-09 11:32:20,737 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-09 11:32:23,345 INFO [train.py:898] (2/4) Epoch 19, batch 1050, loss[loss=0.1905, simple_loss=0.2744, pruned_loss=0.05332, over 17748.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2548, pruned_loss=0.03916, over 3564229.31 frames. ], batch size: 70, lr: 5.98e-03, grad_scale: 8.0 +2023-03-09 11:32:32,547 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.909e+02 2.933e+02 3.279e+02 4.197e+02 8.258e+02, threshold=6.558e+02, percent-clipped=3.0 +2023-03-09 11:32:59,641 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66494.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:33:06,941 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66500.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 11:33:22,933 INFO [train.py:898] (2/4) Epoch 19, batch 1100, loss[loss=0.1463, simple_loss=0.2301, pruned_loss=0.03127, over 18441.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2543, pruned_loss=0.03885, over 3575286.48 frames. 
], batch size: 43, lr: 5.98e-03, grad_scale: 8.0 +2023-03-09 11:33:26,438 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66516.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 11:33:29,955 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9175, 5.3547, 5.2890, 5.3134, 4.8768, 5.3022, 4.6065, 5.2550], + device='cuda:2'), covar=tensor([0.0228, 0.0314, 0.0232, 0.0418, 0.0365, 0.0221, 0.1179, 0.0302], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0256, 0.0245, 0.0320, 0.0263, 0.0262, 0.0305, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 11:33:32,056 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66521.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:33:39,996 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8171, 3.6683, 4.8874, 2.7465, 4.2480, 2.6468, 3.0197, 1.7767], + device='cuda:2'), covar=tensor([0.1144, 0.0845, 0.0149, 0.0939, 0.0595, 0.2475, 0.2623, 0.2126], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0238, 0.0175, 0.0191, 0.0250, 0.0266, 0.0316, 0.0229], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 11:33:42,082 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5532, 4.5675, 4.6194, 4.3793, 4.3824, 4.3521, 4.6999, 4.6467], + device='cuda:2'), covar=tensor([0.0094, 0.0088, 0.0073, 0.0127, 0.0084, 0.0167, 0.0103, 0.0146], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0067, 0.0071, 0.0090, 0.0073, 0.0101, 0.0084, 0.0084], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 11:33:51,702 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.79 vs. limit=5.0 +2023-03-09 11:33:56,079 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66542.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:34:19,087 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4048, 5.2484, 5.6011, 5.6210, 5.3386, 6.1398, 5.8392, 5.4568], + device='cuda:2'), covar=tensor([0.1063, 0.0629, 0.0643, 0.0769, 0.1550, 0.0750, 0.0568, 0.1686], + device='cuda:2'), in_proj_covar=tensor([0.0351, 0.0275, 0.0299, 0.0301, 0.0324, 0.0414, 0.0270, 0.0400], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:2') +2023-03-09 11:34:20,984 INFO [train.py:898] (2/4) Epoch 19, batch 1150, loss[loss=0.1992, simple_loss=0.2917, pruned_loss=0.05336, over 17123.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2554, pruned_loss=0.03929, over 3580755.97 frames. ], batch size: 78, lr: 5.98e-03, grad_scale: 8.0 +2023-03-09 11:34:29,830 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.816e+02 2.725e+02 3.159e+02 3.763e+02 6.148e+02, threshold=6.318e+02, percent-clipped=0.0 +2023-03-09 11:34:36,270 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. 
limit=2.0 +2023-03-09 11:34:42,831 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66582.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 11:35:10,277 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4456, 3.3309, 2.2122, 4.2683, 2.9642, 4.1210, 2.4087, 3.9140], + device='cuda:2'), covar=tensor([0.0625, 0.0896, 0.1432, 0.0505, 0.0912, 0.0384, 0.1168, 0.0384], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0220, 0.0186, 0.0274, 0.0188, 0.0258, 0.0198, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 11:35:19,963 INFO [train.py:898] (2/4) Epoch 19, batch 1200, loss[loss=0.1632, simple_loss=0.2536, pruned_loss=0.03642, over 18361.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2553, pruned_loss=0.03904, over 3582196.38 frames. ], batch size: 56, lr: 5.98e-03, grad_scale: 8.0 +2023-03-09 11:35:21,293 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66614.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:36:18,957 INFO [train.py:898] (2/4) Epoch 19, batch 1250, loss[loss=0.1662, simple_loss=0.2604, pruned_loss=0.03606, over 16165.00 frames. ], tot_loss[loss=0.1661, simple_loss=0.2547, pruned_loss=0.03871, over 3591855.59 frames. ], batch size: 94, lr: 5.97e-03, grad_scale: 8.0 +2023-03-09 11:36:27,894 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.899e+02 2.792e+02 3.362e+02 4.146e+02 6.699e+02, threshold=6.725e+02, percent-clipped=2.0 +2023-03-09 11:37:07,764 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 11:37:16,549 INFO [train.py:898] (2/4) Epoch 19, batch 1300, loss[loss=0.1639, simple_loss=0.259, pruned_loss=0.03437, over 18610.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2556, pruned_loss=0.03896, over 3597036.67 frames. ], batch size: 52, lr: 5.97e-03, grad_scale: 8.0 +2023-03-09 11:37:22,010 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5444, 2.7584, 2.5396, 2.8803, 3.5956, 3.5869, 3.0584, 2.8342], + device='cuda:2'), covar=tensor([0.0157, 0.0265, 0.0539, 0.0360, 0.0176, 0.0129, 0.0333, 0.0374], + device='cuda:2'), in_proj_covar=tensor([0.0134, 0.0131, 0.0161, 0.0154, 0.0125, 0.0110, 0.0149, 0.0151], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 11:37:37,962 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66731.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:37:40,278 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4631, 2.7686, 2.5426, 2.8381, 3.5398, 3.5184, 2.9883, 2.7872], + device='cuda:2'), covar=tensor([0.0201, 0.0250, 0.0518, 0.0364, 0.0175, 0.0124, 0.0352, 0.0349], + device='cuda:2'), in_proj_covar=tensor([0.0134, 0.0130, 0.0161, 0.0153, 0.0125, 0.0109, 0.0149, 0.0150], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 11:38:14,588 INFO [train.py:898] (2/4) Epoch 19, batch 1350, loss[loss=0.1721, simple_loss=0.2623, pruned_loss=0.04098, over 18369.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2558, pruned_loss=0.03908, over 3586131.00 frames. 
], batch size: 56, lr: 5.97e-03, grad_scale: 8.0 +2023-03-09 11:38:24,645 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.905e+02 2.817e+02 3.306e+02 3.967e+02 8.245e+02, threshold=6.612e+02, percent-clipped=2.0 +2023-03-09 11:38:48,302 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66792.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:38:57,350 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66800.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:39:12,422 INFO [train.py:898] (2/4) Epoch 19, batch 1400, loss[loss=0.1426, simple_loss=0.2262, pruned_loss=0.0295, over 18386.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2557, pruned_loss=0.03906, over 3590075.60 frames. ], batch size: 42, lr: 5.97e-03, grad_scale: 8.0 +2023-03-09 11:39:16,777 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66816.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:39:54,053 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66848.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:40:11,320 INFO [train.py:898] (2/4) Epoch 19, batch 1450, loss[loss=0.1814, simple_loss=0.2751, pruned_loss=0.04381, over 18456.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2555, pruned_loss=0.03906, over 3583886.14 frames. ], batch size: 59, lr: 5.97e-03, grad_scale: 8.0 +2023-03-09 11:40:12,676 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66864.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:40:21,463 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.935e+02 2.720e+02 3.294e+02 4.147e+02 8.956e+02, threshold=6.588e+02, percent-clipped=4.0 +2023-03-09 11:40:28,889 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66877.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 11:41:10,288 INFO [train.py:898] (2/4) Epoch 19, batch 1500, loss[loss=0.1425, simple_loss=0.2254, pruned_loss=0.0298, over 18386.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2554, pruned_loss=0.03905, over 3585163.30 frames. ], batch size: 42, lr: 5.96e-03, grad_scale: 8.0 +2023-03-09 11:41:11,792 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66914.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:41:33,206 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5969, 2.4407, 2.5962, 2.6052, 3.2377, 4.8678, 4.6709, 3.2195], + device='cuda:2'), covar=tensor([0.1618, 0.2172, 0.2808, 0.1733, 0.2052, 0.0186, 0.0352, 0.0940], + device='cuda:2'), in_proj_covar=tensor([0.0291, 0.0339, 0.0370, 0.0271, 0.0387, 0.0232, 0.0292, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 11:42:02,728 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66957.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:42:08,458 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=66962.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:42:09,393 INFO [train.py:898] (2/4) Epoch 19, batch 1550, loss[loss=0.167, simple_loss=0.2561, pruned_loss=0.03898, over 18406.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2559, pruned_loss=0.03927, over 3587207.98 frames. 
], batch size: 52, lr: 5.96e-03, grad_scale: 8.0 +2023-03-09 11:42:18,929 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.827e+02 2.644e+02 3.102e+02 3.654e+02 6.872e+02, threshold=6.204e+02, percent-clipped=2.0 +2023-03-09 11:42:22,766 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-09 11:42:39,150 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1295, 3.4151, 3.3916, 2.8944, 3.0293, 2.8109, 2.4450, 2.3261], + device='cuda:2'), covar=tensor([0.0257, 0.0171, 0.0136, 0.0299, 0.0359, 0.0242, 0.0642, 0.0738], + device='cuda:2'), in_proj_covar=tensor([0.0069, 0.0058, 0.0060, 0.0068, 0.0087, 0.0066, 0.0076, 0.0084], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 11:43:07,409 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4177, 3.3750, 3.2700, 3.0169, 3.2129, 2.6528, 2.6630, 3.4340], + device='cuda:2'), covar=tensor([0.0069, 0.0096, 0.0075, 0.0121, 0.0087, 0.0180, 0.0196, 0.0066], + device='cuda:2'), in_proj_covar=tensor([0.0130, 0.0154, 0.0129, 0.0181, 0.0135, 0.0174, 0.0178, 0.0115], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 11:43:08,126 INFO [train.py:898] (2/4) Epoch 19, batch 1600, loss[loss=0.1666, simple_loss=0.2631, pruned_loss=0.03507, over 17686.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2559, pruned_loss=0.03917, over 3585877.31 frames. ], batch size: 70, lr: 5.96e-03, grad_scale: 8.0 +2023-03-09 11:43:14,599 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67018.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:44:06,912 INFO [train.py:898] (2/4) Epoch 19, batch 1650, loss[loss=0.1436, simple_loss=0.2282, pruned_loss=0.02946, over 18253.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2557, pruned_loss=0.03903, over 3601125.72 frames. ], batch size: 47, lr: 5.96e-03, grad_scale: 8.0 +2023-03-09 11:44:16,362 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.888e+02 2.665e+02 3.285e+02 3.842e+02 7.636e+02, threshold=6.571e+02, percent-clipped=3.0 +2023-03-09 11:44:31,191 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67083.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:44:36,658 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67087.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:45:05,335 INFO [train.py:898] (2/4) Epoch 19, batch 1700, loss[loss=0.1838, simple_loss=0.2778, pruned_loss=0.04486, over 18499.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2562, pruned_loss=0.03948, over 3600418.32 frames. ], batch size: 53, lr: 5.95e-03, grad_scale: 8.0 +2023-03-09 11:45:43,266 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67144.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:45:45,717 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. limit=2.0 +2023-03-09 11:46:04,411 INFO [train.py:898] (2/4) Epoch 19, batch 1750, loss[loss=0.1648, simple_loss=0.2615, pruned_loss=0.03399, over 18538.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2556, pruned_loss=0.03918, over 3588488.48 frames. 
], batch size: 49, lr: 5.95e-03, grad_scale: 8.0 +2023-03-09 11:46:13,339 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.723e+02 2.728e+02 3.267e+02 4.070e+02 1.061e+03, threshold=6.534e+02, percent-clipped=5.0 +2023-03-09 11:46:21,084 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67177.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:47:02,916 INFO [train.py:898] (2/4) Epoch 19, batch 1800, loss[loss=0.1654, simple_loss=0.2441, pruned_loss=0.04332, over 18514.00 frames. ], tot_loss[loss=0.1673, simple_loss=0.2561, pruned_loss=0.03925, over 3587801.09 frames. ], batch size: 47, lr: 5.95e-03, grad_scale: 8.0 +2023-03-09 11:47:17,352 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67225.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:47:59,741 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8711, 3.8514, 3.6950, 3.4213, 3.5984, 3.0448, 3.0596, 3.7612], + device='cuda:2'), covar=tensor([0.0052, 0.0072, 0.0071, 0.0117, 0.0095, 0.0175, 0.0178, 0.0083], + device='cuda:2'), in_proj_covar=tensor([0.0129, 0.0153, 0.0128, 0.0180, 0.0135, 0.0174, 0.0176, 0.0114], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 11:48:01,514 INFO [train.py:898] (2/4) Epoch 19, batch 1850, loss[loss=0.2057, simple_loss=0.2896, pruned_loss=0.06086, over 12584.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2559, pruned_loss=0.03923, over 3579334.24 frames. ], batch size: 129, lr: 5.95e-03, grad_scale: 8.0 +2023-03-09 11:48:03,430 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-09 11:48:10,593 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.962e+02 2.832e+02 3.331e+02 4.104e+02 8.145e+02, threshold=6.662e+02, percent-clipped=3.0 +2023-03-09 11:49:00,066 INFO [train.py:898] (2/4) Epoch 19, batch 1900, loss[loss=0.2044, simple_loss=0.2803, pruned_loss=0.06422, over 12327.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2556, pruned_loss=0.03925, over 3570004.34 frames. ], batch size: 129, lr: 5.95e-03, grad_scale: 4.0 +2023-03-09 11:49:00,274 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67313.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 11:49:13,972 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6106, 2.7391, 4.3416, 3.8299, 2.5802, 4.5795, 3.8272, 2.6300], + device='cuda:2'), covar=tensor([0.0502, 0.1569, 0.0276, 0.0388, 0.1704, 0.0200, 0.0563, 0.1205], + device='cuda:2'), in_proj_covar=tensor([0.0201, 0.0229, 0.0196, 0.0155, 0.0217, 0.0201, 0.0234, 0.0192], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 11:49:58,288 INFO [train.py:898] (2/4) Epoch 19, batch 1950, loss[loss=0.1564, simple_loss=0.2484, pruned_loss=0.03219, over 17848.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.255, pruned_loss=0.03924, over 3570206.94 frames. 
], batch size: 70, lr: 5.94e-03, grad_scale: 4.0 +2023-03-09 11:50:03,337 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3602, 2.9081, 2.2882, 2.8633, 3.5111, 3.3742, 3.1205, 2.8626], + device='cuda:2'), covar=tensor([0.0179, 0.0200, 0.0646, 0.0336, 0.0174, 0.0155, 0.0291, 0.0337], + device='cuda:2'), in_proj_covar=tensor([0.0135, 0.0130, 0.0162, 0.0153, 0.0126, 0.0111, 0.0149, 0.0150], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 11:50:08,472 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.842e+02 2.816e+02 3.321e+02 4.112e+02 1.785e+03, threshold=6.643e+02, percent-clipped=3.0 +2023-03-09 11:50:10,439 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-03-09 11:50:26,415 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67387.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 11:50:57,634 INFO [train.py:898] (2/4) Epoch 19, batch 2000, loss[loss=0.1774, simple_loss=0.2673, pruned_loss=0.04374, over 18299.00 frames. ], tot_loss[loss=0.1675, simple_loss=0.256, pruned_loss=0.03944, over 3569512.27 frames. ], batch size: 57, lr: 5.94e-03, grad_scale: 8.0 +2023-03-09 11:51:22,925 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67435.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:51:27,935 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67439.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:51:36,187 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-09 11:51:46,657 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-03-09 11:51:56,183 INFO [train.py:898] (2/4) Epoch 19, batch 2050, loss[loss=0.1752, simple_loss=0.2586, pruned_loss=0.0459, over 18357.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2555, pruned_loss=0.03912, over 3578618.58 frames. ], batch size: 56, lr: 5.94e-03, grad_scale: 8.0 +2023-03-09 11:52:06,262 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.056e+02 2.683e+02 3.184e+02 3.899e+02 7.354e+02, threshold=6.369e+02, percent-clipped=1.0 +2023-03-09 11:52:44,384 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-09 11:52:54,239 INFO [train.py:898] (2/4) Epoch 19, batch 2100, loss[loss=0.1641, simple_loss=0.2611, pruned_loss=0.03356, over 18105.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2556, pruned_loss=0.03912, over 3586778.90 frames. ], batch size: 62, lr: 5.94e-03, grad_scale: 8.0 +2023-03-09 11:52:54,543 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67513.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:52:56,032 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.11 vs. 
limit=5.0 +2023-03-09 11:53:38,611 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5625, 2.8596, 2.6561, 2.8953, 3.6778, 3.5458, 3.1956, 2.8473], + device='cuda:2'), covar=tensor([0.0155, 0.0262, 0.0536, 0.0370, 0.0189, 0.0145, 0.0348, 0.0359], + device='cuda:2'), in_proj_covar=tensor([0.0136, 0.0130, 0.0162, 0.0153, 0.0127, 0.0112, 0.0150, 0.0151], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 11:53:52,384 INFO [train.py:898] (2/4) Epoch 19, batch 2150, loss[loss=0.1564, simple_loss=0.2473, pruned_loss=0.0327, over 18280.00 frames. ], tot_loss[loss=0.1675, simple_loss=0.2562, pruned_loss=0.03934, over 3569393.83 frames. ], batch size: 49, lr: 5.93e-03, grad_scale: 8.0 +2023-03-09 11:54:03,261 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.859e+02 2.589e+02 3.256e+02 4.119e+02 1.040e+03, threshold=6.512e+02, percent-clipped=4.0 +2023-03-09 11:54:05,947 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67574.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 11:54:41,309 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.62 vs. limit=2.0 +2023-03-09 11:54:43,504 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.53 vs. limit=2.0 +2023-03-09 11:54:51,815 INFO [train.py:898] (2/4) Epoch 19, batch 2200, loss[loss=0.1668, simple_loss=0.2646, pruned_loss=0.0345, over 18417.00 frames. ], tot_loss[loss=0.1675, simple_loss=0.2564, pruned_loss=0.03927, over 3572198.20 frames. ], batch size: 52, lr: 5.93e-03, grad_scale: 8.0 +2023-03-09 11:54:52,089 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67613.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:55:48,946 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67661.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:55:51,015 INFO [train.py:898] (2/4) Epoch 19, batch 2250, loss[loss=0.1792, simple_loss=0.2703, pruned_loss=0.04408, over 16233.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2573, pruned_loss=0.03928, over 3557139.29 frames. ], batch size: 94, lr: 5.93e-03, grad_scale: 8.0 +2023-03-09 11:56:01,705 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.067e+02 2.672e+02 3.118e+02 3.558e+02 7.247e+02, threshold=6.237e+02, percent-clipped=1.0 +2023-03-09 11:56:07,778 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8122, 2.9229, 4.3733, 3.8101, 2.9171, 4.7462, 3.9812, 3.1343], + device='cuda:2'), covar=tensor([0.0459, 0.1408, 0.0280, 0.0395, 0.1345, 0.0208, 0.0491, 0.0860], + device='cuda:2'), in_proj_covar=tensor([0.0204, 0.0233, 0.0199, 0.0157, 0.0221, 0.0203, 0.0237, 0.0194], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 11:56:09,181 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.03 vs. 
limit=5.0 +2023-03-09 11:56:11,178 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67680.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:56:25,079 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67692.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:56:39,486 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6769, 3.4307, 4.6945, 2.8132, 4.0430, 2.4906, 2.9176, 1.8755], + device='cuda:2'), covar=tensor([0.1132, 0.0891, 0.0182, 0.0865, 0.0635, 0.2459, 0.2448, 0.2052], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0235, 0.0174, 0.0189, 0.0249, 0.0264, 0.0312, 0.0226], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 11:56:43,427 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5830, 2.9039, 2.6116, 2.9047, 3.6940, 3.5739, 3.1079, 2.8232], + device='cuda:2'), covar=tensor([0.0166, 0.0254, 0.0524, 0.0352, 0.0161, 0.0139, 0.0379, 0.0363], + device='cuda:2'), in_proj_covar=tensor([0.0137, 0.0131, 0.0163, 0.0156, 0.0129, 0.0113, 0.0152, 0.0153], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 11:56:50,298 INFO [train.py:898] (2/4) Epoch 19, batch 2300, loss[loss=0.1672, simple_loss=0.2666, pruned_loss=0.03388, over 18311.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2572, pruned_loss=0.03904, over 3574713.19 frames. ], batch size: 54, lr: 5.93e-03, grad_scale: 8.0 +2023-03-09 11:57:20,984 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67739.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:57:23,485 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67741.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:57:37,078 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67753.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 11:57:48,522 INFO [train.py:898] (2/4) Epoch 19, batch 2350, loss[loss=0.1927, simple_loss=0.276, pruned_loss=0.05469, over 18378.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2571, pruned_loss=0.03939, over 3582834.68 frames. ], batch size: 52, lr: 5.93e-03, grad_scale: 8.0 +2023-03-09 11:57:59,071 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.915e+02 2.613e+02 3.195e+02 3.840e+02 8.434e+02, threshold=6.389e+02, percent-clipped=1.0 +2023-03-09 11:58:16,650 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=67787.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 11:58:47,098 INFO [train.py:898] (2/4) Epoch 19, batch 2400, loss[loss=0.1894, simple_loss=0.2785, pruned_loss=0.05013, over 18303.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2569, pruned_loss=0.03953, over 3593413.72 frames. ], batch size: 57, lr: 5.92e-03, grad_scale: 8.0 +2023-03-09 11:59:45,735 INFO [train.py:898] (2/4) Epoch 19, batch 2450, loss[loss=0.1578, simple_loss=0.2482, pruned_loss=0.03371, over 18359.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2562, pruned_loss=0.03891, over 3601978.45 frames. 
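
`grad_scale` dropped from 8.0 to 4.0 at batch 1900 and was back at 8.0 by batch 2000: the signature of dynamic loss scaling in mixed-precision training, where the scale is halved when a step produces inf/nan gradients and grown again after a run of clean steps. A sketch using PyTorch's stock GradScaler; note that recovery within ~100 batches implies this run grows the scale far more often than PyTorch's default of every 2000 clean steps, so the numbers below are assumptions chosen to match the log:

    import torch

    scaler = torch.cuda.amp.GradScaler(
        init_scale=8.0,       # the grad_scale seen in these lines
        backoff_factor=0.5,   # halves on overflow: 8.0 -> 4.0
        growth_factor=2.0,    # doubles after enough clean steps: 4.0 -> 8.0
        growth_interval=100,  # assumed; chosen to match the quick recovery
    )

    def fp16_step(model, optimizer, compute_loss, batch):
        optimizer.zero_grad()
        with torch.cuda.amp.autocast():
            loss = compute_loss(model, batch)
        scaler.scale(loss).backward()
        scaler.step(optimizer)  # silently skipped on an inf/nan step
        scaler.update()         # applies the backoff/growth rule above
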
], batch size: 50, lr: 5.92e-03, grad_scale: 8.0 +2023-03-09 11:59:54,027 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67869.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 11:59:57,189 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.025e+02 2.777e+02 3.368e+02 4.107e+02 6.941e+02, threshold=6.736e+02, percent-clipped=1.0 +2023-03-09 12:00:01,090 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9436, 3.8776, 3.7712, 3.4315, 3.6840, 3.0667, 3.0743, 3.9140], + device='cuda:2'), covar=tensor([0.0049, 0.0083, 0.0071, 0.0104, 0.0081, 0.0161, 0.0173, 0.0066], + device='cuda:2'), in_proj_covar=tensor([0.0130, 0.0152, 0.0129, 0.0182, 0.0135, 0.0174, 0.0177, 0.0114], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 12:00:36,113 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9600, 4.5766, 4.7003, 3.5328, 3.8279, 3.5809, 2.7492, 2.6760], + device='cuda:2'), covar=tensor([0.0192, 0.0173, 0.0084, 0.0296, 0.0364, 0.0231, 0.0727, 0.0818], + device='cuda:2'), in_proj_covar=tensor([0.0069, 0.0058, 0.0061, 0.0067, 0.0088, 0.0066, 0.0076, 0.0083], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 12:00:44,900 INFO [train.py:898] (2/4) Epoch 19, batch 2500, loss[loss=0.1681, simple_loss=0.2639, pruned_loss=0.03612, over 16949.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.256, pruned_loss=0.03892, over 3590853.25 frames. ], batch size: 78, lr: 5.92e-03, grad_scale: 8.0 +2023-03-09 12:01:29,136 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-09 12:01:43,850 INFO [train.py:898] (2/4) Epoch 19, batch 2550, loss[loss=0.168, simple_loss=0.2672, pruned_loss=0.03443, over 18274.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2554, pruned_loss=0.03883, over 3578628.34 frames. ], batch size: 57, lr: 5.92e-03, grad_scale: 4.0 +2023-03-09 12:01:56,272 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.852e+02 2.690e+02 3.137e+02 3.674e+02 8.020e+02, threshold=6.273e+02, percent-clipped=2.0 +2023-03-09 12:02:06,092 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67981.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:02:47,652 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-09 12:02:48,102 INFO [train.py:898] (2/4) Epoch 19, batch 2600, loss[loss=0.224, simple_loss=0.297, pruned_loss=0.07556, over 12874.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2557, pruned_loss=0.03914, over 3567690.34 frames. 
], batch size: 130, lr: 5.91e-03, grad_scale: 4.0 +2023-03-09 12:02:51,774 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8549, 4.8447, 4.9139, 4.6264, 4.6993, 4.7050, 4.9758, 5.0398], + device='cuda:2'), covar=tensor([0.0066, 0.0071, 0.0064, 0.0107, 0.0066, 0.0143, 0.0069, 0.0097], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0067, 0.0071, 0.0089, 0.0072, 0.0099, 0.0084, 0.0084], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 12:03:16,206 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68036.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:03:23,039 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68042.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:03:25,665 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 12:03:29,645 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68048.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 12:03:33,516 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-03-09 12:03:47,000 INFO [train.py:898] (2/4) Epoch 19, batch 2650, loss[loss=0.163, simple_loss=0.2471, pruned_loss=0.03942, over 18269.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2562, pruned_loss=0.03959, over 3574011.28 frames. ], batch size: 47, lr: 5.91e-03, grad_scale: 4.0 +2023-03-09 12:03:58,870 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.938e+02 2.796e+02 3.306e+02 3.948e+02 6.802e+02, threshold=6.612e+02, percent-clipped=2.0 +2023-03-09 12:04:16,031 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68087.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 12:04:38,394 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7697, 3.1237, 4.3317, 3.8318, 2.7542, 4.7148, 3.9960, 2.9945], + device='cuda:2'), covar=tensor([0.0504, 0.1261, 0.0282, 0.0414, 0.1504, 0.0181, 0.0504, 0.0928], + device='cuda:2'), in_proj_covar=tensor([0.0204, 0.0233, 0.0199, 0.0157, 0.0220, 0.0204, 0.0238, 0.0193], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 12:04:44,747 INFO [train.py:898] (2/4) Epoch 19, batch 2700, loss[loss=0.185, simple_loss=0.2763, pruned_loss=0.04686, over 18373.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2554, pruned_loss=0.03918, over 3588742.71 frames. 
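
The `zipformer.py:1455` blocks dump per-head entropies of the self-attention weights: values near zero mean a head is locking onto single frames, larger values mean it spreads its mass broadly, so these tensors flag collapsing or dead heads. A plausible way to compute such a diagnostic (the shape convention is an assumption):

    import torch

    def attn_weights_entropy(attn: torch.Tensor, eps: float = 1e-20):
        """attn: (num_heads, tgt_len, src_len), each row a distribution.
        Returns the per-head entropy averaged over target positions."""
        p = attn.clamp(min=eps)
        ent = -(p * p.log()).sum(dim=-1)  # (num_heads, tgt_len)
        return ent.mean(dim=-1)           # one number per head

    attn = torch.softmax(torch.randn(8, 50, 50), dim=-1)
    print(attn_weights_entropy(attn))  # 8 values, like one logged row
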
], batch size: 56, lr: 5.91e-03, grad_scale: 4.0 +2023-03-09 12:05:06,372 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4824, 5.9434, 5.5550, 5.7769, 5.5233, 5.4026, 6.0444, 5.9118], + device='cuda:2'), covar=tensor([0.1080, 0.0820, 0.0490, 0.0683, 0.1460, 0.0729, 0.0532, 0.0830], + device='cuda:2'), in_proj_covar=tensor([0.0600, 0.0513, 0.0372, 0.0539, 0.0727, 0.0531, 0.0725, 0.0555], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 12:05:27,172 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68148.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 12:05:30,469 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7668, 3.7194, 3.5794, 3.1717, 3.4184, 2.8307, 2.8582, 3.7352], + device='cuda:2'), covar=tensor([0.0055, 0.0079, 0.0070, 0.0133, 0.0090, 0.0183, 0.0191, 0.0060], + device='cuda:2'), in_proj_covar=tensor([0.0129, 0.0151, 0.0127, 0.0179, 0.0135, 0.0172, 0.0174, 0.0113], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 12:05:43,838 INFO [train.py:898] (2/4) Epoch 19, batch 2750, loss[loss=0.1474, simple_loss=0.234, pruned_loss=0.03039, over 18503.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.256, pruned_loss=0.03923, over 3598232.33 frames. ], batch size: 47, lr: 5.91e-03, grad_scale: 4.0 +2023-03-09 12:05:44,441 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6012, 2.1836, 2.6371, 2.8170, 3.2506, 4.9153, 4.8268, 3.2284], + device='cuda:2'), covar=tensor([0.1815, 0.2482, 0.2962, 0.1685, 0.2332, 0.0192, 0.0327, 0.0954], + device='cuda:2'), in_proj_covar=tensor([0.0295, 0.0343, 0.0374, 0.0275, 0.0391, 0.0235, 0.0293, 0.0250], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 12:05:51,604 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68169.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:05:55,854 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.532e+02 2.565e+02 3.131e+02 3.754e+02 7.044e+02, threshold=6.262e+02, percent-clipped=1.0 +2023-03-09 12:06:42,968 INFO [train.py:898] (2/4) Epoch 19, batch 2800, loss[loss=0.1922, simple_loss=0.2827, pruned_loss=0.05087, over 18364.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2563, pruned_loss=0.03929, over 3602500.61 frames. ], batch size: 56, lr: 5.91e-03, grad_scale: 8.0 +2023-03-09 12:06:47,656 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68217.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:06:51,173 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-09 12:07:04,600 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.74 vs. limit=2.0 +2023-03-09 12:07:41,670 INFO [train.py:898] (2/4) Epoch 19, batch 2850, loss[loss=0.1959, simple_loss=0.2877, pruned_loss=0.05201, over 17707.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.257, pruned_loss=0.03934, over 3596247.73 frames. ], batch size: 70, lr: 5.90e-03, grad_scale: 8.0 +2023-03-09 12:07:48,015 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. 
limit=2.0 +2023-03-09 12:07:53,622 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.861e+02 2.563e+02 3.164e+02 3.682e+02 9.840e+02, threshold=6.328e+02, percent-clipped=2.0 +2023-03-09 12:08:41,528 INFO [train.py:898] (2/4) Epoch 19, batch 2900, loss[loss=0.161, simple_loss=0.241, pruned_loss=0.04049, over 18397.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2573, pruned_loss=0.03965, over 3576745.86 frames. ], batch size: 48, lr: 5.90e-03, grad_scale: 8.0 +2023-03-09 12:08:57,290 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9459, 4.9902, 5.0180, 4.7399, 4.7579, 4.8248, 5.1070, 5.1280], + device='cuda:2'), covar=tensor([0.0072, 0.0063, 0.0065, 0.0111, 0.0064, 0.0143, 0.0065, 0.0082], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0067, 0.0071, 0.0089, 0.0073, 0.0100, 0.0084, 0.0084], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 12:09:01,253 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-09 12:09:05,557 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5267, 2.2801, 2.4861, 2.5192, 2.9331, 4.2957, 4.0457, 3.3207], + device='cuda:2'), covar=tensor([0.1740, 0.2318, 0.2774, 0.1868, 0.2402, 0.0305, 0.0505, 0.0777], + device='cuda:2'), in_proj_covar=tensor([0.0293, 0.0341, 0.0372, 0.0273, 0.0388, 0.0235, 0.0291, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 12:09:09,323 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68336.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:09:10,303 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68337.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:09:12,935 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1098, 4.2408, 2.4327, 4.1484, 5.3461, 2.5763, 3.9439, 4.0449], + device='cuda:2'), covar=tensor([0.0156, 0.1093, 0.1625, 0.0609, 0.0075, 0.1237, 0.0608, 0.0669], + device='cuda:2'), in_proj_covar=tensor([0.0158, 0.0265, 0.0202, 0.0193, 0.0121, 0.0180, 0.0214, 0.0224], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 12:09:23,488 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68348.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:09:37,337 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7346, 5.2279, 5.2243, 5.2119, 4.7610, 5.1007, 4.5823, 5.1010], + device='cuda:2'), covar=tensor([0.0230, 0.0278, 0.0177, 0.0352, 0.0345, 0.0220, 0.1008, 0.0255], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0256, 0.0244, 0.0316, 0.0261, 0.0261, 0.0299, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 12:09:40,263 INFO [train.py:898] (2/4) Epoch 19, batch 2950, loss[loss=0.1694, simple_loss=0.2582, pruned_loss=0.04025, over 18413.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2575, pruned_loss=0.03965, over 3566181.39 frames. 
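
The `scaling.py:679` lines track a whitening diagnostic: the metric is 1.0 when a channel group's covariance is a multiple of the identity (all eigenvalues equal) and grows as the covariance becomes ill-conditioned, so `metric=1.15 vs. limit=2.0` means those activations sit comfortably inside the constraint, while the `num_groups=1, num_channels=384` probes run against a looser `limit=5.0`. One way to compute such a metric is the ratio of the mean squared eigenvalue to the squared mean eigenvalue, sketched below; the exact formula used by the run is an assumption here:

    import torch

    def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
        """x: (num_frames, num_channels), treated as zero-mean.
        Returns >= 1.0; equals 1.0 iff each group's covariance is c * I."""
        n, c = x.shape
        d = c // num_groups
        xg = x.reshape(n, num_groups, d).transpose(0, 1)  # (groups, n, d)
        cov = xg.transpose(1, 2) @ xg / n                 # (groups, d, d)
        mean_eig = cov.diagonal(dim1=1, dim2=2).mean(dim=-1)   # tr(C)/d
        mean_sq_eig = (cov * cov).sum(dim=(1, 2)) / d          # tr(C^2)/d
        return (mean_sq_eig / mean_eig**2).mean()

    print(whitening_metric(torch.randn(4000, 96), num_groups=8))  # ~1.0
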
], batch size: 52, lr: 5.90e-03, grad_scale: 8.0 +2023-03-09 12:09:47,346 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1022, 5.1826, 5.2072, 4.9481, 4.8364, 4.9763, 5.2515, 5.3422], + device='cuda:2'), covar=tensor([0.0072, 0.0064, 0.0055, 0.0093, 0.0066, 0.0145, 0.0069, 0.0089], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0067, 0.0071, 0.0089, 0.0073, 0.0100, 0.0084, 0.0084], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 12:09:51,484 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.776e+02 2.615e+02 3.161e+02 3.773e+02 8.205e+02, threshold=6.323e+02, percent-clipped=2.0 +2023-03-09 12:10:04,942 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68384.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:10:19,667 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68396.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:10:23,220 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68399.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:10:31,351 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.68 vs. limit=5.0 +2023-03-09 12:10:39,475 INFO [train.py:898] (2/4) Epoch 19, batch 3000, loss[loss=0.2009, simple_loss=0.2837, pruned_loss=0.05909, over 18144.00 frames. ], tot_loss[loss=0.1675, simple_loss=0.2566, pruned_loss=0.03917, over 3580821.13 frames. ], batch size: 62, lr: 5.90e-03, grad_scale: 8.0 +2023-03-09 12:10:39,475 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 12:10:51,586 INFO [train.py:932] (2/4) Epoch 19, validation: loss=0.1511, simple_loss=0.2509, pruned_loss=0.02564, over 944034.00 frames. +2023-03-09 12:10:51,587 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 12:10:57,804 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 12:11:24,943 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1952, 4.1103, 3.9440, 4.1262, 4.1330, 3.6995, 4.1017, 3.9942], + device='cuda:2'), covar=tensor([0.0478, 0.0790, 0.1273, 0.0716, 0.0617, 0.0449, 0.0513, 0.0911], + device='cuda:2'), in_proj_covar=tensor([0.0467, 0.0539, 0.0677, 0.0419, 0.0434, 0.0485, 0.0533, 0.0659], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 12:11:27,662 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68443.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 12:11:47,349 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68460.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:11:48,906 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.49 vs. limit=5.0 +2023-03-09 12:11:50,384 INFO [train.py:898] (2/4) Epoch 19, batch 3050, loss[loss=0.1633, simple_loss=0.2494, pruned_loss=0.03861, over 18540.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2559, pruned_loss=0.03922, over 3575955.35 frames. 
], batch size: 49, lr: 5.90e-03, grad_scale: 8.0 +2023-03-09 12:12:02,235 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.030e+02 2.743e+02 3.169e+02 3.811e+02 7.810e+02, threshold=6.338e+02, percent-clipped=1.0 +2023-03-09 12:12:49,526 INFO [train.py:898] (2/4) Epoch 19, batch 3100, loss[loss=0.1635, simple_loss=0.2403, pruned_loss=0.0433, over 17712.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.2552, pruned_loss=0.039, over 3576446.65 frames. ], batch size: 39, lr: 5.89e-03, grad_scale: 8.0 +2023-03-09 12:13:25,376 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4025, 2.7976, 2.4025, 2.8133, 3.5476, 3.3196, 2.9828, 2.8295], + device='cuda:2'), covar=tensor([0.0174, 0.0285, 0.0565, 0.0311, 0.0183, 0.0175, 0.0336, 0.0318], + device='cuda:2'), in_proj_covar=tensor([0.0135, 0.0129, 0.0159, 0.0152, 0.0126, 0.0111, 0.0149, 0.0149], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 12:13:48,206 INFO [train.py:898] (2/4) Epoch 19, batch 3150, loss[loss=0.1452, simple_loss=0.2248, pruned_loss=0.03284, over 18392.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.255, pruned_loss=0.03902, over 3580300.85 frames. ], batch size: 42, lr: 5.89e-03, grad_scale: 8.0 +2023-03-09 12:13:59,949 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.941e+02 2.741e+02 3.192e+02 3.844e+02 7.803e+02, threshold=6.385e+02, percent-clipped=3.0 +2023-03-09 12:14:05,945 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68578.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:14:12,809 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4654, 2.7706, 2.3032, 2.8353, 3.5468, 3.3772, 2.9865, 2.8623], + device='cuda:2'), covar=tensor([0.0209, 0.0310, 0.0678, 0.0361, 0.0203, 0.0161, 0.0401, 0.0352], + device='cuda:2'), in_proj_covar=tensor([0.0136, 0.0129, 0.0160, 0.0152, 0.0126, 0.0112, 0.0149, 0.0150], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 12:14:14,351 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.49 vs. limit=5.0 +2023-03-09 12:14:46,957 INFO [train.py:898] (2/4) Epoch 19, batch 3200, loss[loss=0.1696, simple_loss=0.2576, pruned_loss=0.04086, over 16047.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2555, pruned_loss=0.03898, over 3579512.60 frames. ], batch size: 94, lr: 5.89e-03, grad_scale: 8.0 +2023-03-09 12:14:56,926 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-03-09 12:15:16,386 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68637.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:15:16,907 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.79 vs. limit=2.0 +2023-03-09 12:15:18,757 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68639.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:15:46,041 INFO [train.py:898] (2/4) Epoch 19, batch 3250, loss[loss=0.1586, simple_loss=0.2555, pruned_loss=0.03086, over 18343.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2557, pruned_loss=0.03913, over 3572958.25 frames. 
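
The `zipformer.py:625` lines record stochastic layer skipping: for each encoder stack the trainer decides per batch how many layers to bypass (`num_to_drop`) and which ones (`layers_to_drop`), with `warmup_begin`/`warmup_end` bracketing that stack's warmup window in batches. With `batch_count` around 68,500 (long past every window) most batches drop nothing and occasionally one layer, so a small residual drop probability evidently survives warmup. A sketch of the mechanism; the probabilities and schedule are assumptions, not the run's actual values:

    import random

    def pick_layers_to_drop(num_layers: int, batch_count: float,
                            warmup_begin: float, warmup_end: float) -> set:
        if batch_count < warmup_begin:
            p = 0.0   # before the window: always run every layer
        elif batch_count < warmup_end:
            p = 0.5   # inside the window: aggressive dropping (assumed)
        else:
            p = 0.05  # after warmup: small residual rate (assumed)
        return {i for i in range(num_layers) if random.random() < p}

    drop = pick_layers_to_drop(4, batch_count=68578.0,
                               warmup_begin=2666.7, warmup_end=3333.3)
    print(f"num_to_drop={len(drop)}, layers_to_drop={drop}")
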
], batch size: 55, lr: 5.89e-03, grad_scale: 8.0 +2023-03-09 12:15:57,317 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.558e+02 2.445e+02 3.074e+02 3.780e+02 1.190e+03, threshold=6.148e+02, percent-clipped=4.0 +2023-03-09 12:16:10,685 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4707, 3.3053, 1.9054, 4.2697, 2.8898, 4.1051, 2.2345, 3.6095], + device='cuda:2'), covar=tensor([0.0634, 0.0812, 0.1560, 0.0456, 0.0907, 0.0285, 0.1193, 0.0483], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0222, 0.0186, 0.0279, 0.0190, 0.0259, 0.0200, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 12:16:12,192 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68685.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:16:12,394 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68685.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:16:45,045 INFO [train.py:898] (2/4) Epoch 19, batch 3300, loss[loss=0.1631, simple_loss=0.2517, pruned_loss=0.03725, over 18377.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2553, pruned_loss=0.03885, over 3585851.09 frames. ], batch size: 50, lr: 5.88e-03, grad_scale: 8.0 +2023-03-09 12:17:20,794 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68743.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 12:17:24,257 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68746.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:17:34,145 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68755.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:17:43,486 INFO [train.py:898] (2/4) Epoch 19, batch 3350, loss[loss=0.1622, simple_loss=0.2582, pruned_loss=0.03312, over 18401.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2551, pruned_loss=0.03873, over 3577136.94 frames. ], batch size: 52, lr: 5.88e-03, grad_scale: 8.0 +2023-03-09 12:17:45,448 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.74 vs. limit=2.0 +2023-03-09 12:17:54,607 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.843e+02 2.741e+02 3.273e+02 4.091e+02 6.319e+02, threshold=6.545e+02, percent-clipped=1.0 +2023-03-09 12:18:16,628 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=68791.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 12:18:42,916 INFO [train.py:898] (2/4) Epoch 19, batch 3400, loss[loss=0.1598, simple_loss=0.2555, pruned_loss=0.03202, over 18314.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.254, pruned_loss=0.03809, over 3579298.34 frames. ], batch size: 54, lr: 5.88e-03, grad_scale: 8.0 +2023-03-09 12:19:35,567 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6271, 2.2396, 2.5988, 2.6354, 3.2975, 4.9312, 4.6174, 3.5134], + device='cuda:2'), covar=tensor([0.1772, 0.2426, 0.2980, 0.1836, 0.2253, 0.0219, 0.0387, 0.0846], + device='cuda:2'), in_proj_covar=tensor([0.0295, 0.0343, 0.0374, 0.0274, 0.0389, 0.0237, 0.0293, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 12:19:41,745 INFO [train.py:898] (2/4) Epoch 19, batch 3450, loss[loss=0.1661, simple_loss=0.2623, pruned_loss=0.03497, over 18612.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2543, pruned_loss=0.03804, over 3590951.21 frames. 
], batch size: 52, lr: 5.88e-03, grad_scale: 8.0 +2023-03-09 12:19:52,899 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9046, 4.6017, 4.6747, 3.5912, 3.8324, 3.4527, 3.1033, 2.7094], + device='cuda:2'), covar=tensor([0.0223, 0.0149, 0.0062, 0.0263, 0.0349, 0.0266, 0.0590, 0.0772], + device='cuda:2'), in_proj_covar=tensor([0.0068, 0.0057, 0.0060, 0.0067, 0.0086, 0.0065, 0.0075, 0.0081], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 12:19:53,565 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.993e+02 2.660e+02 3.004e+02 3.762e+02 7.210e+02, threshold=6.009e+02, percent-clipped=0.0 +2023-03-09 12:20:40,020 INFO [train.py:898] (2/4) Epoch 19, batch 3500, loss[loss=0.1688, simple_loss=0.2584, pruned_loss=0.03962, over 16163.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.2554, pruned_loss=0.03856, over 3574512.94 frames. ], batch size: 94, lr: 5.88e-03, grad_scale: 8.0 +2023-03-09 12:20:42,877 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7300, 3.8047, 5.0900, 4.5440, 3.4038, 3.1336, 4.5434, 5.3346], + device='cuda:2'), covar=tensor([0.0784, 0.1457, 0.0160, 0.0314, 0.0812, 0.1039, 0.0333, 0.0173], + device='cuda:2'), in_proj_covar=tensor([0.0146, 0.0270, 0.0146, 0.0180, 0.0189, 0.0187, 0.0190, 0.0190], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 12:21:04,402 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68934.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:21:35,934 INFO [train.py:898] (2/4) Epoch 19, batch 3550, loss[loss=0.1492, simple_loss=0.239, pruned_loss=0.02969, over 18363.00 frames. ], tot_loss[loss=0.1664, simple_loss=0.2555, pruned_loss=0.03863, over 3581640.01 frames. ], batch size: 46, lr: 5.87e-03, grad_scale: 8.0 +2023-03-09 12:21:46,790 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.911e+02 2.828e+02 3.238e+02 3.903e+02 1.381e+03, threshold=6.476e+02, percent-clipped=5.0 +2023-03-09 12:21:47,531 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.25 vs. limit=5.0 +2023-03-09 12:22:30,455 INFO [train.py:898] (2/4) Epoch 19, batch 3600, loss[loss=0.1547, simple_loss=0.2455, pruned_loss=0.03198, over 18303.00 frames. ], tot_loss[loss=0.1664, simple_loss=0.2556, pruned_loss=0.03859, over 3592744.60 frames. ], batch size: 49, lr: 5.87e-03, grad_scale: 8.0 +2023-03-09 12:22:59,602 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69041.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:23:01,635 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69043.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:23:34,668 INFO [train.py:898] (2/4) Epoch 20, batch 0, loss[loss=0.1716, simple_loss=0.2449, pruned_loss=0.04909, over 18137.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2449, pruned_loss=0.04909, over 18137.00 frames. 
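
Across these lines the learning rate decays smoothly inside epoch 19 (5.96e-03 down to 5.87e-03) and then steps to 5.72e-03 at "Epoch 20, batch 0". Both effects are reproduced by an Eden-style schedule, lr = base_lr * ((b^2 + B^2)/B^2)^(-1/4) * ((e^2 + E^2)/E^2)^(-1/4), assuming base_lr = 0.05, B = 5000 batches, E = 3.5 epochs, with b the global batch count and e the number of completed epochs. The function below is reconstructed from the logged values (it matches them to three digits) rather than quoted from the run's code:

    def eden_lr(batch: int, epochs_done: float, base_lr: float = 0.05,
                lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
        batch_factor = ((batch**2 + lr_batches**2) / lr_batches**2) ** -0.25
        epoch_factor = ((epochs_done**2 + lr_epochs**2) / lr_epochs**2) ** -0.25
        return base_lr * batch_factor * epoch_factor

    print(eden_lr(67018, 18))  # ~5.96e-03: epoch 19, batch 1600 above
    print(eden_lr(69050, 19))  # ~5.72e-03: epoch 20, batch 0 above
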
], batch size: 44, lr: 5.72e-03, grad_scale: 8.0 +2023-03-09 12:23:34,669 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 12:23:39,475 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.2082, 2.5855, 3.5394, 3.2890, 2.8209, 2.6626, 3.2651, 3.6352], + device='cuda:2'), covar=tensor([0.0956, 0.1721, 0.0254, 0.0467, 0.0927, 0.1171, 0.0499, 0.0442], + device='cuda:2'), in_proj_covar=tensor([0.0145, 0.0268, 0.0145, 0.0178, 0.0189, 0.0185, 0.0189, 0.0189], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 12:23:46,324 INFO [train.py:932] (2/4) Epoch 20, validation: loss=0.1509, simple_loss=0.2512, pruned_loss=0.02534, over 944034.00 frames. +2023-03-09 12:23:46,324 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 12:23:47,759 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4776, 5.4396, 5.1002, 5.3485, 5.4067, 4.8313, 5.3155, 5.0691], + device='cuda:2'), covar=tensor([0.0391, 0.0450, 0.1246, 0.0851, 0.0566, 0.0366, 0.0428, 0.1144], + device='cuda:2'), in_proj_covar=tensor([0.0466, 0.0534, 0.0677, 0.0418, 0.0435, 0.0485, 0.0529, 0.0657], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 12:23:56,109 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69055.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:24:17,099 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.770e+02 2.674e+02 3.193e+02 4.211e+02 7.931e+02, threshold=6.386e+02, percent-clipped=3.0 +2023-03-09 12:24:18,686 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8834, 4.2005, 2.5875, 4.1117, 5.1621, 2.5086, 3.7699, 3.9403], + device='cuda:2'), covar=tensor([0.0168, 0.1132, 0.1620, 0.0573, 0.0075, 0.1396, 0.0755, 0.0712], + device='cuda:2'), in_proj_covar=tensor([0.0161, 0.0266, 0.0202, 0.0194, 0.0121, 0.0181, 0.0216, 0.0225], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 12:24:40,624 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. limit=2.0 +2023-03-09 12:24:45,726 INFO [train.py:898] (2/4) Epoch 20, batch 50, loss[loss=0.1715, simple_loss=0.2667, pruned_loss=0.03815, over 18293.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.26, pruned_loss=0.04012, over 810132.24 frames. ], batch size: 57, lr: 5.72e-03, grad_scale: 4.0 +2023-03-09 12:24:49,107 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-09 12:24:50,050 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.64 vs. limit=2.0 +2023-03-09 12:24:52,745 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69103.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:24:54,054 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69104.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:25:37,791 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.24 vs. limit=5.0 +2023-03-09 12:25:44,569 INFO [train.py:898] (2/4) Epoch 20, batch 100, loss[loss=0.1518, simple_loss=0.2283, pruned_loss=0.03766, over 18482.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2557, pruned_loss=0.03893, over 1431350.05 frames. 
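
Validation runs at fixed points (batch 3000 of epoch 19 above, and again at the first batch of epoch 20): training pauses, the loss is averaged over the same dev set every time (944034.00 frames in every validation line, so the numbers are directly comparable across epochs), and peak CUDA memory is reported. A sketch of that bookkeeping; the `compute_loss` helper and its signature are illustrative, not the recipe's exact code:

    import torch

    @torch.no_grad()
    def run_validation(model, dev_loader, compute_loss, device):
        model.eval()
        tot_loss, tot_frames = 0.0, 0.0
        for batch in dev_loader:
            loss, num_frames = compute_loss(model, batch)  # assumed helper
            tot_loss += float(loss)
            tot_frames += float(num_frames)
        model.train()
        print(f"validation: loss={tot_loss / tot_frames:.4f}, "
              f"over {tot_frames:.2f} frames.")
        print(f"Maximum memory allocated so far is "
              f"{torch.cuda.max_memory_allocated(device) // 2**20}MB")
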
], batch size: 44, lr: 5.72e-03, grad_scale: 4.0 +2023-03-09 12:26:01,953 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69162.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:26:07,067 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7075, 3.0005, 4.3368, 3.7559, 2.6977, 4.6825, 3.9868, 2.9809], + device='cuda:2'), covar=tensor([0.0536, 0.1447, 0.0289, 0.0421, 0.1552, 0.0202, 0.0507, 0.0985], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0238, 0.0202, 0.0157, 0.0222, 0.0207, 0.0241, 0.0196], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 12:26:09,374 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69168.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:26:15,827 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.993e+02 2.622e+02 3.145e+02 3.671e+02 8.108e+02, threshold=6.290e+02, percent-clipped=1.0 +2023-03-09 12:26:42,979 INFO [train.py:898] (2/4) Epoch 20, batch 150, loss[loss=0.1558, simple_loss=0.2537, pruned_loss=0.029, over 18387.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2547, pruned_loss=0.03806, over 1901455.74 frames. ], batch size: 55, lr: 5.71e-03, grad_scale: 4.0 +2023-03-09 12:27:14,066 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69223.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:27:16,562 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69225.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:27:21,191 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69229.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:27:27,347 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69234.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:27:34,894 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5816, 3.0462, 4.2877, 3.6537, 2.6179, 4.5902, 3.9257, 3.0070], + device='cuda:2'), covar=tensor([0.0493, 0.1262, 0.0277, 0.0391, 0.1504, 0.0172, 0.0476, 0.0858], + device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0237, 0.0200, 0.0157, 0.0221, 0.0206, 0.0240, 0.0196], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 12:27:40,520 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69245.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 12:27:42,407 INFO [train.py:898] (2/4) Epoch 20, batch 200, loss[loss=0.1787, simple_loss=0.2654, pruned_loss=0.04602, over 18094.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2538, pruned_loss=0.03769, over 2286870.81 frames. 
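
The `tot_loss[... over N frames]` field shows its averaging window refilling after the epoch boundary: ~18k frames at batch 0, ~810k at batch 50, ~1.43M at batch 100, ~1.9M at batch 150, flattening near 3.6M thereafter. That ramp is what an exponentially decayed accumulator with decay 1 - 1/200 produces at ~18k frames per batch (steady state 200 x 18k = 3.6M); the decay constant is inferred from these numbers, not read out of the code:

    def ema_frame_count(batches: int, frames_per_batch: float = 18_000.0,
                        decay: float = 1.0 - 1.0 / 200.0) -> float:
        """Frames covered by tot_loss after `batches` steps, assuming
        tot = tot * decay + current_batch at every step."""
        tot = 0.0
        for _ in range(batches):
            tot = tot * decay + frames_per_batch
        return tot

    for done in (1, 51, 101, 151):   # batches 0, 50, 100, 150 above
        print(done - 1, round(ema_frame_count(done)))
    # ~18k, ~0.81M, ~1.43M, ~1.93M -- the same ramp as the logged counts
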
], batch size: 62, lr: 5.71e-03, grad_scale: 4.0 +2023-03-09 12:27:46,244 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7507, 3.7911, 5.1379, 4.4966, 3.3699, 2.9631, 4.5927, 5.3332], + device='cuda:2'), covar=tensor([0.0802, 0.1403, 0.0150, 0.0374, 0.0921, 0.1154, 0.0341, 0.0280], + device='cuda:2'), in_proj_covar=tensor([0.0146, 0.0269, 0.0145, 0.0179, 0.0190, 0.0186, 0.0190, 0.0190], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 12:28:07,104 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0826, 5.2136, 5.2423, 4.8821, 4.9690, 4.9458, 5.3103, 5.2881], + device='cuda:2'), covar=tensor([0.0063, 0.0049, 0.0044, 0.0099, 0.0047, 0.0135, 0.0055, 0.0072], + device='cuda:2'), in_proj_covar=tensor([0.0092, 0.0068, 0.0072, 0.0091, 0.0074, 0.0102, 0.0085, 0.0085], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 12:28:13,642 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69273.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:28:14,466 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.067e+02 2.703e+02 3.198e+02 3.752e+02 7.537e+02, threshold=6.397e+02, percent-clipped=4.0 +2023-03-09 12:28:23,451 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69282.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:28:28,139 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69286.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:28:41,463 INFO [train.py:898] (2/4) Epoch 20, batch 250, loss[loss=0.1603, simple_loss=0.2575, pruned_loss=0.0315, over 18379.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2539, pruned_loss=0.03758, over 2582915.36 frames. ], batch size: 55, lr: 5.71e-03, grad_scale: 4.0 +2023-03-09 12:28:52,678 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69306.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 12:29:01,493 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5442, 6.1131, 5.6454, 5.9103, 5.7112, 5.5154, 6.1888, 6.0946], + device='cuda:2'), covar=tensor([0.1172, 0.0758, 0.0409, 0.0691, 0.1326, 0.0767, 0.0510, 0.0696], + device='cuda:2'), in_proj_covar=tensor([0.0605, 0.0517, 0.0380, 0.0543, 0.0731, 0.0536, 0.0733, 0.0558], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 12:29:08,286 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69320.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:29:24,479 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69334.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 12:29:32,177 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69341.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:29:39,805 INFO [train.py:898] (2/4) Epoch 20, batch 300, loss[loss=0.1625, simple_loss=0.2576, pruned_loss=0.03365, over 18573.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2542, pruned_loss=0.03757, over 2822231.17 frames. 
], batch size: 54, lr: 5.71e-03, grad_scale: 4.0 +2023-03-09 12:30:04,290 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69368.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:30:10,765 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.810e+02 2.793e+02 3.155e+02 4.197e+02 1.182e+03, threshold=6.310e+02, percent-clipped=4.0 +2023-03-09 12:30:13,442 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2418, 5.6449, 2.8792, 5.3826, 5.3016, 5.6361, 5.5013, 3.1834], + device='cuda:2'), covar=tensor([0.0171, 0.0077, 0.0729, 0.0064, 0.0073, 0.0070, 0.0071, 0.0817], + device='cuda:2'), in_proj_covar=tensor([0.0087, 0.0081, 0.0096, 0.0096, 0.0085, 0.0075, 0.0084, 0.0098], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 12:30:19,723 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69381.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:30:28,850 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69389.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:30:34,525 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69394.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:30:37,563 INFO [train.py:898] (2/4) Epoch 20, batch 350, loss[loss=0.1515, simple_loss=0.248, pruned_loss=0.02751, over 18400.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2542, pruned_loss=0.03761, over 2996301.16 frames. ], batch size: 52, lr: 5.71e-03, grad_scale: 4.0 +2023-03-09 12:30:40,737 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69399.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:31:16,147 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69429.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:31:36,508 INFO [train.py:898] (2/4) Epoch 20, batch 400, loss[loss=0.1582, simple_loss=0.2478, pruned_loss=0.03428, over 18481.00 frames. ], tot_loss[loss=0.1637, simple_loss=0.253, pruned_loss=0.03723, over 3128322.08 frames. ], batch size: 51, lr: 5.70e-03, grad_scale: 8.0 +2023-03-09 12:31:47,151 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69455.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:32:00,621 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6285, 3.6082, 2.3702, 4.4783, 3.1595, 4.3926, 2.5298, 4.1393], + device='cuda:2'), covar=tensor([0.0624, 0.0765, 0.1380, 0.0496, 0.0877, 0.0340, 0.1174, 0.0384], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0223, 0.0188, 0.0281, 0.0193, 0.0262, 0.0203, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 12:32:08,924 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.671e+02 2.636e+02 3.143e+02 3.755e+02 6.655e+02, threshold=6.287e+02, percent-clipped=1.0 +2023-03-09 12:32:35,587 INFO [train.py:898] (2/4) Epoch 20, batch 450, loss[loss=0.1792, simple_loss=0.2661, pruned_loss=0.04615, over 18078.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2536, pruned_loss=0.03776, over 3230702.02 frames. 
], batch size: 62, lr: 5.70e-03, grad_scale: 8.0 +2023-03-09 12:33:00,424 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69518.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:33:07,376 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69524.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:33:33,767 INFO [train.py:898] (2/4) Epoch 20, batch 500, loss[loss=0.1628, simple_loss=0.2386, pruned_loss=0.04349, over 18398.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2535, pruned_loss=0.03783, over 3307965.56 frames. ], batch size: 42, lr: 5.70e-03, grad_scale: 8.0 +2023-03-09 12:34:05,765 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.904e+02 2.776e+02 3.162e+02 3.774e+02 8.469e+02, threshold=6.325e+02, percent-clipped=1.0 +2023-03-09 12:34:14,449 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69581.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:34:24,415 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3622, 5.2327, 5.5601, 5.5320, 5.3067, 6.0865, 5.7231, 5.4617], + device='cuda:2'), covar=tensor([0.1037, 0.0596, 0.0735, 0.0736, 0.1416, 0.0702, 0.0625, 0.1487], + device='cuda:2'), in_proj_covar=tensor([0.0352, 0.0278, 0.0305, 0.0302, 0.0327, 0.0413, 0.0276, 0.0404], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:2') +2023-03-09 12:34:32,473 INFO [train.py:898] (2/4) Epoch 20, batch 550, loss[loss=0.1468, simple_loss=0.2331, pruned_loss=0.03024, over 18363.00 frames. ], tot_loss[loss=0.164, simple_loss=0.253, pruned_loss=0.0375, over 3372960.11 frames. ], batch size: 46, lr: 5.70e-03, grad_scale: 4.0 +2023-03-09 12:34:37,544 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69601.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 12:34:40,040 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7110, 3.6299, 4.9971, 4.3045, 3.2329, 2.8818, 4.4553, 5.2211], + device='cuda:2'), covar=tensor([0.0787, 0.1433, 0.0183, 0.0393, 0.0971, 0.1167, 0.0357, 0.0233], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0272, 0.0146, 0.0180, 0.0191, 0.0188, 0.0192, 0.0191], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 12:34:48,768 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-03-09 12:35:10,084 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69629.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 12:35:15,447 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8256, 3.6164, 5.0508, 2.8475, 4.3809, 2.5951, 3.1281, 1.8035], + device='cuda:2'), covar=tensor([0.1194, 0.1000, 0.0163, 0.0929, 0.0485, 0.2454, 0.2625, 0.2223], + device='cuda:2'), in_proj_covar=tensor([0.0216, 0.0241, 0.0179, 0.0192, 0.0252, 0.0267, 0.0318, 0.0231], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 12:35:31,674 INFO [train.py:898] (2/4) Epoch 20, batch 600, loss[loss=0.1686, simple_loss=0.2559, pruned_loss=0.04062, over 18497.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.2541, pruned_loss=0.03777, over 3418508.96 frames. 
], batch size: 53, lr: 5.69e-03, grad_scale: 4.0 +2023-03-09 12:36:04,116 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69674.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 12:36:04,878 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.123e+02 2.772e+02 3.241e+02 3.888e+02 6.801e+02, threshold=6.482e+02, percent-clipped=2.0 +2023-03-09 12:36:06,289 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69676.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:36:14,578 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.53 vs. limit=5.0 +2023-03-09 12:36:29,579 INFO [train.py:898] (2/4) Epoch 20, batch 650, loss[loss=0.1683, simple_loss=0.261, pruned_loss=0.03774, over 18629.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.254, pruned_loss=0.03778, over 3457262.19 frames. ], batch size: 52, lr: 5.69e-03, grad_scale: 4.0 +2023-03-09 12:36:32,780 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69699.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:36:35,359 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-09 12:36:41,994 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5212, 2.8333, 4.1474, 3.6718, 2.4208, 4.4029, 3.7508, 2.8179], + device='cuda:2'), covar=tensor([0.0517, 0.1416, 0.0268, 0.0379, 0.1646, 0.0208, 0.0560, 0.0965], + device='cuda:2'), in_proj_covar=tensor([0.0208, 0.0237, 0.0202, 0.0159, 0.0221, 0.0207, 0.0243, 0.0196], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 12:37:01,632 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69724.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 12:37:14,056 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69735.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 12:37:28,027 INFO [train.py:898] (2/4) Epoch 20, batch 700, loss[loss=0.1659, simple_loss=0.2528, pruned_loss=0.03953, over 18383.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2549, pruned_loss=0.03817, over 3469185.51 frames. 
], batch size: 50, lr: 5.69e-03, grad_scale: 4.0
+2023-03-09 12:37:28,190 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69747.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:37:28,431 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5292, 2.7501, 2.4922, 2.8441, 3.5609, 3.4472, 3.0231, 2.9504],
+ device='cuda:2'), covar=tensor([0.0191, 0.0257, 0.0577, 0.0404, 0.0166, 0.0160, 0.0362, 0.0318],
+ device='cuda:2'), in_proj_covar=tensor([0.0138, 0.0131, 0.0161, 0.0155, 0.0128, 0.0114, 0.0151, 0.0153],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-09 12:37:31,613 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69750.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:37:41,105 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4940, 5.4758, 5.0726, 5.4907, 5.4503, 4.8127, 5.3115, 5.0910],
+ device='cuda:2'), covar=tensor([0.0441, 0.0416, 0.1330, 0.0696, 0.0542, 0.0398, 0.0442, 0.1034],
+ device='cuda:2'), in_proj_covar=tensor([0.0478, 0.0544, 0.0689, 0.0425, 0.0439, 0.0491, 0.0539, 0.0667],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006],
+ device='cuda:2')
+2023-03-09 12:37:53,335 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.73 vs. limit=2.0
+2023-03-09 12:38:00,236 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.022e+02 2.614e+02 3.096e+02 3.753e+02 7.667e+02, threshold=6.192e+02, percent-clipped=2.0
+2023-03-09 12:38:26,159 INFO [train.py:898] (2/4) Epoch 20, batch 750, loss[loss=0.1663, simple_loss=0.2596, pruned_loss=0.03654, over 18234.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2543, pruned_loss=0.03808, over 3497618.52 frames. ], batch size: 60, lr: 5.69e-03, grad_scale: 4.0
+2023-03-09 12:38:50,803 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69818.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:38:51,063 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7742, 2.4447, 2.7203, 2.8837, 3.4308, 5.0599, 4.9117, 3.5458],
+ device='cuda:2'), covar=tensor([0.1792, 0.2323, 0.2963, 0.1779, 0.2221, 0.0209, 0.0350, 0.0923],
+ device='cuda:2'), in_proj_covar=tensor([0.0298, 0.0344, 0.0378, 0.0276, 0.0390, 0.0237, 0.0293, 0.0251],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001],
+ device='cuda:2')
+2023-03-09 12:38:58,126 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69824.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:39:25,616 INFO [train.py:898] (2/4) Epoch 20, batch 800, loss[loss=0.1505, simple_loss=0.2289, pruned_loss=0.03612, over 17704.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.2537, pruned_loss=0.03805, over 3510070.66 frames. ], batch size: 39, lr: 5.69e-03, grad_scale: 8.0
+2023-03-09 12:39:48,074 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69866.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:39:55,154 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69872.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:39:58,463 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.014e+02 2.750e+02 3.144e+02 3.939e+02 8.664e+02, threshold=6.288e+02, percent-clipped=4.0
+2023-03-09 12:40:05,918 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69881.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:40:23,198 INFO [train.py:898] (2/4) Epoch 20, batch 850, loss[loss=0.1668, simple_loss=0.261, pruned_loss=0.03624, over 18624.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2552, pruned_loss=0.03874, over 3521663.73 frames. ], batch size: 52, lr: 5.68e-03, grad_scale: 8.0
+2023-03-09 12:40:28,230 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69901.0, num_to_drop=1, layers_to_drop={2}
+2023-03-09 12:41:01,251 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69929.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:41:01,324 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69929.0, num_to_drop=1, layers_to_drop={2}
+2023-03-09 12:41:21,120 INFO [train.py:898] (2/4) Epoch 20, batch 900, loss[loss=0.1436, simple_loss=0.2277, pruned_loss=0.02976, over 18500.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.2553, pruned_loss=0.0386, over 3529370.97 frames. ], batch size: 47, lr: 5.68e-03, grad_scale: 8.0
+2023-03-09 12:41:23,621 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69949.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 12:41:54,373 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.882e+02 2.754e+02 3.235e+02 3.952e+02 9.067e+02, threshold=6.470e+02, percent-clipped=4.0
+2023-03-09 12:41:55,818 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69976.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:41:56,852 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=69977.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:42:19,882 INFO [train.py:898] (2/4) Epoch 20, batch 950, loss[loss=0.1475, simple_loss=0.2307, pruned_loss=0.03222, over 18243.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2546, pruned_loss=0.03816, over 3541401.78 frames. ], batch size: 45, lr: 5.68e-03, grad_scale: 8.0
+2023-03-09 12:42:56,935 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70024.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:42:57,116 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70024.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:43:04,273 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70030.0, num_to_drop=1, layers_to_drop={2}
+2023-03-09 12:43:23,756 INFO [train.py:898] (2/4) Epoch 20, batch 1000, loss[loss=0.1886, simple_loss=0.2808, pruned_loss=0.04822, over 18250.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2551, pruned_loss=0.03818, over 3554215.59 frames. ], batch size: 60, lr: 5.68e-03, grad_scale: 8.0
+2023-03-09 12:43:27,325 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70050.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:43:52,919 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70072.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:43:56,168 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.914e+02 2.640e+02 3.091e+02 3.737e+02 6.684e+02, threshold=6.182e+02, percent-clipped=1.0
+2023-03-09 12:44:22,008 INFO [train.py:898] (2/4) Epoch 20, batch 1050, loss[loss=0.1536, simple_loss=0.2447, pruned_loss=0.03123, over 18392.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2544, pruned_loss=0.03797, over 3563693.75 frames. ], batch size: 50, lr: 5.68e-03, grad_scale: 8.0
+2023-03-09 12:44:23,293 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70098.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:44:37,172 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70110.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:45:20,004 INFO [train.py:898] (2/4) Epoch 20, batch 1100, loss[loss=0.1539, simple_loss=0.2411, pruned_loss=0.03333, over 18299.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.2543, pruned_loss=0.03791, over 3568938.63 frames. ], batch size: 49, lr: 5.67e-03, grad_scale: 8.0
+2023-03-09 12:45:23,013 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.74 vs. limit=2.0
+2023-03-09 12:45:45,214 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0
+2023-03-09 12:45:48,200 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70171.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:45:52,308 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.767e+02 2.847e+02 3.446e+02 4.067e+02 6.954e+02, threshold=6.891e+02, percent-clipped=3.0
+2023-03-09 12:46:17,953 INFO [train.py:898] (2/4) Epoch 20, batch 1150, loss[loss=0.1521, simple_loss=0.2413, pruned_loss=0.03147, over 18492.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2545, pruned_loss=0.0379, over 3581394.07 frames. ], batch size: 44, lr: 5.67e-03, grad_scale: 8.0
+2023-03-09 12:47:04,658 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7121, 3.7399, 5.0209, 4.4838, 3.3217, 2.9439, 4.4262, 5.2430],
+ device='cuda:2'), covar=tensor([0.0834, 0.1417, 0.0175, 0.0348, 0.0968, 0.1216, 0.0377, 0.0278],
+ device='cuda:2'), in_proj_covar=tensor([0.0148, 0.0272, 0.0147, 0.0180, 0.0191, 0.0189, 0.0192, 0.0193],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-09 12:47:16,745 INFO [train.py:898] (2/4) Epoch 20, batch 1200, loss[loss=0.1491, simple_loss=0.2388, pruned_loss=0.02974, over 18352.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2548, pruned_loss=0.03809, over 3584098.91 frames. ], batch size: 46, lr: 5.67e-03, grad_scale: 8.0
+2023-03-09 12:47:31,568 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70260.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:47:49,160 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.495e+02 2.757e+02 3.193e+02 3.861e+02 7.458e+02, threshold=6.386e+02, percent-clipped=1.0
+2023-03-09 12:48:15,314 INFO [train.py:898] (2/4) Epoch 20, batch 1250, loss[loss=0.1559, simple_loss=0.2487, pruned_loss=0.03158, over 18500.00 frames. ], tot_loss[loss=0.1661, simple_loss=0.2554, pruned_loss=0.03837, over 3581212.40 frames. ], batch size: 51, lr: 5.67e-03, grad_scale: 8.0
+2023-03-09 12:48:21,529 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0
+2023-03-09 12:48:42,653 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70321.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:48:44,433 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0148, 3.6897, 5.0289, 4.3989, 3.4485, 3.0067, 4.4707, 5.2691],
+ device='cuda:2'), covar=tensor([0.0776, 0.1504, 0.0166, 0.0405, 0.0897, 0.1198, 0.0404, 0.0227],
+ device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0270, 0.0146, 0.0179, 0.0189, 0.0187, 0.0191, 0.0191],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-09 12:48:53,766 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70330.0, num_to_drop=1, layers_to_drop={2}
+2023-03-09 12:49:13,037 INFO [train.py:898] (2/4) Epoch 20, batch 1300, loss[loss=0.1602, simple_loss=0.2561, pruned_loss=0.03211, over 16296.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2547, pruned_loss=0.03817, over 3586221.68 frames. ], batch size: 94, lr: 5.67e-03, grad_scale: 8.0
+2023-03-09 12:49:44,657 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.016e+02 2.769e+02 3.249e+02 3.833e+02 9.851e+02, threshold=6.498e+02, percent-clipped=4.0
+2023-03-09 12:49:48,778 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70378.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 12:50:05,185 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.50 vs. limit=2.0
+2023-03-09 12:50:10,105 INFO [train.py:898] (2/4) Epoch 20, batch 1350, loss[loss=0.1627, simple_loss=0.249, pruned_loss=0.03813, over 18132.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2546, pruned_loss=0.03826, over 3587147.35 frames. ], batch size: 44, lr: 5.66e-03, grad_scale: 8.0
+2023-03-09 12:51:08,594 INFO [train.py:898] (2/4) Epoch 20, batch 1400, loss[loss=0.1573, simple_loss=0.2526, pruned_loss=0.03102, over 18401.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2549, pruned_loss=0.03825, over 3589947.44 frames. ], batch size: 52, lr: 5.66e-03, grad_scale: 8.0
+2023-03-09 12:51:31,263 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70466.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:51:41,135 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.979e+02 2.850e+02 3.137e+02 3.892e+02 8.751e+02, threshold=6.275e+02, percent-clipped=3.0
+2023-03-09 12:52:06,376 INFO [train.py:898] (2/4) Epoch 20, batch 1450, loss[loss=0.1637, simple_loss=0.2544, pruned_loss=0.03652, over 18327.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2551, pruned_loss=0.03831, over 3588259.95 frames. ], batch size: 54, lr: 5.66e-03, grad_scale: 8.0
+2023-03-09 12:52:18,768 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4649, 2.2094, 4.0098, 3.7266, 2.3070, 4.2448, 3.6756, 2.6506],
+ device='cuda:2'), covar=tensor([0.0456, 0.2080, 0.0327, 0.0315, 0.2011, 0.0269, 0.0575, 0.1229],
+ device='cuda:2'), in_proj_covar=tensor([0.0207, 0.0236, 0.0202, 0.0159, 0.0221, 0.0208, 0.0241, 0.0195],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-09 12:53:04,437 INFO [train.py:898] (2/4) Epoch 20, batch 1500, loss[loss=0.1654, simple_loss=0.2593, pruned_loss=0.0358, over 18616.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2546, pruned_loss=0.03802, over 3584773.35 frames. ], batch size: 52, lr: 5.66e-03, grad_scale: 4.0
+2023-03-09 12:53:38,231 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.836e+02 2.609e+02 3.412e+02 4.380e+02 8.286e+02, threshold=6.824e+02, percent-clipped=3.0
+2023-03-09 12:53:42,512 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.03 vs. limit=5.0
+2023-03-09 12:54:03,313 INFO [train.py:898] (2/4) Epoch 20, batch 1550, loss[loss=0.1429, simple_loss=0.2255, pruned_loss=0.03017, over 18502.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.254, pruned_loss=0.03746, over 3600105.66 frames. ], batch size: 44, lr: 5.66e-03, grad_scale: 4.0
+2023-03-09 12:54:25,707 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70616.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:54:28,160 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5678, 3.4833, 4.7987, 4.2160, 3.2674, 2.8241, 4.2344, 5.0446],
+ device='cuda:2'), covar=tensor([0.0894, 0.1729, 0.0226, 0.0448, 0.0979, 0.1273, 0.0441, 0.0241],
+ device='cuda:2'), in_proj_covar=tensor([0.0149, 0.0274, 0.0150, 0.0182, 0.0192, 0.0190, 0.0195, 0.0195],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-09 12:55:02,177 INFO [train.py:898] (2/4) Epoch 20, batch 1600, loss[loss=0.1648, simple_loss=0.2613, pruned_loss=0.03412, over 18340.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2543, pruned_loss=0.03738, over 3606531.24 frames. ], batch size: 56, lr: 5.65e-03, grad_scale: 8.0
+2023-03-09 12:55:35,598 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.948e+02 2.737e+02 3.118e+02 3.818e+02 6.822e+02, threshold=6.236e+02, percent-clipped=0.0
+2023-03-09 12:55:59,467 INFO [train.py:898] (2/4) Epoch 20, batch 1650, loss[loss=0.1861, simple_loss=0.2757, pruned_loss=0.04828, over 18446.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2548, pruned_loss=0.03776, over 3603841.57 frames. ], batch size: 59, lr: 5.65e-03, grad_scale: 8.0
+2023-03-09 12:56:57,267 INFO [train.py:898] (2/4) Epoch 20, batch 1700, loss[loss=0.1718, simple_loss=0.2569, pruned_loss=0.04334, over 18491.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2548, pruned_loss=0.03801, over 3592947.04 frames. ], batch size: 44, lr: 5.65e-03, grad_scale: 8.0
+2023-03-09 12:57:20,532 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70766.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:57:31,229 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.987e+02 2.775e+02 3.401e+02 4.011e+02 8.447e+02, threshold=6.802e+02, percent-clipped=3.0
+2023-03-09 12:57:38,818 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3274, 5.3584, 4.9645, 5.2615, 5.2944, 4.6946, 5.1880, 4.9288],
+ device='cuda:2'), covar=tensor([0.0442, 0.0366, 0.1248, 0.0751, 0.0555, 0.0394, 0.0371, 0.1023],
+ device='cuda:2'), in_proj_covar=tensor([0.0477, 0.0544, 0.0691, 0.0429, 0.0440, 0.0495, 0.0536, 0.0666],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006],
+ device='cuda:2')
+2023-03-09 12:57:55,559 INFO [train.py:898] (2/4) Epoch 20, batch 1750, loss[loss=0.1671, simple_loss=0.2648, pruned_loss=0.03469, over 18585.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2546, pruned_loss=0.0377, over 3599089.36 frames. ], batch size: 54, lr: 5.65e-03, grad_scale: 8.0
+2023-03-09 12:58:03,827 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5541, 2.6710, 2.4575, 2.8150, 3.5635, 3.4862, 3.0333, 2.8202],
+ device='cuda:2'), covar=tensor([0.0187, 0.0309, 0.0572, 0.0454, 0.0200, 0.0160, 0.0431, 0.0399],
+ device='cuda:2'), in_proj_covar=tensor([0.0139, 0.0131, 0.0160, 0.0155, 0.0128, 0.0114, 0.0150, 0.0153],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-09 12:58:16,244 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70814.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 12:58:40,100 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3603, 5.2542, 5.6108, 5.5517, 5.2992, 6.1184, 5.8190, 5.3561],
+ device='cuda:2'), covar=tensor([0.1183, 0.0603, 0.0686, 0.0717, 0.1252, 0.0764, 0.0589, 0.1798],
+ device='cuda:2'), in_proj_covar=tensor([0.0355, 0.0282, 0.0305, 0.0303, 0.0327, 0.0418, 0.0277, 0.0411],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004],
+ device='cuda:2')
+2023-03-09 12:58:41,322 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8230, 4.5233, 4.5606, 3.2401, 3.6986, 3.2531, 2.5764, 2.5997],
+ device='cuda:2'), covar=tensor([0.0182, 0.0128, 0.0077, 0.0347, 0.0346, 0.0250, 0.0768, 0.0800],
+ device='cuda:2'), in_proj_covar=tensor([0.0069, 0.0059, 0.0061, 0.0069, 0.0089, 0.0066, 0.0077, 0.0084],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006],
+ device='cuda:2')
+2023-03-09 12:58:43,490 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2825, 3.2514, 3.1839, 2.9044, 3.1183, 2.6601, 2.5224, 3.2536],
+ device='cuda:2'), covar=tensor([0.0066, 0.0085, 0.0075, 0.0127, 0.0087, 0.0171, 0.0194, 0.0083],
+ device='cuda:2'), in_proj_covar=tensor([0.0136, 0.0155, 0.0130, 0.0184, 0.0140, 0.0178, 0.0179, 0.0116],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-09 12:58:54,610 INFO [train.py:898] (2/4) Epoch 20, batch 1800, loss[loss=0.1867, simple_loss=0.2733, pruned_loss=0.05007, over 17847.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.2543, pruned_loss=0.03777, over 3597387.81 frames. ], batch size: 70, lr: 5.65e-03, grad_scale: 8.0
+2023-03-09 12:59:28,914 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.702e+02 2.663e+02 3.014e+02 3.631e+02 8.036e+02, threshold=6.028e+02, percent-clipped=1.0
+2023-03-09 12:59:36,812 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5565, 2.1796, 2.4406, 2.4565, 2.9562, 4.7265, 4.5133, 3.4624],
+ device='cuda:2'), covar=tensor([0.1851, 0.2686, 0.3405, 0.2065, 0.2693, 0.0247, 0.0397, 0.0850],
+ device='cuda:2'), in_proj_covar=tensor([0.0295, 0.0344, 0.0377, 0.0276, 0.0385, 0.0239, 0.0293, 0.0250],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001],
+ device='cuda:2')
+2023-03-09 12:59:41,854 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-03-09 12:59:53,195 INFO [train.py:898] (2/4) Epoch 20, batch 1850, loss[loss=0.1483, simple_loss=0.2361, pruned_loss=0.03021, over 18415.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2546, pruned_loss=0.03808, over 3582642.47 frames. ], batch size: 48, lr: 5.64e-03, grad_scale: 8.0
+2023-03-09 13:00:16,554 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9272, 3.9284, 4.0130, 3.8118, 3.8348, 3.8724, 3.9865, 4.0501],
+ device='cuda:2'), covar=tensor([0.0088, 0.0080, 0.0081, 0.0109, 0.0073, 0.0138, 0.0081, 0.0091],
+ device='cuda:2'), in_proj_covar=tensor([0.0094, 0.0070, 0.0073, 0.0092, 0.0075, 0.0104, 0.0086, 0.0086],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:2')
+2023-03-09 13:00:16,568 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70916.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:00:51,723 INFO [train.py:898] (2/4) Epoch 20, batch 1900, loss[loss=0.1783, simple_loss=0.2694, pruned_loss=0.04366, over 17093.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2546, pruned_loss=0.03817, over 3573293.87 frames. ], batch size: 78, lr: 5.64e-03, grad_scale: 8.0
+2023-03-09 13:00:59,082 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7946, 2.9136, 2.7551, 3.1125, 3.8179, 3.7234, 3.3478, 3.1609],
+ device='cuda:2'), covar=tensor([0.0183, 0.0311, 0.0538, 0.0390, 0.0157, 0.0155, 0.0404, 0.0365],
+ device='cuda:2'), in_proj_covar=tensor([0.0139, 0.0130, 0.0159, 0.0154, 0.0127, 0.0113, 0.0149, 0.0153],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-09 13:01:09,044 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.30 vs. limit=5.0
+2023-03-09 13:01:11,885 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=70964.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:01:26,024 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.590e+02 2.797e+02 3.325e+02 3.887e+02 8.370e+02, threshold=6.650e+02, percent-clipped=5.0
+2023-03-09 13:01:44,865 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70992.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:01:50,089 INFO [train.py:898] (2/4) Epoch 20, batch 1950, loss[loss=0.1828, simple_loss=0.2723, pruned_loss=0.04662, over 17750.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2542, pruned_loss=0.03829, over 3555212.99 frames. ], batch size: 70, lr: 5.64e-03, grad_scale: 8.0
+2023-03-09 13:02:28,774 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71030.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:02:47,806 INFO [train.py:898] (2/4) Epoch 20, batch 2000, loss[loss=0.1752, simple_loss=0.2588, pruned_loss=0.0458, over 15897.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.255, pruned_loss=0.03839, over 3565240.39 frames. ], batch size: 94, lr: 5.64e-03, grad_scale: 8.0
+2023-03-09 13:02:54,860 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71053.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:03:21,387 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.116e+02 2.863e+02 3.373e+02 3.932e+02 6.166e+02, threshold=6.746e+02, percent-clipped=0.0
+2023-03-09 13:03:36,546 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9493, 5.3868, 2.7316, 5.2036, 5.0316, 5.3958, 5.2057, 2.7915],
+ device='cuda:2'), covar=tensor([0.0200, 0.0064, 0.0815, 0.0070, 0.0071, 0.0069, 0.0083, 0.1030],
+ device='cuda:2'), in_proj_covar=tensor([0.0087, 0.0081, 0.0096, 0.0096, 0.0085, 0.0076, 0.0085, 0.0097],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005],
+ device='cuda:2')
+2023-03-09 13:03:39,978 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71091.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:03:46,438 INFO [train.py:898] (2/4) Epoch 20, batch 2050, loss[loss=0.1684, simple_loss=0.2628, pruned_loss=0.03697, over 18306.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2543, pruned_loss=0.03813, over 3570935.95 frames. ], batch size: 49, lr: 5.64e-03, grad_scale: 8.0
+2023-03-09 13:04:17,481 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8944, 3.3921, 2.7202, 3.3070, 4.0774, 2.5482, 3.4242, 3.3871],
+ device='cuda:2'), covar=tensor([0.0254, 0.1172, 0.1185, 0.0694, 0.0139, 0.1093, 0.0639, 0.0720],
+ device='cuda:2'), in_proj_covar=tensor([0.0161, 0.0268, 0.0203, 0.0195, 0.0123, 0.0181, 0.0213, 0.0223],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-09 13:04:33,913 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0674, 5.5026, 2.8619, 5.2875, 5.2045, 5.4963, 5.3072, 2.8971],
+ device='cuda:2'), covar=tensor([0.0179, 0.0062, 0.0700, 0.0067, 0.0059, 0.0062, 0.0080, 0.0898],
+ device='cuda:2'), in_proj_covar=tensor([0.0086, 0.0080, 0.0094, 0.0095, 0.0084, 0.0075, 0.0084, 0.0096],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005],
+ device='cuda:2')
+2023-03-09 13:04:45,466 INFO [train.py:898] (2/4) Epoch 20, batch 2100, loss[loss=0.1817, simple_loss=0.2723, pruned_loss=0.04551, over 18404.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2534, pruned_loss=0.03772, over 3570605.71 frames. ], batch size: 52, lr: 5.63e-03, grad_scale: 8.0
+2023-03-09 13:05:19,430 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.825e+02 2.628e+02 3.039e+02 3.790e+02 1.100e+03, threshold=6.078e+02, percent-clipped=2.0
+2023-03-09 13:05:44,229 INFO [train.py:898] (2/4) Epoch 20, batch 2150, loss[loss=0.1553, simple_loss=0.2459, pruned_loss=0.03235, over 18282.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2537, pruned_loss=0.03781, over 3560109.54 frames. ], batch size: 49, lr: 5.63e-03, grad_scale: 8.0
+2023-03-09 13:05:49,505 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1557, 4.2582, 2.6513, 4.2953, 5.3791, 2.8138, 4.0208, 4.1300],
+ device='cuda:2'), covar=tensor([0.0187, 0.1280, 0.1576, 0.0599, 0.0074, 0.1144, 0.0612, 0.0705],
+ device='cuda:2'), in_proj_covar=tensor([0.0162, 0.0268, 0.0204, 0.0195, 0.0124, 0.0182, 0.0215, 0.0224],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-09 13:05:57,462 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5707, 3.5509, 3.4220, 3.0651, 3.2758, 2.7162, 2.7236, 3.5999],
+ device='cuda:2'), covar=tensor([0.0062, 0.0086, 0.0074, 0.0148, 0.0103, 0.0197, 0.0214, 0.0065],
+ device='cuda:2'), in_proj_covar=tensor([0.0135, 0.0155, 0.0129, 0.0183, 0.0140, 0.0177, 0.0179, 0.0116],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-09 13:06:43,286 INFO [train.py:898] (2/4) Epoch 20, batch 2200, loss[loss=0.1507, simple_loss=0.2378, pruned_loss=0.03176, over 18484.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.2538, pruned_loss=0.03799, over 3560979.53 frames. ], batch size: 47, lr: 5.63e-03, grad_scale: 8.0
+2023-03-09 13:07:09,065 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0
+2023-03-09 13:07:16,426 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.980e+02 2.743e+02 3.244e+02 3.938e+02 1.174e+03, threshold=6.489e+02, percent-clipped=4.0
+2023-03-09 13:07:41,312 INFO [train.py:898] (2/4) Epoch 20, batch 2250, loss[loss=0.1794, simple_loss=0.2684, pruned_loss=0.04516, over 17814.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.2542, pruned_loss=0.038, over 3570846.38 frames. ], batch size: 70, lr: 5.63e-03, grad_scale: 8.0
+2023-03-09 13:08:01,310 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4322, 5.1656, 5.6611, 5.5074, 5.3175, 6.1175, 5.7228, 5.3656],
+ device='cuda:2'), covar=tensor([0.1041, 0.0663, 0.0633, 0.0699, 0.1308, 0.0646, 0.0641, 0.1633],
+ device='cuda:2'), in_proj_covar=tensor([0.0356, 0.0284, 0.0306, 0.0304, 0.0330, 0.0417, 0.0280, 0.0411],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004],
+ device='cuda:2')
+2023-03-09 13:08:18,134 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. limit=2.0
+2023-03-09 13:08:40,049 INFO [train.py:898] (2/4) Epoch 20, batch 2300, loss[loss=0.1607, simple_loss=0.2488, pruned_loss=0.03634, over 18251.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2536, pruned_loss=0.03788, over 3576655.48 frames. ], batch size: 47, lr: 5.63e-03, grad_scale: 8.0
+2023-03-09 13:08:41,402 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71348.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:09:13,546 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.760e+02 2.586e+02 3.152e+02 3.675e+02 6.468e+02, threshold=6.303e+02, percent-clipped=0.0
+2023-03-09 13:09:25,920 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71386.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:09:30,572 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8526, 5.3806, 5.3764, 5.3358, 4.8570, 5.2354, 4.6882, 5.2380],
+ device='cuda:2'), covar=tensor([0.0239, 0.0255, 0.0185, 0.0380, 0.0385, 0.0222, 0.1035, 0.0297],
+ device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0260, 0.0250, 0.0325, 0.0265, 0.0269, 0.0308, 0.0257],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:2')
+2023-03-09 13:09:38,248 INFO [train.py:898] (2/4) Epoch 20, batch 2350, loss[loss=0.2008, simple_loss=0.2825, pruned_loss=0.05957, over 12731.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.253, pruned_loss=0.0378, over 3569224.93 frames. ], batch size: 130, lr: 5.62e-03, grad_scale: 8.0
+2023-03-09 13:10:37,015 INFO [train.py:898] (2/4) Epoch 20, batch 2400, loss[loss=0.1666, simple_loss=0.2599, pruned_loss=0.03663, over 17895.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2535, pruned_loss=0.03745, over 3580966.71 frames. ], batch size: 70, lr: 5.62e-03, grad_scale: 8.0
+2023-03-09 13:11:10,785 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.729e+02 2.680e+02 3.169e+02 3.609e+02 6.256e+02, threshold=6.338e+02, percent-clipped=0.0
+2023-03-09 13:11:12,158 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.3786, 4.8571, 4.8394, 4.9310, 4.3785, 4.7644, 4.1142, 4.7509],
+ device='cuda:2'), covar=tensor([0.0300, 0.0360, 0.0253, 0.0403, 0.0407, 0.0273, 0.1357, 0.0342],
+ device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0259, 0.0248, 0.0324, 0.0265, 0.0267, 0.0307, 0.0256],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:2')
+2023-03-09 13:11:35,512 INFO [train.py:898] (2/4) Epoch 20, batch 2450, loss[loss=0.1564, simple_loss=0.2529, pruned_loss=0.02993, over 18480.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2531, pruned_loss=0.03734, over 3577751.74 frames. ], batch size: 53, lr: 5.62e-03, grad_scale: 8.0
+2023-03-09 13:11:58,180 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5476, 5.0619, 5.0386, 5.0846, 4.5388, 4.9548, 4.3753, 4.9133],
+ device='cuda:2'), covar=tensor([0.0268, 0.0282, 0.0203, 0.0388, 0.0387, 0.0224, 0.1132, 0.0338],
+ device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0258, 0.0247, 0.0323, 0.0264, 0.0266, 0.0306, 0.0256],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:2')
+2023-03-09 13:12:14,002 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5149, 6.0350, 5.5642, 5.8180, 5.6006, 5.4929, 6.0546, 6.0155],
+ device='cuda:2'), covar=tensor([0.1038, 0.0743, 0.0410, 0.0741, 0.1374, 0.0642, 0.0586, 0.0689],
+ device='cuda:2'), in_proj_covar=tensor([0.0600, 0.0523, 0.0377, 0.0539, 0.0724, 0.0532, 0.0735, 0.0554],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004],
+ device='cuda:2')
+2023-03-09 13:12:33,820 INFO [train.py:898] (2/4) Epoch 20, batch 2500, loss[loss=0.1777, simple_loss=0.2678, pruned_loss=0.04375, over 18484.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2532, pruned_loss=0.03773, over 3585324.99 frames. ], batch size: 59, lr: 5.62e-03, grad_scale: 8.0
+2023-03-09 13:12:37,050 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0
+2023-03-09 13:12:37,472 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5488, 5.5179, 5.1392, 5.4907, 5.4770, 4.8978, 5.3684, 5.1670],
+ device='cuda:2'), covar=tensor([0.0394, 0.0401, 0.1373, 0.0714, 0.0536, 0.0372, 0.0437, 0.1034],
+ device='cuda:2'), in_proj_covar=tensor([0.0488, 0.0552, 0.0705, 0.0436, 0.0447, 0.0501, 0.0539, 0.0678],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006],
+ device='cuda:2')
+2023-03-09 13:13:05,865 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9760, 5.4382, 2.9907, 5.2544, 5.1741, 5.4453, 5.2743, 2.7064],
+ device='cuda:2'), covar=tensor([0.0198, 0.0057, 0.0671, 0.0066, 0.0059, 0.0064, 0.0071, 0.1025],
+ device='cuda:2'), in_proj_covar=tensor([0.0087, 0.0080, 0.0095, 0.0094, 0.0084, 0.0075, 0.0084, 0.0097],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005],
+ device='cuda:2')
+2023-03-09 13:13:07,752 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.564e+02 2.655e+02 3.143e+02 3.839e+02 8.943e+02, threshold=6.287e+02, percent-clipped=3.0
+2023-03-09 13:13:32,182 INFO [train.py:898] (2/4) Epoch 20, batch 2550, loss[loss=0.1719, simple_loss=0.2585, pruned_loss=0.04262, over 18411.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2537, pruned_loss=0.03781, over 3587715.06 frames. ], batch size: 48, lr: 5.62e-03, grad_scale: 8.0
+2023-03-09 13:13:41,245 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.67 vs. limit=5.0
+2023-03-09 13:13:55,596 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71616.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:14:31,306 INFO [train.py:898] (2/4) Epoch 20, batch 2600, loss[loss=0.1536, simple_loss=0.2491, pruned_loss=0.02902, over 18372.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2532, pruned_loss=0.03771, over 3588567.10 frames. ], batch size: 52, lr: 5.62e-03, grad_scale: 8.0
+2023-03-09 13:14:32,720 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71648.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:14:33,860 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0597, 5.5143, 2.6969, 5.3155, 5.2600, 5.5252, 5.3592, 2.6350],
+ device='cuda:2'), covar=tensor([0.0205, 0.0070, 0.0784, 0.0078, 0.0068, 0.0086, 0.0080, 0.1013],
+ device='cuda:2'), in_proj_covar=tensor([0.0087, 0.0080, 0.0095, 0.0095, 0.0084, 0.0075, 0.0084, 0.0097],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005],
+ device='cuda:2')
+2023-03-09 13:14:51,705 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.17 vs. limit=5.0
+2023-03-09 13:15:05,048 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.778e+02 2.596e+02 2.940e+02 3.683e+02 1.082e+03, threshold=5.881e+02, percent-clipped=3.0
+2023-03-09 13:15:06,431 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71677.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:15:16,563 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71686.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:15:28,096 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71696.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:15:29,076 INFO [train.py:898] (2/4) Epoch 20, batch 2650, loss[loss=0.1424, simple_loss=0.2226, pruned_loss=0.03107, over 18433.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2542, pruned_loss=0.03814, over 3584283.53 frames. ], batch size: 43, lr: 5.61e-03, grad_scale: 8.0
+2023-03-09 13:16:12,125 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=71734.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:16:27,159 INFO [train.py:898] (2/4) Epoch 20, batch 2700, loss[loss=0.1593, simple_loss=0.2479, pruned_loss=0.03531, over 18260.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2546, pruned_loss=0.03834, over 3574484.31 frames. ], batch size: 47, lr: 5.61e-03, grad_scale: 8.0
+2023-03-09 13:17:01,230 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.908e+02 2.794e+02 3.196e+02 3.940e+02 6.442e+02, threshold=6.393e+02, percent-clipped=2.0
+2023-03-09 13:17:25,398 INFO [train.py:898] (2/4) Epoch 20, batch 2750, loss[loss=0.1568, simple_loss=0.2419, pruned_loss=0.03579, over 18268.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2549, pruned_loss=0.03821, over 3582351.45 frames. ], batch size: 45, lr: 5.61e-03, grad_scale: 8.0
+2023-03-09 13:18:23,304 INFO [train.py:898] (2/4) Epoch 20, batch 2800, loss[loss=0.1873, simple_loss=0.2757, pruned_loss=0.04948, over 18367.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2551, pruned_loss=0.0383, over 3577879.43 frames. ], batch size: 56, lr: 5.61e-03, grad_scale: 8.0
+2023-03-09 13:18:56,840 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.812e+02 2.512e+02 3.220e+02 3.992e+02 1.001e+03, threshold=6.440e+02, percent-clipped=3.0
+2023-03-09 13:19:22,130 INFO [train.py:898] (2/4) Epoch 20, batch 2850, loss[loss=0.1403, simple_loss=0.2246, pruned_loss=0.028, over 17246.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2548, pruned_loss=0.0384, over 3573316.19 frames. ], batch size: 38, lr: 5.61e-03, grad_scale: 8.0
+2023-03-09 13:19:23,718 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5549, 3.5372, 3.4269, 3.1048, 3.3496, 2.7978, 2.6755, 3.6102],
+ device='cuda:2'), covar=tensor([0.0069, 0.0089, 0.0074, 0.0134, 0.0093, 0.0189, 0.0206, 0.0064],
+ device='cuda:2'), in_proj_covar=tensor([0.0135, 0.0153, 0.0130, 0.0182, 0.0138, 0.0176, 0.0179, 0.0116],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-09 13:19:49,034 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71919.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:20:21,490 INFO [train.py:898] (2/4) Epoch 20, batch 2900, loss[loss=0.1534, simple_loss=0.2424, pruned_loss=0.03214, over 18276.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2546, pruned_loss=0.03851, over 3559816.87 frames. ], batch size: 49, lr: 5.60e-03, grad_scale: 8.0
+2023-03-09 13:20:51,387 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71972.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:20:55,784 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.736e+02 2.545e+02 2.901e+02 3.371e+02 5.685e+02, threshold=5.802e+02, percent-clipped=0.0
+2023-03-09 13:21:00,738 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71980.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:21:20,629 INFO [train.py:898] (2/4) Epoch 20, batch 2950, loss[loss=0.1744, simple_loss=0.2665, pruned_loss=0.04116, over 17224.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2538, pruned_loss=0.03809, over 3561783.92 frames. ], batch size: 78, lr: 5.60e-03, grad_scale: 8.0
+2023-03-09 13:21:44,027 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4987, 2.2578, 2.3759, 2.7014, 2.9744, 5.0121, 4.8198, 3.4590],
+ device='cuda:2'), covar=tensor([0.2193, 0.3092, 0.3806, 0.2121, 0.3412, 0.0243, 0.0398, 0.0984],
+ device='cuda:2'), in_proj_covar=tensor([0.0298, 0.0346, 0.0380, 0.0277, 0.0388, 0.0240, 0.0295, 0.0254],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001],
+ device='cuda:2')
+2023-03-09 13:22:17,074 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0467, 5.2289, 2.4057, 5.0455, 4.9183, 5.1785, 4.9303, 2.2675],
+ device='cuda:2'), covar=tensor([0.0216, 0.0099, 0.1107, 0.0145, 0.0095, 0.0142, 0.0162, 0.1701],
+ device='cuda:2'), in_proj_covar=tensor([0.0085, 0.0079, 0.0093, 0.0093, 0.0083, 0.0074, 0.0083, 0.0095],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005],
+ device='cuda:2')
+2023-03-09 13:22:17,682 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0
+2023-03-09 13:22:24,557 INFO [train.py:898] (2/4) Epoch 20, batch 3000, loss[loss=0.1615, simple_loss=0.2549, pruned_loss=0.03406, over 18472.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2536, pruned_loss=0.03797, over 3564853.26 frames. ], batch size: 59, lr: 5.60e-03, grad_scale: 8.0
+2023-03-09 13:22:24,557 INFO [train.py:923] (2/4) Computing validation loss
+2023-03-09 13:22:36,472 INFO [train.py:932] (2/4) Epoch 20, validation: loss=0.1501, simple_loss=0.25, pruned_loss=0.02514, over 944034.00 frames.
+2023-03-09 13:22:36,472 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB
+2023-03-09 13:22:45,376 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7289, 3.4830, 4.7507, 4.2655, 3.2686, 2.8686, 4.1942, 4.9544],
+ device='cuda:2'), covar=tensor([0.0793, 0.1521, 0.0191, 0.0395, 0.0893, 0.1213, 0.0401, 0.0302],
+ device='cuda:2'), in_proj_covar=tensor([0.0148, 0.0272, 0.0149, 0.0180, 0.0190, 0.0191, 0.0194, 0.0195],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-09 13:23:10,306 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.792e+02 2.528e+02 3.006e+02 3.462e+02 4.966e+02, threshold=6.013e+02, percent-clipped=0.0
+2023-03-09 13:23:33,868 INFO [train.py:898] (2/4) Epoch 20, batch 3050, loss[loss=0.1621, simple_loss=0.2593, pruned_loss=0.03245, over 18417.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2545, pruned_loss=0.03824, over 3570131.98 frames. ], batch size: 48, lr: 5.60e-03, grad_scale: 8.0
+2023-03-09 13:24:11,907 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72129.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:24:31,903 INFO [train.py:898] (2/4) Epoch 20, batch 3100, loss[loss=0.1946, simple_loss=0.2806, pruned_loss=0.05427, over 18151.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2546, pruned_loss=0.03836, over 3576983.30 frames. ], batch size: 62, lr: 5.60e-03, grad_scale: 8.0
+2023-03-09 13:24:55,023 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.67 vs. limit=2.0
+2023-03-09 13:25:05,467 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.922e+02 2.879e+02 3.465e+02 4.058e+02 1.741e+03, threshold=6.931e+02, percent-clipped=3.0
+2023-03-09 13:25:21,393 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8524, 3.6109, 5.0081, 2.7618, 4.2854, 2.4423, 3.0503, 1.7876],
+ device='cuda:2'), covar=tensor([0.1196, 0.1024, 0.0156, 0.1050, 0.0598, 0.2781, 0.2660, 0.2247],
+ device='cuda:2'), in_proj_covar=tensor([0.0217, 0.0243, 0.0184, 0.0193, 0.0255, 0.0268, 0.0319, 0.0231],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-09 13:25:22,413 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72190.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:25:29,916 INFO [train.py:898] (2/4) Epoch 20, batch 3150, loss[loss=0.1825, simple_loss=0.2776, pruned_loss=0.0437, over 18253.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2544, pruned_loss=0.03798, over 3594660.67 frames. ], batch size: 60, lr: 5.59e-03, grad_scale: 8.0
+2023-03-09 13:26:28,293 INFO [train.py:898] (2/4) Epoch 20, batch 3200, loss[loss=0.1728, simple_loss=0.2682, pruned_loss=0.03871, over 18213.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2551, pruned_loss=0.03836, over 3585094.48 frames. ], batch size: 60, lr: 5.59e-03, grad_scale: 8.0
+2023-03-09 13:26:58,299 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72272.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:27:01,431 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72275.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:27:02,249 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.897e+02 2.541e+02 3.039e+02 3.727e+02 6.894e+02, threshold=6.078e+02, percent-clipped=0.0
+2023-03-09 13:27:26,964 INFO [train.py:898] (2/4) Epoch 20, batch 3250, loss[loss=0.1518, simple_loss=0.2386, pruned_loss=0.03253, over 18262.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2545, pruned_loss=0.03815, over 3586051.25 frames. ], batch size: 47, lr: 5.59e-03, grad_scale: 8.0
+2023-03-09 13:27:54,549 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72320.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:28:01,511 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72326.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:28:26,328 INFO [train.py:898] (2/4) Epoch 20, batch 3300, loss[loss=0.1658, simple_loss=0.261, pruned_loss=0.03526, over 18282.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2546, pruned_loss=0.03817, over 3582635.49 frames. ], batch size: 60, lr: 5.59e-03, grad_scale: 8.0
+2023-03-09 13:28:52,576 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.34 vs. limit=5.0
+2023-03-09 13:28:59,547 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.671e+02 2.585e+02 3.093e+02 3.745e+02 6.095e+02, threshold=6.186e+02, percent-clipped=1.0
+2023-03-09 13:29:13,008 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72387.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:29:17,036 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5582, 2.4100, 2.3046, 2.6031, 2.7805, 3.7703, 3.7393, 3.1591],
+ device='cuda:2'), covar=tensor([0.1902, 0.2596, 0.3489, 0.1921, 0.2860, 0.0462, 0.0547, 0.0771],
+ device='cuda:2'), in_proj_covar=tensor([0.0297, 0.0345, 0.0379, 0.0275, 0.0387, 0.0240, 0.0294, 0.0254],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001],
+ device='cuda:2')
+2023-03-09 13:29:24,269 INFO [train.py:898] (2/4) Epoch 20, batch 3350, loss[loss=0.2049, simple_loss=0.28, pruned_loss=0.06493, over 13005.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2548, pruned_loss=0.03813, over 3577661.46 frames. ], batch size: 131, lr: 5.59e-03, grad_scale: 8.0
+2023-03-09 13:29:52,915 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5808, 2.2220, 2.5335, 2.5224, 3.1005, 4.5688, 4.4274, 3.2731],
+ device='cuda:2'), covar=tensor([0.1846, 0.2429, 0.2966, 0.1953, 0.2342, 0.0276, 0.0440, 0.0942],
+ device='cuda:2'), in_proj_covar=tensor([0.0299, 0.0347, 0.0380, 0.0277, 0.0389, 0.0240, 0.0296, 0.0255],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001],
+ device='cuda:2')
+2023-03-09 13:30:23,820 INFO [train.py:898] (2/4) Epoch 20, batch 3400, loss[loss=0.1526, simple_loss=0.2286, pruned_loss=0.03834, over 17722.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2551, pruned_loss=0.03813, over 3577556.59 frames. ], batch size: 39, lr: 5.58e-03, grad_scale: 8.0
+2023-03-09 13:30:25,741 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.49 vs. limit=5.0
+2023-03-09 13:30:39,848 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8116, 4.0517, 2.3757, 4.0563, 5.1175, 2.5406, 3.6427, 3.8755],
+ device='cuda:2'), covar=tensor([0.0184, 0.1178, 0.1638, 0.0547, 0.0084, 0.1245, 0.0738, 0.0758],
+ device='cuda:2'), in_proj_covar=tensor([0.0163, 0.0268, 0.0202, 0.0193, 0.0123, 0.0181, 0.0213, 0.0222],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-09 13:30:41,926 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8502, 5.3483, 5.3433, 5.3217, 4.8409, 5.2747, 4.7006, 5.2089],
+ device='cuda:2'), covar=tensor([0.0219, 0.0257, 0.0172, 0.0367, 0.0386, 0.0192, 0.1017, 0.0293],
+ device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0260, 0.0250, 0.0325, 0.0266, 0.0267, 0.0307, 0.0258],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:2')
+2023-03-09 13:30:57,206 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.915e+02 2.809e+02 3.353e+02 3.945e+02 1.222e+03, threshold=6.706e+02, percent-clipped=5.0
+2023-03-09 13:31:07,830 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72485.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:31:22,253 INFO [train.py:898] (2/4) Epoch 20, batch 3450, loss[loss=0.1768, simple_loss=0.2668, pruned_loss=0.04341, over 18014.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.2544, pruned_loss=0.03767, over 3589572.74 frames. ], batch size: 65, lr: 5.58e-03, grad_scale: 8.0
+2023-03-09 13:32:20,080 INFO [train.py:898] (2/4) Epoch 20, batch 3500, loss[loss=0.1396, simple_loss=0.2275, pruned_loss=0.02584, over 18377.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2534, pruned_loss=0.0373, over 3590695.40 frames. ], batch size: 46, lr: 5.58e-03, grad_scale: 16.0
+2023-03-09 13:32:52,160 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72575.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:32:53,024 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.775e+02 2.575e+02 2.979e+02 3.509e+02 6.316e+02, threshold=5.957e+02, percent-clipped=0.0
+2023-03-09 13:33:16,609 INFO [train.py:898] (2/4) Epoch 20, batch 3550, loss[loss=0.173, simple_loss=0.2631, pruned_loss=0.0414, over 18492.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2537, pruned_loss=0.03754, over 3588947.43 frames. ], batch size: 51, lr: 5.58e-03, grad_scale: 16.0
+2023-03-09 13:33:45,204 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72623.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:34:10,357 INFO [train.py:898] (2/4) Epoch 20, batch 3600, loss[loss=0.1483, simple_loss=0.2391, pruned_loss=0.02872, over 18545.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2537, pruned_loss=0.03755, over 3591926.15 frames. ], batch size: 49, lr: 5.58e-03, grad_scale: 8.0
+2023-03-09 13:34:42,424 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.959e+02 2.531e+02 3.153e+02 3.638e+02 9.354e+02, threshold=6.307e+02, percent-clipped=0.0
+2023-03-09 13:35:15,793 INFO [train.py:898] (2/4) Epoch 21, batch 0, loss[loss=0.1639, simple_loss=0.2507, pruned_loss=0.03859, over 18269.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2507, pruned_loss=0.03859, over 18269.00 frames. ], batch size: 47, lr: 5.44e-03, grad_scale: 8.0
+2023-03-09 13:35:15,794 INFO [train.py:923] (2/4) Computing validation loss
+2023-03-09 13:35:27,494 INFO [train.py:932] (2/4) Epoch 21, validation: loss=0.1511, simple_loss=0.2511, pruned_loss=0.02556, over 944034.00 frames.
+2023-03-09 13:35:27,495 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB
+2023-03-09 13:35:28,913 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72682.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:36:23,192 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7755, 3.9653, 2.3767, 4.0535, 5.0733, 2.5511, 3.7668, 3.8372],
+ device='cuda:2'), covar=tensor([0.0176, 0.1151, 0.1708, 0.0585, 0.0087, 0.1261, 0.0669, 0.0739],
+ device='cuda:2'), in_proj_covar=tensor([0.0164, 0.0269, 0.0202, 0.0194, 0.0125, 0.0183, 0.0215, 0.0222],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-09 13:36:26,172 INFO [train.py:898] (2/4) Epoch 21, batch 50, loss[loss=0.161, simple_loss=0.2507, pruned_loss=0.03568, over 18478.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.2542, pruned_loss=0.0378, over 809303.84 frames. ], batch size: 51, lr: 5.44e-03, grad_scale: 8.0
+2023-03-09 13:37:20,702 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.036e+02 2.565e+02 3.180e+02 3.639e+02 9.362e+02, threshold=6.360e+02, percent-clipped=2.0
+2023-03-09 13:37:25,032 INFO [train.py:898] (2/4) Epoch 21, batch 100, loss[loss=0.1402, simple_loss=0.2222, pruned_loss=0.02903, over 17639.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2532, pruned_loss=0.03752, over 1428906.37 frames. ], batch size: 39, lr: 5.43e-03, grad_scale: 8.0
+2023-03-09 13:37:29,829 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72785.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:37:49,334 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4345, 2.0425, 2.3384, 2.4751, 2.5885, 4.7627, 4.6078, 3.3467],
+ device='cuda:2'), covar=tensor([0.2196, 0.3438, 0.3682, 0.2311, 0.3988, 0.0284, 0.0420, 0.1036],
+ device='cuda:2'), in_proj_covar=tensor([0.0298, 0.0345, 0.0379, 0.0277, 0.0387, 0.0239, 0.0295, 0.0253],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001],
+ device='cuda:2')
+2023-03-09 13:38:23,955 INFO [train.py:898] (2/4) Epoch 21, batch 150, loss[loss=0.1827, simple_loss=0.2723, pruned_loss=0.04659, over 18370.00 frames. ], tot_loss[loss=0.1635, simple_loss=0.2528, pruned_loss=0.03713, over 1921389.18 frames. ], batch size: 56, lr: 5.43e-03, grad_scale: 8.0
+2023-03-09 13:38:26,430 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=72833.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:39:17,914 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.843e+02 2.851e+02 3.537e+02 4.282e+02 1.325e+03, threshold=7.073e+02, percent-clipped=5.0
+2023-03-09 13:39:22,614 INFO [train.py:898] (2/4) Epoch 21, batch 200, loss[loss=0.16, simple_loss=0.2521, pruned_loss=0.03395, over 18617.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2533, pruned_loss=0.03713, over 2291718.82 frames. ], batch size: 52, lr: 5.43e-03, grad_scale: 8.0
+2023-03-09 13:39:37,549 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72894.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:40:04,470 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5446, 5.0188, 5.0110, 5.0060, 4.5514, 4.9488, 4.3984, 4.9017],
+ device='cuda:2'), covar=tensor([0.0248, 0.0299, 0.0211, 0.0433, 0.0394, 0.0244, 0.1075, 0.0342],
+ device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0261, 0.0251, 0.0326, 0.0265, 0.0268, 0.0305, 0.0257],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:2')
+2023-03-09 13:40:21,001 INFO [train.py:898] (2/4) Epoch 21, batch 250, loss[loss=0.1659, simple_loss=0.2585, pruned_loss=0.0366, over 15730.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2527, pruned_loss=0.03701, over 2569108.89 frames. ], batch size: 94, lr: 5.43e-03, grad_scale: 8.0
+2023-03-09 13:40:46,804 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3782, 3.9406, 3.9085, 3.1158, 3.4292, 3.1229, 2.4149, 2.2779],
+ device='cuda:2'), covar=tensor([0.0259, 0.0165, 0.0102, 0.0311, 0.0357, 0.0261, 0.0709, 0.0838],
+ device='cuda:2'), in_proj_covar=tensor([0.0070, 0.0058, 0.0062, 0.0068, 0.0088, 0.0066, 0.0076, 0.0084],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006],
+ device='cuda:2')
+2023-03-09 13:40:48,969 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72955.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:40:59,852 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72964.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 13:41:14,359 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.784e+02 2.579e+02 3.135e+02 3.848e+02 6.941e+02, threshold=6.270e+02, percent-clipped=0.0
+2023-03-09 13:41:18,868 INFO [train.py:898] (2/4) Epoch 21, batch 300, loss[loss=0.1817, simple_loss=0.2689, pruned_loss=0.04727, over 17915.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2533, pruned_loss=0.03732, over 2798579.83 frames. ], batch size: 65, lr: 5.43e-03, grad_scale: 8.0
+2023-03-09 13:41:20,213 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72982.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:42:11,344 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73025.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 13:42:16,849 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73030.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:42:17,824 INFO [train.py:898] (2/4) Epoch 21, batch 350, loss[loss=0.1659, simple_loss=0.2642, pruned_loss=0.03387, over 18406.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2545, pruned_loss=0.03747, over 2979363.63 frames. ], batch size: 52, lr: 5.43e-03, grad_scale: 8.0
+2023-03-09 13:43:11,921 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.597e+02 2.497e+02 3.036e+02 3.677e+02 5.898e+02, threshold=6.073e+02, percent-clipped=0.0
+2023-03-09 13:43:16,499 INFO [train.py:898] (2/4) Epoch 21, batch 400, loss[loss=0.1463, simple_loss=0.2361, pruned_loss=0.02824, over 18257.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2545, pruned_loss=0.0372, over 3114596.09 frames. ], batch size: 45, lr: 5.42e-03, grad_scale: 8.0
+2023-03-09 13:43:48,771 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9973, 5.0286, 5.0549, 4.8444, 4.8617, 4.8356, 5.1863, 5.1348],
+ device='cuda:2'), covar=tensor([0.0061, 0.0064, 0.0055, 0.0097, 0.0059, 0.0136, 0.0065, 0.0091],
+ device='cuda:2'), in_proj_covar=tensor([0.0092, 0.0068, 0.0073, 0.0090, 0.0074, 0.0103, 0.0085, 0.0085],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:2')
+2023-03-09 13:44:14,449 INFO [train.py:898] (2/4) Epoch 21, batch 450, loss[loss=0.1764, simple_loss=0.267, pruned_loss=0.04288, over 18260.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2537, pruned_loss=0.03706, over 3220720.90 frames. ], batch size: 60, lr: 5.42e-03, grad_scale: 8.0
+2023-03-09 13:44:47,163 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6499, 2.8723, 2.6513, 2.9874, 3.7209, 3.6001, 3.2564, 3.0923],
+ device='cuda:2'), covar=tensor([0.0162, 0.0286, 0.0535, 0.0355, 0.0165, 0.0150, 0.0322, 0.0356],
+ device='cuda:2'), in_proj_covar=tensor([0.0138, 0.0131, 0.0161, 0.0154, 0.0127, 0.0114, 0.0150, 0.0153],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-09 13:45:06,945 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.937e+02 2.746e+02 3.322e+02 3.984e+02 6.045e+02, threshold=6.644e+02, percent-clipped=0.0
+2023-03-09 13:45:12,794 INFO [train.py:898] (2/4) Epoch 21, batch 500, loss[loss=0.1808, simple_loss=0.2673, pruned_loss=0.04711, over 12564.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2538, pruned_loss=0.03712, over 3301120.09 frames. ], batch size: 130, lr: 5.42e-03, grad_scale: 8.0
+2023-03-09 13:45:38,515 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4824, 2.7544, 2.5512, 2.8906, 3.5869, 3.5546, 3.1122, 2.9568],
+ device='cuda:2'), covar=tensor([0.0221, 0.0314, 0.0535, 0.0395, 0.0212, 0.0149, 0.0373, 0.0390],
+ device='cuda:2'), in_proj_covar=tensor([0.0140, 0.0133, 0.0162, 0.0155, 0.0129, 0.0115, 0.0152, 0.0154],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-09 13:46:10,623 INFO [train.py:898] (2/4) Epoch 21, batch 550, loss[loss=0.1864, simple_loss=0.2735, pruned_loss=0.04966, over 17957.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2536, pruned_loss=0.03706, over 3366577.55 frames. ], batch size: 65, lr: 5.42e-03, grad_scale: 8.0
+2023-03-09 13:46:32,309 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73250.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:46:53,103 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.82 vs. limit=5.0
+2023-03-09 13:47:03,602 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.734e+02 2.558e+02 3.003e+02 3.637e+02 1.061e+03, threshold=6.005e+02, percent-clipped=3.0
+2023-03-09 13:47:08,046 INFO [train.py:898] (2/4) Epoch 21, batch 600, loss[loss=0.1799, simple_loss=0.2685, pruned_loss=0.04566, over 17140.00 frames. ], tot_loss[loss=0.1635, simple_loss=0.2531, pruned_loss=0.037, over 3424814.27 frames. ], batch size: 78, lr: 5.42e-03, grad_scale: 8.0
+2023-03-09 13:47:22,541 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73293.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:47:53,082 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73320.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 13:48:06,376 INFO [train.py:898] (2/4) Epoch 21, batch 650, loss[loss=0.184, simple_loss=0.2763, pruned_loss=0.0459, over 18341.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2534, pruned_loss=0.03713, over 3462766.85 frames. ], batch size: 56, lr: 5.41e-03, grad_scale: 8.0
+2023-03-09 13:48:33,540 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73354.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:49:00,062 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.798e+02 2.707e+02 3.166e+02 4.000e+02 8.066e+02, threshold=6.331e+02, percent-clipped=2.0
+2023-03-09 13:49:04,586 INFO [train.py:898] (2/4) Epoch 21, batch 700, loss[loss=0.1604, simple_loss=0.2493, pruned_loss=0.03576, over 18391.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2533, pruned_loss=0.03737, over 3486415.27 frames. ], batch size: 48, lr: 5.41e-03, grad_scale: 8.0
+2023-03-09 13:49:43,902 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7857, 3.7699, 5.0102, 2.6686, 4.4092, 2.5516, 2.9909, 1.8003],
+ device='cuda:2'), covar=tensor([0.1196, 0.0871, 0.0139, 0.1017, 0.0476, 0.2678, 0.2788, 0.2140],
+ device='cuda:2'), in_proj_covar=tensor([0.0221, 0.0246, 0.0188, 0.0198, 0.0259, 0.0273, 0.0325, 0.0235],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-09 13:50:02,190 INFO [train.py:898] (2/4) Epoch 21, batch 750, loss[loss=0.1545, simple_loss=0.2379, pruned_loss=0.03555, over 18442.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2536, pruned_loss=0.03733, over 3501093.94 frames. ], batch size: 43, lr: 5.41e-03, grad_scale: 8.0
+2023-03-09 13:50:09,085 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5929, 3.4492, 3.3695, 2.9489, 3.2080, 2.6342, 2.7924, 3.4396],
+ device='cuda:2'), covar=tensor([0.0089, 0.0130, 0.0108, 0.0195, 0.0139, 0.0265, 0.0256, 0.0091],
+ device='cuda:2'), in_proj_covar=tensor([0.0136, 0.0156, 0.0132, 0.0184, 0.0139, 0.0177, 0.0182, 0.0120],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-09 13:50:15,132 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0643, 3.4009, 3.4103, 2.9183, 3.0582, 2.8549, 2.4603, 2.3125],
+ device='cuda:2'), covar=tensor([0.0271, 0.0179, 0.0119, 0.0291, 0.0329, 0.0246, 0.0609, 0.0689],
+ device='cuda:2'), in_proj_covar=tensor([0.0070, 0.0059, 0.0062, 0.0068, 0.0089, 0.0066, 0.0077, 0.0084],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006],
+ device='cuda:2')
+2023-03-09 13:50:54,399 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.921e+02 2.615e+02 3.165e+02 3.785e+02 6.213e+02, threshold=6.329e+02, percent-clipped=0.0
+2023-03-09 13:50:59,647 INFO [train.py:898] (2/4) Epoch 21, batch 800, loss[loss=0.1661, simple_loss=0.2573, pruned_loss=0.03742, over 18291.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2538, pruned_loss=0.03736, over 3518250.24 frames. ], batch size: 54, lr: 5.41e-03, grad_scale: 8.0
+2023-03-09 13:51:01,385 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0
+2023-03-09 13:51:43,699 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.18 vs. limit=5.0
+2023-03-09 13:51:56,991 INFO [train.py:898] (2/4) Epoch 21, batch 850, loss[loss=0.1563, simple_loss=0.2452, pruned_loss=0.03368, over 18398.00 frames. ], tot_loss[loss=0.1637, simple_loss=0.2533, pruned_loss=0.03704, over 3536608.74 frames. ], batch size: 48, lr: 5.41e-03, grad_scale: 8.0
+2023-03-09 13:52:20,208 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73550.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:52:49,658 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0
+2023-03-09 13:52:50,040 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.633e+02 2.526e+02 3.131e+02 3.569e+02 6.821e+02, threshold=6.262e+02, percent-clipped=1.0
+2023-03-09 13:52:54,607 INFO [train.py:898] (2/4) Epoch 21, batch 900, loss[loss=0.1619, simple_loss=0.2556, pruned_loss=0.03409, over 18377.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2532, pruned_loss=0.03715, over 3539618.82 frames. ], batch size: 50, lr: 5.41e-03, grad_scale: 8.0
+2023-03-09 13:53:06,097 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73590.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:53:15,955 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73598.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:53:32,107 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5668, 2.9524, 4.3560, 3.6585, 2.7632, 4.6658, 3.9250, 2.9023],
+ device='cuda:2'), covar=tensor([0.0501, 0.1408, 0.0260, 0.0432, 0.1495, 0.0175, 0.0517, 0.1063],
+ device='cuda:2'), in_proj_covar=tensor([0.0209, 0.0239, 0.0211, 0.0162, 0.0224, 0.0213, 0.0246, 0.0199],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-09 13:53:40,752 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73620.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 13:53:53,034 INFO [train.py:898] (2/4) Epoch 21, batch 950, loss[loss=0.1496, simple_loss=0.2352, pruned_loss=0.03197, over 18258.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2539, pruned_loss=0.03728, over 3547918.97 frames. ], batch size: 45, lr: 5.40e-03, grad_scale: 8.0
+2023-03-09 13:54:15,551 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73649.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:54:18,574 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73651.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:54:37,481 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73668.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 13:54:47,213 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.873e+02 2.598e+02 2.980e+02 3.796e+02 7.725e+02, threshold=5.960e+02, percent-clipped=2.0
+2023-03-09 13:54:51,743 INFO [train.py:898] (2/4) Epoch 21, batch 1000, loss[loss=0.1604, simple_loss=0.2525, pruned_loss=0.03415, over 18396.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2535, pruned_loss=0.03699, over 3559017.48 frames. ], batch size: 52, lr: 5.40e-03, grad_scale: 8.0
+2023-03-09 13:55:49,540 INFO [train.py:898] (2/4) Epoch 21, batch 1050, loss[loss=0.1786, simple_loss=0.2703, pruned_loss=0.04341, over 18378.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2537, pruned_loss=0.03701, over 3562498.06 frames. ], batch size: 56, lr: 5.40e-03, grad_scale: 8.0
+2023-03-09 13:55:50,921 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73732.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:56:43,398 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.657e+02 2.593e+02 3.039e+02 3.732e+02 7.995e+02, threshold=6.078e+02, percent-clipped=2.0
+2023-03-09 13:56:47,986 INFO [train.py:898] (2/4) Epoch 21, batch 1100, loss[loss=0.1545, simple_loss=0.2489, pruned_loss=0.03003, over 18620.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2544, pruned_loss=0.03733, over 3558591.85 frames. ], batch size: 52, lr: 5.40e-03, grad_scale: 8.0
+2023-03-09 13:57:01,675 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73793.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 13:57:46,778 INFO [train.py:898] (2/4) Epoch 21, batch 1150, loss[loss=0.1646, simple_loss=0.2606, pruned_loss=0.03432, over 18492.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2538, pruned_loss=0.03714, over 3567544.62 frames. ], batch size: 51, lr: 5.40e-03, grad_scale: 8.0
+2023-03-09 13:58:40,425 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.636e+02 2.572e+02 3.032e+02 3.666e+02 7.271e+02, threshold=6.063e+02, percent-clipped=3.0
+2023-03-09 13:58:44,858 INFO [train.py:898] (2/4) Epoch 21, batch 1200, loss[loss=0.1621, simple_loss=0.2561, pruned_loss=0.03408, over 18471.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2539, pruned_loss=0.03716, over 3582525.17 frames. ], batch size: 53, lr: 5.39e-03, grad_scale: 8.0
+2023-03-09 13:58:53,149 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.38 vs. limit=5.0
+2023-03-09 13:59:00,901 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0
+2023-03-09 13:59:42,882 INFO [train.py:898] (2/4) Epoch 21, batch 1250, loss[loss=0.1324, simple_loss=0.2209, pruned_loss=0.02193, over 18491.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.254, pruned_loss=0.03747, over 3575925.07 frames. ], batch size: 44, lr: 5.39e-03, grad_scale: 8.0
+2023-03-09 14:00:00,232 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73946.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 14:00:03,872 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73949.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 14:00:22,598 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6506, 2.3163, 2.5878, 2.6239, 3.1351, 4.6665, 4.5855, 3.2845],
+ device='cuda:2'), covar=tensor([0.1756, 0.2399, 0.2860, 0.1811, 0.2388, 0.0231, 0.0374, 0.0900],
+ device='cuda:2'), in_proj_covar=tensor([0.0299, 0.0347, 0.0382, 0.0278, 0.0388, 0.0240, 0.0296, 0.0255],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001],
+ device='cuda:2')
+2023-03-09 14:00:37,357 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.681e+02 2.538e+02 2.943e+02 3.615e+02 7.778e+02, threshold=5.886e+02, percent-clipped=1.0
+2023-03-09 14:00:41,923 INFO [train.py:898] (2/4) Epoch 21, batch 1300, loss[loss=0.173, simple_loss=0.2628, pruned_loss=0.04161, over 18299.00 frames.
], tot_loss[loss=0.1639, simple_loss=0.2534, pruned_loss=0.03716, over 3585503.96 frames. ], batch size: 49, lr: 5.39e-03, grad_scale: 8.0 +2023-03-09 14:01:00,265 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=73997.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:01:34,883 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6988, 3.4271, 2.2220, 4.3975, 3.0327, 4.2345, 2.5120, 4.0493], + device='cuda:2'), covar=tensor([0.0561, 0.0821, 0.1323, 0.0501, 0.0825, 0.0326, 0.1130, 0.0357], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0226, 0.0189, 0.0281, 0.0191, 0.0262, 0.0202, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 14:01:45,718 INFO [train.py:898] (2/4) Epoch 21, batch 1350, loss[loss=0.1375, simple_loss=0.221, pruned_loss=0.027, over 18513.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2531, pruned_loss=0.03729, over 3574485.10 frames. ], batch size: 44, lr: 5.39e-03, grad_scale: 8.0 +2023-03-09 14:02:19,913 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0844, 3.7560, 5.1250, 2.9310, 4.4099, 2.6610, 3.1224, 1.7224], + device='cuda:2'), covar=tensor([0.1046, 0.0911, 0.0121, 0.0907, 0.0575, 0.2553, 0.2710, 0.2278], + device='cuda:2'), in_proj_covar=tensor([0.0221, 0.0246, 0.0189, 0.0198, 0.0260, 0.0272, 0.0323, 0.0235], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 14:02:22,969 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74064.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:02:39,208 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.953e+02 2.917e+02 3.282e+02 4.135e+02 8.979e+02, threshold=6.564e+02, percent-clipped=10.0 +2023-03-09 14:02:43,766 INFO [train.py:898] (2/4) Epoch 21, batch 1400, loss[loss=0.1573, simple_loss=0.2534, pruned_loss=0.03059, over 18294.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2532, pruned_loss=0.0373, over 3584668.90 frames. ], batch size: 49, lr: 5.39e-03, grad_scale: 8.0 +2023-03-09 14:02:51,618 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74088.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:03:35,135 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74125.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:03:41,405 INFO [train.py:898] (2/4) Epoch 21, batch 1450, loss[loss=0.1623, simple_loss=0.2497, pruned_loss=0.03748, over 18302.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2546, pruned_loss=0.03773, over 3583573.98 frames. ], batch size: 49, lr: 5.39e-03, grad_scale: 8.0 +2023-03-09 14:04:35,773 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.991e+02 2.519e+02 2.943e+02 3.724e+02 1.372e+03, threshold=5.886e+02, percent-clipped=4.0 +2023-03-09 14:04:40,831 INFO [train.py:898] (2/4) Epoch 21, batch 1500, loss[loss=0.1432, simple_loss=0.2359, pruned_loss=0.02525, over 18353.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2537, pruned_loss=0.03721, over 3586740.53 frames. 
], batch size: 46, lr: 5.38e-03, grad_scale: 8.0 +2023-03-09 14:05:06,328 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9737, 5.5127, 3.0522, 5.2425, 5.2303, 5.4844, 5.2913, 3.0680], + device='cuda:2'), covar=tensor([0.0221, 0.0052, 0.0707, 0.0073, 0.0059, 0.0059, 0.0078, 0.0839], + device='cuda:2'), in_proj_covar=tensor([0.0088, 0.0081, 0.0095, 0.0095, 0.0084, 0.0075, 0.0084, 0.0098], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 14:05:39,323 INFO [train.py:898] (2/4) Epoch 21, batch 1550, loss[loss=0.1658, simple_loss=0.2575, pruned_loss=0.03704, over 15954.00 frames. ], tot_loss[loss=0.1637, simple_loss=0.2536, pruned_loss=0.03692, over 3588393.55 frames. ], batch size: 94, lr: 5.38e-03, grad_scale: 8.0 +2023-03-09 14:05:50,257 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74240.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:05:53,548 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.3390, 4.7292, 4.3086, 4.5798, 4.4256, 4.3684, 4.8213, 4.7197], + device='cuda:2'), covar=tensor([0.1032, 0.0737, 0.2027, 0.0755, 0.1330, 0.0698, 0.0673, 0.0785], + device='cuda:2'), in_proj_covar=tensor([0.0609, 0.0524, 0.0379, 0.0547, 0.0737, 0.0542, 0.0739, 0.0566], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 14:05:57,052 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74246.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:05:59,357 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7429, 4.6010, 4.6366, 3.5033, 3.8132, 3.5567, 2.5705, 2.4118], + device='cuda:2'), covar=tensor([0.0254, 0.0138, 0.0064, 0.0284, 0.0324, 0.0235, 0.0768, 0.0935], + device='cuda:2'), in_proj_covar=tensor([0.0069, 0.0058, 0.0061, 0.0067, 0.0087, 0.0065, 0.0076, 0.0083], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 14:06:00,387 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4419, 5.4197, 5.0734, 5.3684, 5.3408, 4.7130, 5.2438, 5.0384], + device='cuda:2'), covar=tensor([0.0441, 0.0432, 0.1330, 0.0721, 0.0542, 0.0484, 0.0446, 0.1021], + device='cuda:2'), in_proj_covar=tensor([0.0502, 0.0565, 0.0706, 0.0436, 0.0451, 0.0511, 0.0546, 0.0688], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 14:06:32,520 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.761e+02 2.609e+02 3.003e+02 3.447e+02 5.872e+02, threshold=6.005e+02, percent-clipped=0.0 +2023-03-09 14:06:37,155 INFO [train.py:898] (2/4) Epoch 21, batch 1600, loss[loss=0.1546, simple_loss=0.2531, pruned_loss=0.02802, over 16121.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.253, pruned_loss=0.03682, over 3594730.90 frames. 
], batch size: 94, lr: 5.38e-03, grad_scale: 8.0 +2023-03-09 14:06:46,923 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2152, 5.2246, 5.4305, 5.5107, 5.1964, 6.0364, 5.6168, 5.2628], + device='cuda:2'), covar=tensor([0.1109, 0.0594, 0.0802, 0.0760, 0.1288, 0.0724, 0.0750, 0.1702], + device='cuda:2'), in_proj_covar=tensor([0.0360, 0.0288, 0.0314, 0.0313, 0.0331, 0.0423, 0.0282, 0.0419], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 14:06:49,307 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3843, 2.8285, 4.1319, 3.6148, 2.6972, 4.4050, 3.8652, 2.8372], + device='cuda:2'), covar=tensor([0.0614, 0.1521, 0.0311, 0.0445, 0.1523, 0.0210, 0.0591, 0.0966], + device='cuda:2'), in_proj_covar=tensor([0.0205, 0.0233, 0.0206, 0.0158, 0.0218, 0.0208, 0.0241, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 14:06:53,630 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74294.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:07:01,871 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74301.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:07:35,709 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7849, 2.8121, 2.7250, 3.0199, 3.7867, 3.6239, 3.2420, 3.0759], + device='cuda:2'), covar=tensor([0.0215, 0.0295, 0.0536, 0.0359, 0.0184, 0.0165, 0.0368, 0.0387], + device='cuda:2'), in_proj_covar=tensor([0.0140, 0.0133, 0.0162, 0.0156, 0.0128, 0.0116, 0.0153, 0.0154], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 14:07:36,412 INFO [train.py:898] (2/4) Epoch 21, batch 1650, loss[loss=0.1591, simple_loss=0.2399, pruned_loss=0.03915, over 18586.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2538, pruned_loss=0.03726, over 3574467.83 frames. ], batch size: 45, lr: 5.38e-03, grad_scale: 8.0 +2023-03-09 14:08:30,596 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.874e+02 2.715e+02 3.292e+02 3.947e+02 6.741e+02, threshold=6.584e+02, percent-clipped=1.0 +2023-03-09 14:08:35,092 INFO [train.py:898] (2/4) Epoch 21, batch 1700, loss[loss=0.1933, simple_loss=0.2721, pruned_loss=0.05728, over 18098.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2534, pruned_loss=0.03747, over 3564505.29 frames. ], batch size: 62, lr: 5.38e-03, grad_scale: 8.0 +2023-03-09 14:08:44,105 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74388.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:08:59,137 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6190, 5.0778, 5.0702, 5.0721, 4.6651, 4.9942, 4.4433, 4.9667], + device='cuda:2'), covar=tensor([0.0231, 0.0270, 0.0173, 0.0410, 0.0328, 0.0213, 0.0964, 0.0295], + device='cuda:2'), in_proj_covar=tensor([0.0216, 0.0261, 0.0252, 0.0331, 0.0268, 0.0270, 0.0310, 0.0259], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 14:09:20,334 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74420.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:09:33,170 INFO [train.py:898] (2/4) Epoch 21, batch 1750, loss[loss=0.1976, simple_loss=0.2845, pruned_loss=0.05538, over 18492.00 frames. 
], tot_loss[loss=0.1642, simple_loss=0.2533, pruned_loss=0.03758, over 3581578.68 frames. ], batch size: 59, lr: 5.37e-03, grad_scale: 8.0 +2023-03-09 14:09:38,769 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74436.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:10:18,439 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0466, 5.5651, 2.9808, 5.3358, 5.2786, 5.5598, 5.3305, 2.8709], + device='cuda:2'), covar=tensor([0.0199, 0.0069, 0.0718, 0.0063, 0.0058, 0.0049, 0.0087, 0.0923], + device='cuda:2'), in_proj_covar=tensor([0.0088, 0.0080, 0.0095, 0.0095, 0.0084, 0.0075, 0.0084, 0.0097], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 14:10:25,803 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.991e+02 2.679e+02 3.149e+02 3.785e+02 6.522e+02, threshold=6.298e+02, percent-clipped=0.0 +2023-03-09 14:10:30,658 INFO [train.py:898] (2/4) Epoch 21, batch 1800, loss[loss=0.1517, simple_loss=0.2466, pruned_loss=0.02844, over 18279.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2534, pruned_loss=0.03731, over 3585786.82 frames. ], batch size: 49, lr: 5.37e-03, grad_scale: 8.0 +2023-03-09 14:11:11,698 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-09 14:11:28,037 INFO [train.py:898] (2/4) Epoch 21, batch 1850, loss[loss=0.1555, simple_loss=0.2521, pruned_loss=0.02943, over 18355.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2538, pruned_loss=0.03727, over 3593931.73 frames. ], batch size: 55, lr: 5.37e-03, grad_scale: 8.0 +2023-03-09 14:12:12,576 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.71 vs. limit=2.0 +2023-03-09 14:12:21,974 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.780e+02 2.538e+02 3.055e+02 3.545e+02 5.986e+02, threshold=6.111e+02, percent-clipped=0.0 +2023-03-09 14:12:26,476 INFO [train.py:898] (2/4) Epoch 21, batch 1900, loss[loss=0.1847, simple_loss=0.2756, pruned_loss=0.04691, over 18356.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2541, pruned_loss=0.03719, over 3590047.66 frames. ], batch size: 56, lr: 5.37e-03, grad_scale: 8.0 +2023-03-09 14:12:43,301 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74595.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:12:44,260 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74596.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:13:24,947 INFO [train.py:898] (2/4) Epoch 21, batch 1950, loss[loss=0.1829, simple_loss=0.2806, pruned_loss=0.04264, over 18279.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2537, pruned_loss=0.03679, over 3598714.28 frames. 
], batch size: 57, lr: 5.37e-03, grad_scale: 16.0 +2023-03-09 14:13:40,585 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5982, 3.5711, 3.4792, 3.1248, 3.4177, 2.9313, 2.8086, 3.7217], + device='cuda:2'), covar=tensor([0.0068, 0.0097, 0.0084, 0.0140, 0.0100, 0.0165, 0.0203, 0.0058], + device='cuda:2'), in_proj_covar=tensor([0.0140, 0.0158, 0.0135, 0.0186, 0.0142, 0.0178, 0.0184, 0.0123], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 14:13:56,026 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74656.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:14:19,407 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.749e+02 2.532e+02 2.978e+02 3.558e+02 7.453e+02, threshold=5.957e+02, percent-clipped=1.0 +2023-03-09 14:14:23,894 INFO [train.py:898] (2/4) Epoch 21, batch 2000, loss[loss=0.1435, simple_loss=0.2244, pruned_loss=0.03128, over 18496.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2536, pruned_loss=0.03712, over 3579616.49 frames. ], batch size: 47, lr: 5.37e-03, grad_scale: 16.0 +2023-03-09 14:14:38,344 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9231, 3.7753, 5.1589, 4.4836, 3.6241, 3.3231, 4.7695, 5.3722], + device='cuda:2'), covar=tensor([0.0743, 0.1555, 0.0167, 0.0375, 0.0800, 0.1001, 0.0316, 0.0286], + device='cuda:2'), in_proj_covar=tensor([0.0148, 0.0275, 0.0151, 0.0181, 0.0191, 0.0191, 0.0195, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 14:15:09,466 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74720.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:15:21,706 INFO [train.py:898] (2/4) Epoch 21, batch 2050, loss[loss=0.1518, simple_loss=0.2387, pruned_loss=0.03242, over 18504.00 frames. ], tot_loss[loss=0.1635, simple_loss=0.253, pruned_loss=0.03704, over 3596016.96 frames. ], batch size: 47, lr: 5.36e-03, grad_scale: 16.0 +2023-03-09 14:16:05,344 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74768.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:16:15,323 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.051e+02 2.659e+02 3.081e+02 3.760e+02 7.989e+02, threshold=6.163e+02, percent-clipped=4.0 +2023-03-09 14:16:19,624 INFO [train.py:898] (2/4) Epoch 21, batch 2100, loss[loss=0.1481, simple_loss=0.2468, pruned_loss=0.02472, over 18470.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.253, pruned_loss=0.03709, over 3593026.90 frames. ], batch size: 53, lr: 5.36e-03, grad_scale: 16.0 +2023-03-09 14:16:55,779 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1740, 5.6698, 5.2933, 5.4354, 5.2158, 5.1066, 5.7274, 5.6491], + device='cuda:2'), covar=tensor([0.1190, 0.0752, 0.0534, 0.0779, 0.1373, 0.0740, 0.0583, 0.0721], + device='cuda:2'), in_proj_covar=tensor([0.0615, 0.0532, 0.0385, 0.0554, 0.0747, 0.0552, 0.0750, 0.0572], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 14:17:17,999 INFO [train.py:898] (2/4) Epoch 21, batch 2150, loss[loss=0.1492, simple_loss=0.233, pruned_loss=0.03267, over 18340.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2535, pruned_loss=0.03754, over 3576484.95 frames. 
], batch size: 46, lr: 5.36e-03, grad_scale: 16.0 +2023-03-09 14:17:43,776 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-03-09 14:18:04,727 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8045, 4.8802, 2.5596, 4.7561, 4.6395, 4.8876, 4.6655, 2.5530], + device='cuda:2'), covar=tensor([0.0237, 0.0093, 0.0846, 0.0113, 0.0083, 0.0092, 0.0117, 0.1111], + device='cuda:2'), in_proj_covar=tensor([0.0088, 0.0081, 0.0095, 0.0095, 0.0084, 0.0075, 0.0084, 0.0097], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 14:18:11,058 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.902e+02 2.637e+02 3.175e+02 3.840e+02 7.861e+02, threshold=6.351e+02, percent-clipped=2.0 +2023-03-09 14:18:15,491 INFO [train.py:898] (2/4) Epoch 21, batch 2200, loss[loss=0.1782, simple_loss=0.2757, pruned_loss=0.04034, over 18480.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2538, pruned_loss=0.03763, over 3577046.11 frames. ], batch size: 53, lr: 5.36e-03, grad_scale: 16.0 +2023-03-09 14:18:32,663 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74896.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:18:35,002 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5827, 3.4607, 2.2000, 4.2966, 2.9931, 4.2061, 2.5055, 3.9508], + device='cuda:2'), covar=tensor([0.0649, 0.0835, 0.1483, 0.0579, 0.0879, 0.0321, 0.1138, 0.0397], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0228, 0.0191, 0.0286, 0.0193, 0.0266, 0.0202, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 14:18:47,857 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74909.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:19:13,475 INFO [train.py:898] (2/4) Epoch 21, batch 2250, loss[loss=0.1599, simple_loss=0.2513, pruned_loss=0.03428, over 18389.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.255, pruned_loss=0.03782, over 3577613.77 frames. ], batch size: 50, lr: 5.36e-03, grad_scale: 16.0 +2023-03-09 14:19:28,490 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=74944.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:19:36,429 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74951.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:19:43,919 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4769, 3.8683, 3.8321, 3.1301, 3.3719, 3.1529, 2.4718, 2.1471], + device='cuda:2'), covar=tensor([0.0231, 0.0190, 0.0116, 0.0315, 0.0337, 0.0239, 0.0707, 0.0931], + device='cuda:2'), in_proj_covar=tensor([0.0069, 0.0059, 0.0062, 0.0067, 0.0088, 0.0066, 0.0076, 0.0083], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 14:19:51,969 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74964.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:19:52,277 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.71 vs. 
limit=2.0 +2023-03-09 14:19:59,317 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74970.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:20:08,790 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.078e+02 2.763e+02 3.199e+02 3.835e+02 8.109e+02, threshold=6.398e+02, percent-clipped=3.0 +2023-03-09 14:20:12,113 INFO [train.py:898] (2/4) Epoch 21, batch 2300, loss[loss=0.1652, simple_loss=0.2616, pruned_loss=0.0344, over 18402.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2556, pruned_loss=0.03774, over 3579545.27 frames. ], batch size: 48, lr: 5.35e-03, grad_scale: 8.0 +2023-03-09 14:20:15,964 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-03-09 14:20:45,229 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.71 vs. limit=2.0 +2023-03-09 14:20:45,233 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-03-09 14:20:48,252 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8875, 5.2053, 2.5749, 5.0842, 4.9700, 5.2213, 4.9714, 2.6948], + device='cuda:2'), covar=tensor([0.0218, 0.0069, 0.0793, 0.0082, 0.0065, 0.0065, 0.0097, 0.0964], + device='cuda:2'), in_proj_covar=tensor([0.0088, 0.0081, 0.0095, 0.0095, 0.0084, 0.0075, 0.0084, 0.0097], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 14:21:03,849 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75025.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:21:10,838 INFO [train.py:898] (2/4) Epoch 21, batch 2350, loss[loss=0.1554, simple_loss=0.2527, pruned_loss=0.02906, over 18564.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.2566, pruned_loss=0.03784, over 3574750.25 frames. ], batch size: 54, lr: 5.35e-03, grad_scale: 8.0 +2023-03-09 14:21:37,028 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-03-09 14:22:04,856 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.756e+02 2.674e+02 3.137e+02 3.722e+02 6.361e+02, threshold=6.274e+02, percent-clipped=0.0 +2023-03-09 14:22:05,346 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1440, 4.2058, 2.7727, 4.1805, 5.4028, 2.8522, 4.0237, 4.2225], + device='cuda:2'), covar=tensor([0.0155, 0.1328, 0.1430, 0.0610, 0.0074, 0.1092, 0.0606, 0.0610], + device='cuda:2'), in_proj_covar=tensor([0.0167, 0.0271, 0.0205, 0.0196, 0.0127, 0.0183, 0.0216, 0.0224], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 14:22:08,959 INFO [train.py:898] (2/4) Epoch 21, batch 2400, loss[loss=0.1481, simple_loss=0.2278, pruned_loss=0.03426, over 18403.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2556, pruned_loss=0.0374, over 3567999.31 frames. 
], batch size: 42, lr: 5.35e-03, grad_scale: 8.0 +2023-03-09 14:22:34,017 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5763, 3.5399, 3.4046, 3.0744, 3.3407, 2.7491, 2.7333, 3.6488], + device='cuda:2'), covar=tensor([0.0066, 0.0086, 0.0082, 0.0160, 0.0097, 0.0191, 0.0209, 0.0058], + device='cuda:2'), in_proj_covar=tensor([0.0140, 0.0159, 0.0136, 0.0185, 0.0142, 0.0178, 0.0183, 0.0122], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 14:23:06,748 INFO [train.py:898] (2/4) Epoch 21, batch 2450, loss[loss=0.1893, simple_loss=0.2794, pruned_loss=0.04957, over 18041.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2556, pruned_loss=0.03751, over 3568876.88 frames. ], batch size: 65, lr: 5.35e-03, grad_scale: 8.0 +2023-03-09 14:23:50,291 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4556, 5.4462, 5.0856, 5.4126, 5.3828, 4.7828, 5.2959, 5.0477], + device='cuda:2'), covar=tensor([0.0435, 0.0404, 0.1220, 0.0701, 0.0575, 0.0410, 0.0386, 0.0994], + device='cuda:2'), in_proj_covar=tensor([0.0502, 0.0557, 0.0703, 0.0436, 0.0448, 0.0507, 0.0542, 0.0684], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 14:24:00,723 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.788e+02 2.611e+02 3.062e+02 3.854e+02 9.257e+02, threshold=6.124e+02, percent-clipped=4.0 +2023-03-09 14:24:03,984 INFO [train.py:898] (2/4) Epoch 21, batch 2500, loss[loss=0.1641, simple_loss=0.2563, pruned_loss=0.03594, over 18620.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2541, pruned_loss=0.03697, over 3580446.59 frames. ], batch size: 52, lr: 5.35e-03, grad_scale: 8.0 +2023-03-09 14:24:22,978 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1655, 3.9187, 5.3138, 2.9859, 4.6807, 2.7861, 3.2388, 2.1511], + device='cuda:2'), covar=tensor([0.1023, 0.0886, 0.0141, 0.0890, 0.0490, 0.2554, 0.2758, 0.1977], + device='cuda:2'), in_proj_covar=tensor([0.0218, 0.0244, 0.0189, 0.0195, 0.0256, 0.0269, 0.0319, 0.0232], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 14:24:59,582 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75228.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:25:02,700 INFO [train.py:898] (2/4) Epoch 21, batch 2550, loss[loss=0.1745, simple_loss=0.2684, pruned_loss=0.0403, over 17991.00 frames. ], tot_loss[loss=0.1637, simple_loss=0.2538, pruned_loss=0.03682, over 3581918.52 frames. ], batch size: 65, lr: 5.35e-03, grad_scale: 8.0 +2023-03-09 14:25:26,238 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75251.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:25:42,173 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75265.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:25:48,463 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75270.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:25:57,522 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.566e+02 2.536e+02 3.166e+02 3.977e+02 7.777e+02, threshold=6.331e+02, percent-clipped=2.0 +2023-03-09 14:26:01,094 INFO [train.py:898] (2/4) Epoch 21, batch 2600, loss[loss=0.168, simple_loss=0.2608, pruned_loss=0.03765, over 18468.00 frames. 
], tot_loss[loss=0.1639, simple_loss=0.254, pruned_loss=0.03691, over 3581359.42 frames. ], batch size: 59, lr: 5.34e-03, grad_scale: 8.0 +2023-03-09 14:26:11,214 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75289.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:26:22,512 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75299.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:26:45,908 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75320.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:26:59,283 INFO [train.py:898] (2/4) Epoch 21, batch 2650, loss[loss=0.1585, simple_loss=0.2517, pruned_loss=0.03264, over 18615.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2553, pruned_loss=0.03772, over 3572889.26 frames. ], batch size: 52, lr: 5.34e-03, grad_scale: 8.0 +2023-03-09 14:26:59,702 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75331.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 14:27:53,150 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.804e+02 2.756e+02 3.274e+02 3.890e+02 7.803e+02, threshold=6.547e+02, percent-clipped=3.0 +2023-03-09 14:27:57,178 INFO [train.py:898] (2/4) Epoch 21, batch 2700, loss[loss=0.131, simple_loss=0.2232, pruned_loss=0.01937, over 18484.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2556, pruned_loss=0.03784, over 3574366.49 frames. ], batch size: 44, lr: 5.34e-03, grad_scale: 8.0 +2023-03-09 14:28:30,462 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4354, 3.8640, 4.9969, 4.1609, 2.7542, 2.6764, 4.4105, 5.2612], + device='cuda:2'), covar=tensor([0.0875, 0.1316, 0.0173, 0.0456, 0.1143, 0.1280, 0.0376, 0.0212], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0274, 0.0152, 0.0181, 0.0191, 0.0190, 0.0195, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 14:28:55,178 INFO [train.py:898] (2/4) Epoch 21, batch 2750, loss[loss=0.1469, simple_loss=0.2291, pruned_loss=0.03235, over 18380.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2547, pruned_loss=0.03733, over 3583745.27 frames. 
], batch size: 42, lr: 5.34e-03, grad_scale: 4.0 +2023-03-09 14:29:22,164 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9259, 4.1926, 4.1565, 4.2219, 3.8907, 4.1069, 3.8062, 4.1358], + device='cuda:2'), covar=tensor([0.0262, 0.0340, 0.0248, 0.0480, 0.0329, 0.0251, 0.0835, 0.0341], + device='cuda:2'), in_proj_covar=tensor([0.0216, 0.0260, 0.0254, 0.0327, 0.0270, 0.0268, 0.0307, 0.0259], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 14:29:46,087 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6593, 3.4601, 4.7838, 4.1398, 3.1019, 2.8931, 4.2799, 4.9693], + device='cuda:2'), covar=tensor([0.0820, 0.1537, 0.0184, 0.0420, 0.1023, 0.1213, 0.0396, 0.0189], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0272, 0.0151, 0.0180, 0.0189, 0.0189, 0.0194, 0.0196], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 14:29:50,241 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.664e+02 2.526e+02 3.249e+02 3.895e+02 6.224e+02, threshold=6.498e+02, percent-clipped=0.0 +2023-03-09 14:29:52,449 INFO [train.py:898] (2/4) Epoch 21, batch 2800, loss[loss=0.1417, simple_loss=0.2277, pruned_loss=0.02785, over 18431.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2545, pruned_loss=0.0372, over 3589795.30 frames. ], batch size: 43, lr: 5.34e-03, grad_scale: 8.0 +2023-03-09 14:30:09,600 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-03-09 14:30:50,507 INFO [train.py:898] (2/4) Epoch 21, batch 2850, loss[loss=0.1759, simple_loss=0.2618, pruned_loss=0.04497, over 18285.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2542, pruned_loss=0.037, over 3586745.43 frames. ], batch size: 57, lr: 5.34e-03, grad_scale: 8.0 +2023-03-09 14:31:11,991 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8466, 3.6132, 4.8739, 4.0992, 3.1286, 2.7369, 4.2345, 5.0461], + device='cuda:2'), covar=tensor([0.0766, 0.1425, 0.0198, 0.0455, 0.1050, 0.1418, 0.0462, 0.0242], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0273, 0.0151, 0.0180, 0.0190, 0.0190, 0.0195, 0.0196], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 14:31:31,072 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75565.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:31:46,374 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.918e+02 2.851e+02 3.414e+02 4.346e+02 8.993e+02, threshold=6.828e+02, percent-clipped=4.0 +2023-03-09 14:31:48,600 INFO [train.py:898] (2/4) Epoch 21, batch 2900, loss[loss=0.16, simple_loss=0.2563, pruned_loss=0.03186, over 18306.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2535, pruned_loss=0.03683, over 3589709.04 frames. 
], batch size: 57, lr: 5.33e-03, grad_scale: 8.0 +2023-03-09 14:31:52,668 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75584.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:32:27,074 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75613.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:32:35,109 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75620.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:32:42,063 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75626.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 14:32:47,483 INFO [train.py:898] (2/4) Epoch 21, batch 2950, loss[loss=0.1662, simple_loss=0.2613, pruned_loss=0.03554, over 18364.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2541, pruned_loss=0.03729, over 3581299.35 frames. ], batch size: 55, lr: 5.33e-03, grad_scale: 8.0 +2023-03-09 14:33:31,425 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75668.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:33:35,073 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5281, 3.2105, 1.8890, 4.2879, 2.8959, 3.8152, 2.0358, 3.4179], + device='cuda:2'), covar=tensor([0.0567, 0.0950, 0.1690, 0.0502, 0.0931, 0.0357, 0.1557, 0.0627], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0226, 0.0189, 0.0284, 0.0190, 0.0263, 0.0202, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 14:33:43,526 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.752e+02 2.513e+02 3.131e+02 3.690e+02 6.570e+02, threshold=6.263e+02, percent-clipped=0.0 +2023-03-09 14:33:45,524 INFO [train.py:898] (2/4) Epoch 21, batch 3000, loss[loss=0.1912, simple_loss=0.2817, pruned_loss=0.0504, over 16007.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2545, pruned_loss=0.03751, over 3582253.72 frames. ], batch size: 94, lr: 5.33e-03, grad_scale: 8.0 +2023-03-09 14:33:45,524 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 14:33:57,963 INFO [train.py:932] (2/4) Epoch 21, validation: loss=0.1498, simple_loss=0.2495, pruned_loss=0.02501, over 944034.00 frames. +2023-03-09 14:33:57,964 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 14:33:58,793 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.70 vs. limit=2.0 +2023-03-09 14:34:55,563 INFO [train.py:898] (2/4) Epoch 21, batch 3050, loss[loss=0.175, simple_loss=0.2642, pruned_loss=0.04286, over 18361.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2547, pruned_loss=0.03764, over 3577840.85 frames. ], batch size: 55, lr: 5.33e-03, grad_scale: 8.0 +2023-03-09 14:35:50,117 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.13 vs. limit=5.0 +2023-03-09 14:35:51,756 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.562e+02 2.753e+02 3.195e+02 3.749e+02 6.792e+02, threshold=6.390e+02, percent-clipped=2.0 +2023-03-09 14:35:54,451 INFO [train.py:898] (2/4) Epoch 21, batch 3100, loss[loss=0.2195, simple_loss=0.3005, pruned_loss=0.06921, over 13031.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2544, pruned_loss=0.03736, over 3583881.18 frames. ], batch size: 129, lr: 5.33e-03, grad_scale: 8.0 +2023-03-09 14:36:06,970 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. 
limit=2.0 +2023-03-09 14:36:21,448 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-09 14:36:52,863 INFO [train.py:898] (2/4) Epoch 21, batch 3150, loss[loss=0.1961, simple_loss=0.2762, pruned_loss=0.058, over 12450.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.254, pruned_loss=0.03727, over 3581554.57 frames. ], batch size: 129, lr: 5.32e-03, grad_scale: 8.0 +2023-03-09 14:37:24,418 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8342, 4.9081, 4.9442, 4.6628, 4.6909, 4.6931, 5.0420, 5.0352], + device='cuda:2'), covar=tensor([0.0068, 0.0061, 0.0063, 0.0113, 0.0064, 0.0152, 0.0069, 0.0093], + device='cuda:2'), in_proj_covar=tensor([0.0093, 0.0070, 0.0074, 0.0093, 0.0074, 0.0104, 0.0086, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 14:37:49,365 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.108e+02 2.640e+02 3.076e+02 3.933e+02 6.282e+02, threshold=6.152e+02, percent-clipped=0.0 +2023-03-09 14:37:51,672 INFO [train.py:898] (2/4) Epoch 21, batch 3200, loss[loss=0.1464, simple_loss=0.2337, pruned_loss=0.02951, over 18261.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2539, pruned_loss=0.03722, over 3560625.73 frames. ], batch size: 47, lr: 5.32e-03, grad_scale: 8.0 +2023-03-09 14:37:55,537 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75884.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:38:21,714 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6143, 3.3207, 2.2088, 4.2701, 3.0052, 4.1217, 2.4457, 3.8012], + device='cuda:2'), covar=tensor([0.0566, 0.0881, 0.1488, 0.0521, 0.0859, 0.0308, 0.1253, 0.0435], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0226, 0.0189, 0.0283, 0.0190, 0.0262, 0.0201, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 14:38:45,197 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75926.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 14:38:50,629 INFO [train.py:898] (2/4) Epoch 21, batch 3250, loss[loss=0.1676, simple_loss=0.2635, pruned_loss=0.03587, over 18388.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2535, pruned_loss=0.03734, over 3557901.73 frames. ], batch size: 52, lr: 5.32e-03, grad_scale: 8.0 +2023-03-09 14:38:51,988 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75932.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:39:41,482 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=75974.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:39:46,934 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.093e+02 2.622e+02 3.085e+02 3.781e+02 1.173e+03, threshold=6.171e+02, percent-clipped=7.0 +2023-03-09 14:39:49,200 INFO [train.py:898] (2/4) Epoch 21, batch 3300, loss[loss=0.1559, simple_loss=0.2397, pruned_loss=0.03608, over 18486.00 frames. ], tot_loss[loss=0.1632, simple_loss=0.2523, pruned_loss=0.03704, over 3576301.20 frames. 
], batch size: 47, lr: 5.32e-03, grad_scale: 8.0 +2023-03-09 14:40:04,913 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1286, 2.5357, 3.0801, 3.0584, 2.4572, 3.3924, 3.2674, 2.4440], + device='cuda:2'), covar=tensor([0.0476, 0.1325, 0.0573, 0.0419, 0.1417, 0.0313, 0.0656, 0.1063], + device='cuda:2'), in_proj_covar=tensor([0.0210, 0.0238, 0.0211, 0.0164, 0.0223, 0.0213, 0.0245, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 14:40:52,541 INFO [train.py:898] (2/4) Epoch 21, batch 3350, loss[loss=0.2002, simple_loss=0.2921, pruned_loss=0.05415, over 12444.00 frames. ], tot_loss[loss=0.1632, simple_loss=0.2522, pruned_loss=0.0371, over 3556846.14 frames. ], batch size: 129, lr: 5.32e-03, grad_scale: 8.0 +2023-03-09 14:41:37,625 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9859, 4.6006, 4.5946, 3.6154, 3.7915, 3.5771, 2.8339, 2.5547], + device='cuda:2'), covar=tensor([0.0199, 0.0171, 0.0089, 0.0268, 0.0339, 0.0235, 0.0693, 0.0849], + device='cuda:2'), in_proj_covar=tensor([0.0069, 0.0059, 0.0062, 0.0067, 0.0088, 0.0066, 0.0076, 0.0083], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 14:41:48,800 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.784e+02 2.702e+02 3.276e+02 4.045e+02 1.355e+03, threshold=6.552e+02, percent-clipped=6.0 +2023-03-09 14:41:51,073 INFO [train.py:898] (2/4) Epoch 21, batch 3400, loss[loss=0.1741, simple_loss=0.265, pruned_loss=0.04165, over 17687.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2522, pruned_loss=0.03714, over 3559949.15 frames. ], batch size: 70, lr: 5.32e-03, grad_scale: 8.0 +2023-03-09 14:42:02,888 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-03-09 14:42:30,124 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6027, 3.4044, 2.2014, 4.4324, 3.0642, 4.2173, 2.4559, 3.7871], + device='cuda:2'), covar=tensor([0.0646, 0.0895, 0.1595, 0.0482, 0.0904, 0.0362, 0.1307, 0.0511], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0227, 0.0191, 0.0286, 0.0192, 0.0265, 0.0204, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 14:42:49,331 INFO [train.py:898] (2/4) Epoch 21, batch 3450, loss[loss=0.1741, simple_loss=0.2653, pruned_loss=0.04147, over 18490.00 frames. ], tot_loss[loss=0.1637, simple_loss=0.2529, pruned_loss=0.03725, over 3554721.15 frames. ], batch size: 51, lr: 5.31e-03, grad_scale: 8.0 +2023-03-09 14:43:45,620 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.955e+02 2.600e+02 3.134e+02 3.743e+02 8.321e+02, threshold=6.269e+02, percent-clipped=3.0 +2023-03-09 14:43:47,841 INFO [train.py:898] (2/4) Epoch 21, batch 3500, loss[loss=0.1428, simple_loss=0.2233, pruned_loss=0.03114, over 18445.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2537, pruned_loss=0.03716, over 3574511.32 frames. ], batch size: 43, lr: 5.31e-03, grad_scale: 8.0 +2023-03-09 14:43:48,553 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.87 vs. limit=5.0 +2023-03-09 14:44:03,772 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. 
limit=2.0 +2023-03-09 14:44:43,826 INFO [train.py:898] (2/4) Epoch 21, batch 3550, loss[loss=0.1752, simple_loss=0.2637, pruned_loss=0.04332, over 17153.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2532, pruned_loss=0.03694, over 3576523.00 frames. ], batch size: 78, lr: 5.31e-03, grad_scale: 4.0 +2023-03-09 14:45:24,489 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76269.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 14:45:36,127 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.098e+02 2.775e+02 3.080e+02 3.770e+02 8.534e+02, threshold=6.159e+02, percent-clipped=2.0 +2023-03-09 14:45:37,246 INFO [train.py:898] (2/4) Epoch 21, batch 3600, loss[loss=0.1543, simple_loss=0.2391, pruned_loss=0.0348, over 18357.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2529, pruned_loss=0.03697, over 3575282.93 frames. ], batch size: 46, lr: 5.31e-03, grad_scale: 8.0 +2023-03-09 14:45:41,735 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2463, 5.2459, 5.5917, 5.6179, 5.2704, 6.0985, 5.7232, 5.3849], + device='cuda:2'), covar=tensor([0.1196, 0.0575, 0.0696, 0.0834, 0.1459, 0.0730, 0.0709, 0.1612], + device='cuda:2'), in_proj_covar=tensor([0.0353, 0.0283, 0.0310, 0.0310, 0.0326, 0.0420, 0.0278, 0.0415], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0002, 0.0004], + device='cuda:2') +2023-03-09 14:45:44,368 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6016, 2.4026, 2.6784, 2.6651, 3.2352, 4.7375, 4.6676, 3.2777], + device='cuda:2'), covar=tensor([0.1844, 0.2421, 0.2863, 0.1900, 0.2423, 0.0256, 0.0387, 0.0963], + device='cuda:2'), in_proj_covar=tensor([0.0301, 0.0347, 0.0384, 0.0278, 0.0390, 0.0243, 0.0297, 0.0256], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 14:45:51,567 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76294.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:46:42,828 INFO [train.py:898] (2/4) Epoch 22, batch 0, loss[loss=0.1463, simple_loss=0.2296, pruned_loss=0.03148, over 18512.00 frames. ], tot_loss[loss=0.1463, simple_loss=0.2296, pruned_loss=0.03148, over 18512.00 frames. ], batch size: 47, lr: 5.18e-03, grad_scale: 8.0 +2023-03-09 14:46:42,829 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 14:46:54,611 INFO [train.py:932] (2/4) Epoch 22, validation: loss=0.1504, simple_loss=0.25, pruned_loss=0.02541, over 944034.00 frames. +2023-03-09 14:46:54,612 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 14:46:57,492 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-09 14:47:04,752 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76323.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 14:47:13,372 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76330.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 14:47:39,485 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.93 vs. limit=5.0 +2023-03-09 14:47:42,773 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76355.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:47:53,559 INFO [train.py:898] (2/4) Epoch 22, batch 50, loss[loss=0.164, simple_loss=0.2572, pruned_loss=0.03545, over 18036.00 frames. 
], tot_loss[loss=0.1609, simple_loss=0.2512, pruned_loss=0.03528, over 817009.85 frames. ], batch size: 65, lr: 5.18e-03, grad_scale: 8.0 +2023-03-09 14:48:11,539 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.962e+02 2.572e+02 3.075e+02 3.753e+02 8.462e+02, threshold=6.150e+02, percent-clipped=5.0 +2023-03-09 14:48:16,368 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76384.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 14:48:52,292 INFO [train.py:898] (2/4) Epoch 22, batch 100, loss[loss=0.1839, simple_loss=0.2728, pruned_loss=0.04745, over 18140.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2507, pruned_loss=0.03573, over 1441676.81 frames. ], batch size: 62, lr: 5.18e-03, grad_scale: 8.0 +2023-03-09 14:49:26,600 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1046, 5.2370, 5.2070, 4.9245, 4.9090, 4.9927, 5.3013, 5.2476], + device='cuda:2'), covar=tensor([0.0075, 0.0058, 0.0055, 0.0103, 0.0062, 0.0138, 0.0076, 0.0126], + device='cuda:2'), in_proj_covar=tensor([0.0092, 0.0068, 0.0073, 0.0091, 0.0073, 0.0102, 0.0085, 0.0085], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 14:49:49,355 INFO [train.py:898] (2/4) Epoch 22, batch 150, loss[loss=0.1703, simple_loss=0.2549, pruned_loss=0.04281, over 18357.00 frames. ], tot_loss[loss=0.1625, simple_loss=0.2519, pruned_loss=0.03651, over 1918362.39 frames. ], batch size: 46, lr: 5.18e-03, grad_scale: 8.0 +2023-03-09 14:50:05,947 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.617e+02 2.647e+02 3.251e+02 3.870e+02 7.271e+02, threshold=6.503e+02, percent-clipped=3.0 +2023-03-09 14:50:46,566 INFO [train.py:898] (2/4) Epoch 22, batch 200, loss[loss=0.1607, simple_loss=0.2509, pruned_loss=0.03521, over 17701.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2514, pruned_loss=0.03597, over 2293448.10 frames. ], batch size: 70, lr: 5.18e-03, grad_scale: 8.0 +2023-03-09 14:51:45,318 INFO [train.py:898] (2/4) Epoch 22, batch 250, loss[loss=0.1732, simple_loss=0.2638, pruned_loss=0.0413, over 18563.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2515, pruned_loss=0.03607, over 2576103.26 frames. ], batch size: 54, lr: 5.18e-03, grad_scale: 8.0 +2023-03-09 14:52:02,089 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.820e+02 2.535e+02 2.870e+02 3.429e+02 6.205e+02, threshold=5.739e+02, percent-clipped=0.0 +2023-03-09 14:52:44,276 INFO [train.py:898] (2/4) Epoch 22, batch 300, loss[loss=0.1688, simple_loss=0.2606, pruned_loss=0.03855, over 18396.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2511, pruned_loss=0.03615, over 2793882.71 frames. ], batch size: 52, lr: 5.17e-03, grad_scale: 8.0 +2023-03-09 14:52:55,710 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76625.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 14:53:25,122 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76650.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:53:42,869 INFO [train.py:898] (2/4) Epoch 22, batch 350, loss[loss=0.1867, simple_loss=0.2787, pruned_loss=0.04735, over 16060.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2514, pruned_loss=0.03613, over 2979608.19 frames. 
], batch size: 94, lr: 5.17e-03, grad_scale: 8.0 +2023-03-09 14:53:58,887 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76679.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 14:53:59,706 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.678e+02 2.562e+02 3.001e+02 3.817e+02 1.277e+03, threshold=6.002e+02, percent-clipped=2.0 +2023-03-09 14:54:41,503 INFO [train.py:898] (2/4) Epoch 22, batch 400, loss[loss=0.1587, simple_loss=0.2451, pruned_loss=0.03616, over 18481.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2517, pruned_loss=0.03617, over 3119714.59 frames. ], batch size: 44, lr: 5.17e-03, grad_scale: 8.0 +2023-03-09 14:55:28,782 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-09 14:55:30,670 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-09 14:55:40,020 INFO [train.py:898] (2/4) Epoch 22, batch 450, loss[loss=0.1378, simple_loss=0.2193, pruned_loss=0.02812, over 18192.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.252, pruned_loss=0.03644, over 3224925.52 frames. ], batch size: 44, lr: 5.17e-03, grad_scale: 8.0 +2023-03-09 14:55:41,922 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-03-09 14:55:57,343 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.685e+02 2.597e+02 2.928e+02 3.376e+02 5.957e+02, threshold=5.857e+02, percent-clipped=0.0 +2023-03-09 14:56:28,265 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8758, 5.4117, 5.3430, 5.4174, 4.8664, 5.3054, 4.6669, 5.2705], + device='cuda:2'), covar=tensor([0.0240, 0.0260, 0.0200, 0.0438, 0.0409, 0.0230, 0.1097, 0.0348], + device='cuda:2'), in_proj_covar=tensor([0.0218, 0.0263, 0.0258, 0.0333, 0.0273, 0.0270, 0.0314, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 14:56:38,194 INFO [train.py:898] (2/4) Epoch 22, batch 500, loss[loss=0.1394, simple_loss=0.2243, pruned_loss=0.02723, over 18248.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2518, pruned_loss=0.03637, over 3300950.40 frames. 
], batch size: 45, lr: 5.17e-03, grad_scale: 8.0 +2023-03-09 14:56:42,613 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8200, 3.5992, 4.9070, 2.7521, 4.3700, 2.4513, 2.9539, 1.8847], + device='cuda:2'), covar=tensor([0.1200, 0.0950, 0.0176, 0.1014, 0.0550, 0.2801, 0.2901, 0.2162], + device='cuda:2'), in_proj_covar=tensor([0.0219, 0.0246, 0.0192, 0.0198, 0.0258, 0.0272, 0.0322, 0.0235], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 14:57:06,176 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76839.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:57:13,008 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8231, 3.5651, 5.1274, 2.8753, 4.5530, 2.4664, 3.0297, 1.7949], + device='cuda:2'), covar=tensor([0.1133, 0.0866, 0.0130, 0.0865, 0.0486, 0.2632, 0.2453, 0.2094], + device='cuda:2'), in_proj_covar=tensor([0.0218, 0.0245, 0.0191, 0.0197, 0.0256, 0.0270, 0.0319, 0.0233], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 14:57:36,635 INFO [train.py:898] (2/4) Epoch 22, batch 550, loss[loss=0.1735, simple_loss=0.2593, pruned_loss=0.04381, over 18299.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2523, pruned_loss=0.03652, over 3357367.40 frames. ], batch size: 57, lr: 5.17e-03, grad_scale: 8.0 +2023-03-09 14:57:53,083 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.4056, 4.7716, 4.4051, 4.6487, 4.4824, 4.4435, 4.8506, 4.7555], + device='cuda:2'), covar=tensor([0.0996, 0.0777, 0.1841, 0.0746, 0.1282, 0.0681, 0.0679, 0.0778], + device='cuda:2'), in_proj_covar=tensor([0.0604, 0.0527, 0.0380, 0.0546, 0.0733, 0.0545, 0.0746, 0.0565], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0005, 0.0004], + device='cuda:2') +2023-03-09 14:57:53,942 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.970e+02 2.654e+02 3.120e+02 3.965e+02 8.304e+02, threshold=6.239e+02, percent-clipped=1.0 +2023-03-09 14:58:17,079 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76900.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:58:34,774 INFO [train.py:898] (2/4) Epoch 22, batch 600, loss[loss=0.1796, simple_loss=0.2657, pruned_loss=0.04676, over 18566.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2525, pruned_loss=0.03643, over 3403907.77 frames. ], batch size: 54, lr: 5.16e-03, grad_scale: 8.0 +2023-03-09 14:58:47,328 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76925.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 14:59:14,935 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76950.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 14:59:32,245 INFO [train.py:898] (2/4) Epoch 22, batch 650, loss[loss=0.1835, simple_loss=0.2749, pruned_loss=0.04604, over 17984.00 frames. ], tot_loss[loss=0.1622, simple_loss=0.252, pruned_loss=0.03618, over 3442500.06 frames. 
], batch size: 65, lr: 5.16e-03, grad_scale: 8.0 +2023-03-09 14:59:42,092 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76973.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 14:59:49,471 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76979.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 14:59:50,127 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.663e+02 2.460e+02 2.912e+02 3.515e+02 5.898e+02, threshold=5.824e+02, percent-clipped=0.0 +2023-03-09 15:00:10,833 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=76998.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:00:30,420 INFO [train.py:898] (2/4) Epoch 22, batch 700, loss[loss=0.1774, simple_loss=0.2675, pruned_loss=0.04364, over 18396.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2525, pruned_loss=0.03641, over 3473692.57 frames. ], batch size: 52, lr: 5.16e-03, grad_scale: 8.0 +2023-03-09 15:00:31,876 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2387, 4.1614, 4.0023, 4.1523, 4.1806, 3.6640, 4.1392, 3.9881], + device='cuda:2'), covar=tensor([0.0494, 0.0764, 0.1271, 0.0717, 0.0605, 0.0507, 0.0492, 0.0978], + device='cuda:2'), in_proj_covar=tensor([0.0498, 0.0559, 0.0700, 0.0436, 0.0448, 0.0508, 0.0542, 0.0672], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 15:00:42,784 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6166, 2.2852, 2.5026, 2.5147, 3.1897, 4.7636, 4.6168, 3.3720], + device='cuda:2'), covar=tensor([0.1911, 0.2575, 0.3334, 0.2006, 0.2412, 0.0251, 0.0402, 0.0947], + device='cuda:2'), in_proj_covar=tensor([0.0305, 0.0349, 0.0387, 0.0281, 0.0393, 0.0245, 0.0298, 0.0257], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 15:00:43,764 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6683, 4.7244, 4.7359, 4.4540, 4.4681, 4.5466, 4.8151, 4.8191], + device='cuda:2'), covar=tensor([0.0090, 0.0074, 0.0065, 0.0134, 0.0068, 0.0152, 0.0098, 0.0123], + device='cuda:2'), in_proj_covar=tensor([0.0094, 0.0069, 0.0074, 0.0093, 0.0075, 0.0104, 0.0086, 0.0085], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 15:00:45,313 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77027.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 15:01:29,150 INFO [train.py:898] (2/4) Epoch 22, batch 750, loss[loss=0.1581, simple_loss=0.2552, pruned_loss=0.03054, over 18361.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.2528, pruned_loss=0.0365, over 3507472.51 frames. ], batch size: 55, lr: 5.16e-03, grad_scale: 8.0 +2023-03-09 15:01:47,416 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.651e+02 2.728e+02 3.361e+02 4.039e+02 1.393e+03, threshold=6.722e+02, percent-clipped=6.0 +2023-03-09 15:02:05,331 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77095.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:02:27,379 INFO [train.py:898] (2/4) Epoch 22, batch 800, loss[loss=0.1674, simple_loss=0.2582, pruned_loss=0.03824, over 18379.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2531, pruned_loss=0.03673, over 3527646.27 frames. 
], batch size: 50, lr: 5.16e-03, grad_scale: 8.0 +2023-03-09 15:02:46,175 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5841, 2.8372, 2.5319, 2.8967, 3.6790, 3.5246, 3.0550, 2.9288], + device='cuda:2'), covar=tensor([0.0247, 0.0240, 0.0580, 0.0385, 0.0161, 0.0157, 0.0390, 0.0356], + device='cuda:2'), in_proj_covar=tensor([0.0141, 0.0135, 0.0166, 0.0160, 0.0131, 0.0119, 0.0155, 0.0157], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 15:02:51,296 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8663, 4.0078, 2.2916, 3.9444, 5.2311, 2.4904, 3.7767, 3.9998], + device='cuda:2'), covar=tensor([0.0195, 0.1265, 0.1754, 0.0695, 0.0078, 0.1327, 0.0688, 0.0676], + device='cuda:2'), in_proj_covar=tensor([0.0169, 0.0274, 0.0206, 0.0196, 0.0127, 0.0184, 0.0216, 0.0223], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 15:03:16,308 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77156.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 15:03:25,846 INFO [train.py:898] (2/4) Epoch 22, batch 850, loss[loss=0.1426, simple_loss=0.2288, pruned_loss=0.02825, over 18491.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2528, pruned_loss=0.0369, over 3530225.92 frames. ], batch size: 44, lr: 5.16e-03, grad_scale: 8.0 +2023-03-09 15:03:43,940 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.868e+02 2.628e+02 3.238e+02 3.999e+02 6.972e+02, threshold=6.476e+02, percent-clipped=1.0 +2023-03-09 15:04:02,083 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77195.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:04:20,638 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77211.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:04:24,961 INFO [train.py:898] (2/4) Epoch 22, batch 900, loss[loss=0.1374, simple_loss=0.2211, pruned_loss=0.02686, over 18378.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.2523, pruned_loss=0.03676, over 3539506.13 frames. ], batch size: 42, lr: 5.15e-03, grad_scale: 8.0 +2023-03-09 15:05:23,912 INFO [train.py:898] (2/4) Epoch 22, batch 950, loss[loss=0.1625, simple_loss=0.2582, pruned_loss=0.03344, over 18363.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.2527, pruned_loss=0.03656, over 3550981.63 frames. ], batch size: 56, lr: 5.15e-03, grad_scale: 8.0 +2023-03-09 15:05:32,132 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77272.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:05:33,147 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77273.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:05:37,480 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77277.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:05:41,083 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.729e+02 2.727e+02 3.251e+02 3.771e+02 1.514e+03, threshold=6.501e+02, percent-clipped=4.0 +2023-03-09 15:06:22,532 INFO [train.py:898] (2/4) Epoch 22, batch 1000, loss[loss=0.1879, simple_loss=0.2777, pruned_loss=0.04906, over 18364.00 frames. ], tot_loss[loss=0.163, simple_loss=0.2527, pruned_loss=0.03664, over 3558532.39 frames. 
], batch size: 56, lr: 5.15e-03, grad_scale: 8.0 +2023-03-09 15:06:30,071 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-03-09 15:06:45,462 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77334.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:06:50,114 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77338.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:06:51,203 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7472, 5.3115, 5.2755, 5.3056, 4.8320, 5.2314, 4.6449, 5.1859], + device='cuda:2'), covar=tensor([0.0249, 0.0271, 0.0200, 0.0411, 0.0396, 0.0232, 0.1018, 0.0326], + device='cuda:2'), in_proj_covar=tensor([0.0218, 0.0263, 0.0257, 0.0334, 0.0273, 0.0270, 0.0313, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 15:06:55,199 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4387, 3.3072, 2.0376, 4.2564, 2.8808, 4.1380, 2.4510, 3.8409], + device='cuda:2'), covar=tensor([0.0717, 0.0890, 0.1623, 0.0579, 0.0964, 0.0295, 0.1231, 0.0391], + device='cuda:2'), in_proj_covar=tensor([0.0217, 0.0228, 0.0192, 0.0288, 0.0194, 0.0265, 0.0204, 0.0201], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 15:07:01,444 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.87 vs. limit=5.0 +2023-03-09 15:07:09,477 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.6288, 5.5785, 5.2219, 5.5267, 5.5365, 5.0228, 5.3983, 5.1556], + device='cuda:2'), covar=tensor([0.0354, 0.0408, 0.1256, 0.0678, 0.0512, 0.0371, 0.0413, 0.0991], + device='cuda:2'), in_proj_covar=tensor([0.0493, 0.0550, 0.0693, 0.0434, 0.0443, 0.0503, 0.0538, 0.0665], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 15:07:21,368 INFO [train.py:898] (2/4) Epoch 22, batch 1050, loss[loss=0.155, simple_loss=0.2436, pruned_loss=0.03315, over 18404.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2528, pruned_loss=0.03637, over 3561417.76 frames. ], batch size: 48, lr: 5.15e-03, grad_scale: 8.0 +2023-03-09 15:07:38,060 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.901e+02 2.608e+02 3.171e+02 4.041e+02 8.045e+02, threshold=6.343e+02, percent-clipped=4.0 +2023-03-09 15:08:19,736 INFO [train.py:898] (2/4) Epoch 22, batch 1100, loss[loss=0.1439, simple_loss=0.2244, pruned_loss=0.03164, over 18423.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.2528, pruned_loss=0.03652, over 3571469.01 frames. 
], batch size: 43, lr: 5.15e-03, grad_scale: 8.0 +2023-03-09 15:08:20,095 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5096, 3.3510, 2.2070, 4.2048, 2.9636, 4.0551, 2.4730, 3.7276], + device='cuda:2'), covar=tensor([0.0609, 0.0816, 0.1488, 0.0567, 0.0867, 0.0280, 0.1210, 0.0439], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0227, 0.0191, 0.0286, 0.0192, 0.0263, 0.0202, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 15:09:02,482 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77451.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 15:09:18,515 INFO [train.py:898] (2/4) Epoch 22, batch 1150, loss[loss=0.1712, simple_loss=0.2633, pruned_loss=0.03956, over 18554.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2531, pruned_loss=0.03673, over 3576012.73 frames. ], batch size: 54, lr: 5.15e-03, grad_scale: 8.0 +2023-03-09 15:09:35,430 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.520e+02 2.405e+02 2.730e+02 3.168e+02 5.091e+02, threshold=5.460e+02, percent-clipped=0.0 +2023-03-09 15:09:52,924 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77495.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:10:16,625 INFO [train.py:898] (2/4) Epoch 22, batch 1200, loss[loss=0.1767, simple_loss=0.2716, pruned_loss=0.04087, over 18232.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2521, pruned_loss=0.03662, over 3577903.61 frames. ], batch size: 60, lr: 5.14e-03, grad_scale: 8.0 +2023-03-09 15:10:42,573 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-09 15:10:48,605 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77543.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:11:15,450 INFO [train.py:898] (2/4) Epoch 22, batch 1250, loss[loss=0.1943, simple_loss=0.2813, pruned_loss=0.05368, over 18611.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2521, pruned_loss=0.03649, over 3580242.45 frames. ], batch size: 52, lr: 5.14e-03, grad_scale: 8.0 +2023-03-09 15:11:18,517 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77567.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:11:32,786 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.900e+02 2.671e+02 3.249e+02 3.869e+02 7.848e+02, threshold=6.498e+02, percent-clipped=7.0 +2023-03-09 15:11:39,217 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-09 15:12:04,834 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4654, 5.4151, 5.0864, 5.3633, 5.3834, 4.8253, 5.2856, 5.0463], + device='cuda:2'), covar=tensor([0.0426, 0.0478, 0.1230, 0.0772, 0.0569, 0.0435, 0.0435, 0.0962], + device='cuda:2'), in_proj_covar=tensor([0.0500, 0.0560, 0.0700, 0.0442, 0.0449, 0.0510, 0.0545, 0.0672], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 15:12:13,381 INFO [train.py:898] (2/4) Epoch 22, batch 1300, loss[loss=0.1532, simple_loss=0.2464, pruned_loss=0.02996, over 18624.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.253, pruned_loss=0.03685, over 3572621.77 frames. 
], batch size: 52, lr: 5.14e-03, grad_scale: 8.0 +2023-03-09 15:12:14,959 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77616.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:12:30,245 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77629.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:12:34,905 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77633.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:12:41,789 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0123, 5.4734, 5.4536, 5.4422, 4.9799, 5.4011, 4.8109, 5.3607], + device='cuda:2'), covar=tensor([0.0238, 0.0289, 0.0191, 0.0446, 0.0432, 0.0236, 0.1067, 0.0321], + device='cuda:2'), in_proj_covar=tensor([0.0218, 0.0263, 0.0257, 0.0335, 0.0274, 0.0270, 0.0313, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 15:12:52,309 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-09 15:13:12,278 INFO [train.py:898] (2/4) Epoch 22, batch 1350, loss[loss=0.1538, simple_loss=0.2455, pruned_loss=0.03108, over 18389.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2515, pruned_loss=0.03621, over 3590438.85 frames. ], batch size: 50, lr: 5.14e-03, grad_scale: 8.0 +2023-03-09 15:13:16,217 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4783, 3.3291, 2.0723, 4.2578, 2.9397, 4.0636, 2.4215, 3.7161], + device='cuda:2'), covar=tensor([0.0632, 0.0902, 0.1568, 0.0526, 0.0868, 0.0289, 0.1294, 0.0434], + device='cuda:2'), in_proj_covar=tensor([0.0216, 0.0228, 0.0193, 0.0288, 0.0193, 0.0265, 0.0205, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 15:13:22,299 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 15:13:26,554 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77677.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:13:26,577 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6609, 3.6130, 3.5527, 3.1702, 3.3867, 2.5856, 2.4946, 3.6943], + device='cuda:2'), covar=tensor([0.0074, 0.0107, 0.0082, 0.0147, 0.0108, 0.0246, 0.0328, 0.0060], + device='cuda:2'), in_proj_covar=tensor([0.0141, 0.0161, 0.0135, 0.0189, 0.0143, 0.0180, 0.0183, 0.0122], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 15:13:26,582 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77677.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:13:29,478 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.757e+02 2.485e+02 2.994e+02 3.704e+02 6.281e+02, threshold=5.988e+02, percent-clipped=0.0 +2023-03-09 15:14:10,451 INFO [train.py:898] (2/4) Epoch 22, batch 1400, loss[loss=0.1769, simple_loss=0.2675, pruned_loss=0.04308, over 18479.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2525, pruned_loss=0.03645, over 3596543.57 frames. 
], batch size: 59, lr: 5.14e-03, grad_scale: 4.0 +2023-03-09 15:14:14,172 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4445, 3.3341, 1.9807, 4.1549, 2.8152, 3.9647, 2.3497, 3.6542], + device='cuda:2'), covar=tensor([0.0641, 0.0883, 0.1583, 0.0543, 0.0850, 0.0350, 0.1268, 0.0434], + device='cuda:2'), in_proj_covar=tensor([0.0216, 0.0229, 0.0193, 0.0289, 0.0193, 0.0266, 0.0205, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 15:14:36,556 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77738.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:14:50,929 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77751.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 15:15:07,978 INFO [train.py:898] (2/4) Epoch 22, batch 1450, loss[loss=0.175, simple_loss=0.2696, pruned_loss=0.04016, over 18481.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.2525, pruned_loss=0.03662, over 3594214.68 frames. ], batch size: 59, lr: 5.14e-03, grad_scale: 4.0 +2023-03-09 15:15:26,461 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.943e+02 2.691e+02 3.335e+02 4.479e+02 1.440e+03, threshold=6.670e+02, percent-clipped=5.0 +2023-03-09 15:15:46,838 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77799.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:16:06,832 INFO [train.py:898] (2/4) Epoch 22, batch 1500, loss[loss=0.1434, simple_loss=0.2354, pruned_loss=0.02569, over 18474.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2525, pruned_loss=0.0364, over 3586004.42 frames. ], batch size: 47, lr: 5.13e-03, grad_scale: 4.0 +2023-03-09 15:17:04,738 INFO [train.py:898] (2/4) Epoch 22, batch 1550, loss[loss=0.1656, simple_loss=0.2584, pruned_loss=0.03647, over 18383.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2521, pruned_loss=0.03653, over 3578765.26 frames. ], batch size: 52, lr: 5.13e-03, grad_scale: 4.0 +2023-03-09 15:17:07,443 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77867.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:17:23,125 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.906e+02 2.725e+02 3.263e+02 3.842e+02 6.752e+02, threshold=6.526e+02, percent-clipped=2.0 +2023-03-09 15:18:03,237 INFO [train.py:898] (2/4) Epoch 22, batch 1600, loss[loss=0.1826, simple_loss=0.2744, pruned_loss=0.04543, over 17878.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2522, pruned_loss=0.03675, over 3579501.57 frames. 
], batch size: 70, lr: 5.13e-03, grad_scale: 8.0 +2023-03-09 15:18:03,402 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77915.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:18:19,198 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77929.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:18:24,219 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77933.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:18:33,151 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8245, 4.0155, 2.6481, 4.0116, 5.2099, 2.5262, 3.8087, 3.9475], + device='cuda:2'), covar=tensor([0.0217, 0.1268, 0.1580, 0.0741, 0.0108, 0.1296, 0.0733, 0.0781], + device='cuda:2'), in_proj_covar=tensor([0.0171, 0.0275, 0.0207, 0.0197, 0.0130, 0.0186, 0.0216, 0.0228], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 15:18:52,520 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-03-09 15:19:00,942 INFO [train.py:898] (2/4) Epoch 22, batch 1650, loss[loss=0.1805, simple_loss=0.2732, pruned_loss=0.04392, over 17165.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.2524, pruned_loss=0.03675, over 3580637.51 frames. ], batch size: 78, lr: 5.13e-03, grad_scale: 8.0 +2023-03-09 15:19:09,469 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77972.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:19:15,203 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77977.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:19:19,843 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=77981.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:19:20,798 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.050e+02 2.525e+02 3.024e+02 3.859e+02 1.001e+03, threshold=6.048e+02, percent-clipped=3.0 +2023-03-09 15:19:26,287 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8338, 3.2682, 4.5339, 3.8914, 3.2489, 4.8258, 3.9850, 3.0243], + device='cuda:2'), covar=tensor([0.0460, 0.1135, 0.0253, 0.0408, 0.1170, 0.0192, 0.0546, 0.0924], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0238, 0.0211, 0.0163, 0.0222, 0.0211, 0.0247, 0.0196], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 15:20:03,112 INFO [train.py:898] (2/4) Epoch 22, batch 1700, loss[loss=0.1701, simple_loss=0.2573, pruned_loss=0.04143, over 18485.00 frames. ], tot_loss[loss=0.1632, simple_loss=0.2529, pruned_loss=0.03676, over 3592396.85 frames. ], batch size: 51, lr: 5.13e-03, grad_scale: 4.0 +2023-03-09 15:20:25,567 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78033.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:21:01,844 INFO [train.py:898] (2/4) Epoch 22, batch 1750, loss[loss=0.1514, simple_loss=0.2451, pruned_loss=0.02885, over 18507.00 frames. ], tot_loss[loss=0.163, simple_loss=0.2528, pruned_loss=0.03656, over 3590354.27 frames. 
], batch size: 51, lr: 5.13e-03, grad_scale: 4.0 +2023-03-09 15:21:22,782 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.851e+02 2.720e+02 3.295e+02 3.984e+02 2.464e+03, threshold=6.591e+02, percent-clipped=4.0 +2023-03-09 15:21:48,308 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9265, 4.7318, 4.7506, 3.6783, 3.8637, 3.7324, 2.8058, 2.4948], + device='cuda:2'), covar=tensor([0.0240, 0.0123, 0.0071, 0.0250, 0.0361, 0.0195, 0.0647, 0.0825], + device='cuda:2'), in_proj_covar=tensor([0.0071, 0.0060, 0.0062, 0.0068, 0.0088, 0.0066, 0.0076, 0.0084], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 15:22:00,370 INFO [train.py:898] (2/4) Epoch 22, batch 1800, loss[loss=0.1371, simple_loss=0.2239, pruned_loss=0.02516, over 18484.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2524, pruned_loss=0.03654, over 3591783.77 frames. ], batch size: 44, lr: 5.12e-03, grad_scale: 4.0 +2023-03-09 15:22:58,102 INFO [train.py:898] (2/4) Epoch 22, batch 1850, loss[loss=0.1592, simple_loss=0.2595, pruned_loss=0.02946, over 18364.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.2526, pruned_loss=0.03658, over 3587693.15 frames. ], batch size: 55, lr: 5.12e-03, grad_scale: 4.0 +2023-03-09 15:23:07,636 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9631, 5.4391, 5.4104, 5.3819, 4.9185, 5.3113, 4.8335, 5.3054], + device='cuda:2'), covar=tensor([0.0220, 0.0244, 0.0162, 0.0382, 0.0407, 0.0221, 0.0942, 0.0305], + device='cuda:2'), in_proj_covar=tensor([0.0219, 0.0263, 0.0256, 0.0335, 0.0272, 0.0271, 0.0312, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 15:23:19,115 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.849e+02 2.526e+02 2.955e+02 3.559e+02 8.124e+02, threshold=5.910e+02, percent-clipped=2.0 +2023-03-09 15:23:38,099 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-09 15:23:54,068 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0661, 4.3066, 2.7097, 4.1727, 5.3534, 2.8604, 4.1160, 3.9986], + device='cuda:2'), covar=tensor([0.0157, 0.1235, 0.1457, 0.0612, 0.0095, 0.1061, 0.0548, 0.0787], + device='cuda:2'), in_proj_covar=tensor([0.0169, 0.0273, 0.0204, 0.0196, 0.0129, 0.0183, 0.0215, 0.0226], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 15:23:57,004 INFO [train.py:898] (2/4) Epoch 22, batch 1900, loss[loss=0.1582, simple_loss=0.246, pruned_loss=0.0352, over 18393.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2522, pruned_loss=0.03664, over 3591381.25 frames. ], batch size: 48, lr: 5.12e-03, grad_scale: 4.0 +2023-03-09 15:24:18,265 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78232.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:24:55,257 INFO [train.py:898] (2/4) Epoch 22, batch 1950, loss[loss=0.1777, simple_loss=0.2684, pruned_loss=0.04347, over 18310.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.2526, pruned_loss=0.03659, over 3588911.56 frames. 
], batch size: 57, lr: 5.12e-03, grad_scale: 4.0 +2023-03-09 15:24:56,693 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9881, 4.6236, 4.6373, 3.4655, 3.7823, 3.6609, 2.5474, 2.5369], + device='cuda:2'), covar=tensor([0.0189, 0.0148, 0.0070, 0.0309, 0.0321, 0.0208, 0.0797, 0.0818], + device='cuda:2'), in_proj_covar=tensor([0.0070, 0.0060, 0.0062, 0.0068, 0.0088, 0.0066, 0.0076, 0.0084], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 15:25:03,425 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78272.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:25:14,977 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.944e+02 2.648e+02 3.049e+02 3.777e+02 6.458e+02, threshold=6.098e+02, percent-clipped=2.0 +2023-03-09 15:25:28,576 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78293.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 15:25:53,392 INFO [train.py:898] (2/4) Epoch 22, batch 2000, loss[loss=0.1518, simple_loss=0.2406, pruned_loss=0.03153, over 18384.00 frames. ], tot_loss[loss=0.1619, simple_loss=0.2516, pruned_loss=0.03607, over 3591400.31 frames. ], batch size: 52, lr: 5.12e-03, grad_scale: 8.0 +2023-03-09 15:25:59,296 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78320.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:26:09,166 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-09 15:26:14,869 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78333.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:26:52,354 INFO [train.py:898] (2/4) Epoch 22, batch 2050, loss[loss=0.1433, simple_loss=0.224, pruned_loss=0.0313, over 18383.00 frames. ], tot_loss[loss=0.1613, simple_loss=0.2513, pruned_loss=0.0357, over 3595220.79 frames. 
], batch size: 42, lr: 5.12e-03, grad_scale: 8.0 +2023-03-09 15:26:59,463 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6162, 3.6246, 2.3425, 4.5002, 3.2258, 4.3903, 2.7096, 4.0170], + device='cuda:2'), covar=tensor([0.0653, 0.0758, 0.1375, 0.0517, 0.0759, 0.0280, 0.1061, 0.0424], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0226, 0.0191, 0.0285, 0.0192, 0.0264, 0.0203, 0.0201], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 15:27:09,387 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1003, 5.5189, 2.9934, 5.3518, 5.2873, 5.5737, 5.3822, 2.9781], + device='cuda:2'), covar=tensor([0.0177, 0.0075, 0.0654, 0.0070, 0.0057, 0.0054, 0.0069, 0.0864], + device='cuda:2'), in_proj_covar=tensor([0.0088, 0.0081, 0.0096, 0.0096, 0.0085, 0.0077, 0.0086, 0.0098], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 15:27:10,378 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78381.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:27:11,262 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.641e+02 2.890e+02 3.341e+02 3.968e+02 7.791e+02, threshold=6.681e+02, percent-clipped=3.0 +2023-03-09 15:27:33,880 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8693, 4.2349, 2.5360, 4.0273, 5.2142, 2.7044, 3.7711, 4.0504], + device='cuda:2'), covar=tensor([0.0185, 0.1122, 0.1631, 0.0675, 0.0084, 0.1170, 0.0743, 0.0727], + device='cuda:2'), in_proj_covar=tensor([0.0171, 0.0275, 0.0206, 0.0197, 0.0130, 0.0185, 0.0218, 0.0227], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 15:27:35,978 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8581, 5.3420, 5.3040, 5.3642, 4.8232, 5.2366, 4.5884, 5.1980], + device='cuda:2'), covar=tensor([0.0272, 0.0284, 0.0206, 0.0372, 0.0383, 0.0217, 0.1197, 0.0353], + device='cuda:2'), in_proj_covar=tensor([0.0219, 0.0262, 0.0256, 0.0334, 0.0272, 0.0271, 0.0311, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 15:27:46,930 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7696, 2.4777, 2.8107, 2.8345, 3.2518, 4.9913, 4.7689, 3.5724], + device='cuda:2'), covar=tensor([0.1823, 0.2487, 0.2998, 0.1833, 0.2571, 0.0249, 0.0431, 0.0941], + device='cuda:2'), in_proj_covar=tensor([0.0306, 0.0350, 0.0388, 0.0281, 0.0394, 0.0248, 0.0298, 0.0259], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 15:27:50,918 INFO [train.py:898] (2/4) Epoch 22, batch 2100, loss[loss=0.1463, simple_loss=0.2316, pruned_loss=0.03047, over 18165.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2516, pruned_loss=0.03593, over 3591487.91 frames. ], batch size: 44, lr: 5.11e-03, grad_scale: 8.0 +2023-03-09 15:28:43,403 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78460.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:28:49,226 INFO [train.py:898] (2/4) Epoch 22, batch 2150, loss[loss=0.1667, simple_loss=0.2612, pruned_loss=0.03607, over 18362.00 frames. ], tot_loss[loss=0.1621, simple_loss=0.2521, pruned_loss=0.036, over 3593227.43 frames. 
], batch size: 56, lr: 5.11e-03, grad_scale: 8.0 +2023-03-09 15:28:56,505 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-09 15:29:01,806 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1723, 5.1749, 5.4978, 5.4387, 5.1939, 6.0083, 5.6541, 5.2892], + device='cuda:2'), covar=tensor([0.1065, 0.0678, 0.0685, 0.0778, 0.1279, 0.0750, 0.0697, 0.1556], + device='cuda:2'), in_proj_covar=tensor([0.0357, 0.0285, 0.0312, 0.0314, 0.0326, 0.0424, 0.0284, 0.0417], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 15:29:08,266 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.757e+02 2.625e+02 3.036e+02 3.633e+02 1.479e+03, threshold=6.073e+02, percent-clipped=4.0 +2023-03-09 15:29:27,080 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4329, 2.6267, 2.4584, 2.8436, 3.5315, 3.3749, 2.9116, 2.7994], + device='cuda:2'), covar=tensor([0.0175, 0.0335, 0.0600, 0.0396, 0.0186, 0.0182, 0.0415, 0.0390], + device='cuda:2'), in_proj_covar=tensor([0.0142, 0.0138, 0.0163, 0.0160, 0.0133, 0.0120, 0.0157, 0.0159], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 15:29:47,486 INFO [train.py:898] (2/4) Epoch 22, batch 2200, loss[loss=0.1617, simple_loss=0.2532, pruned_loss=0.03505, over 18052.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2525, pruned_loss=0.03631, over 3594450.58 frames. ], batch size: 65, lr: 5.11e-03, grad_scale: 8.0 +2023-03-09 15:29:55,166 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78521.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:30:30,417 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.3516, 4.2009, 4.3261, 4.0953, 4.0865, 4.3166, 4.4442, 4.3299], + device='cuda:2'), covar=tensor([0.0119, 0.0124, 0.0131, 0.0166, 0.0116, 0.0166, 0.0115, 0.0163], + device='cuda:2'), in_proj_covar=tensor([0.0096, 0.0070, 0.0076, 0.0095, 0.0076, 0.0105, 0.0088, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 15:30:35,284 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78556.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:30:44,787 INFO [train.py:898] (2/4) Epoch 22, batch 2250, loss[loss=0.1636, simple_loss=0.2527, pruned_loss=0.03726, over 17993.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2533, pruned_loss=0.03665, over 3584694.80 frames. ], batch size: 65, lr: 5.11e-03, grad_scale: 8.0 +2023-03-09 15:31:04,898 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.048e+02 2.745e+02 3.101e+02 3.633e+02 7.718e+02, threshold=6.202e+02, percent-clipped=2.0 +2023-03-09 15:31:11,571 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78588.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 15:31:25,585 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-03-09 15:31:27,162 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. 
limit=2.0 +2023-03-09 15:31:36,158 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0022, 3.6287, 5.0958, 3.0404, 4.4554, 2.6796, 3.1848, 1.7532], + device='cuda:2'), covar=tensor([0.1084, 0.0932, 0.0142, 0.0889, 0.0504, 0.2463, 0.2598, 0.2180], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0247, 0.0197, 0.0200, 0.0260, 0.0275, 0.0327, 0.0236], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 15:31:43,721 INFO [train.py:898] (2/4) Epoch 22, batch 2300, loss[loss=0.1359, simple_loss=0.2219, pruned_loss=0.02499, over 18402.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2534, pruned_loss=0.03668, over 3586206.21 frames. ], batch size: 43, lr: 5.11e-03, grad_scale: 8.0 +2023-03-09 15:31:46,314 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78617.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:32:05,312 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. limit=2.0 +2023-03-09 15:32:42,021 INFO [train.py:898] (2/4) Epoch 22, batch 2350, loss[loss=0.1687, simple_loss=0.2679, pruned_loss=0.03479, over 18353.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2524, pruned_loss=0.03664, over 3583424.67 frames. ], batch size: 55, lr: 5.11e-03, grad_scale: 8.0 +2023-03-09 15:33:01,734 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.907e+02 2.584e+02 2.934e+02 3.285e+02 5.590e+02, threshold=5.868e+02, percent-clipped=0.0 +2023-03-09 15:33:35,175 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5855, 3.0149, 4.3724, 3.6834, 2.7203, 4.5598, 3.8729, 2.7910], + device='cuda:2'), covar=tensor([0.0526, 0.1362, 0.0248, 0.0435, 0.1491, 0.0208, 0.0553, 0.0963], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0240, 0.0215, 0.0166, 0.0224, 0.0212, 0.0249, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 15:33:40,415 INFO [train.py:898] (2/4) Epoch 22, batch 2400, loss[loss=0.1645, simple_loss=0.2617, pruned_loss=0.03367, over 18622.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.253, pruned_loss=0.03687, over 3581397.17 frames. ], batch size: 52, lr: 5.10e-03, grad_scale: 8.0 +2023-03-09 15:34:15,218 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8993, 4.5618, 4.5216, 3.5126, 3.6901, 3.6318, 2.7923, 2.4667], + device='cuda:2'), covar=tensor([0.0224, 0.0145, 0.0108, 0.0308, 0.0373, 0.0217, 0.0652, 0.0913], + device='cuda:2'), in_proj_covar=tensor([0.0071, 0.0060, 0.0063, 0.0069, 0.0089, 0.0067, 0.0077, 0.0085], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 15:34:39,530 INFO [train.py:898] (2/4) Epoch 22, batch 2450, loss[loss=0.1555, simple_loss=0.2504, pruned_loss=0.03032, over 18621.00 frames. ], tot_loss[loss=0.1632, simple_loss=0.2526, pruned_loss=0.03689, over 3578047.02 frames. ], batch size: 52, lr: 5.10e-03, grad_scale: 4.0 +2023-03-09 15:34:51,759 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-09 15:35:00,356 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.881e+02 2.648e+02 3.233e+02 3.743e+02 6.587e+02, threshold=6.467e+02, percent-clipped=2.0 +2023-03-09 15:35:38,729 INFO [train.py:898] (2/4) Epoch 22, batch 2500, loss[loss=0.1315, simple_loss=0.2147, pruned_loss=0.02415, over 18483.00 frames. 
], tot_loss[loss=0.163, simple_loss=0.2527, pruned_loss=0.0367, over 3571210.79 frames. ], batch size: 44, lr: 5.10e-03, grad_scale: 4.0 +2023-03-09 15:35:39,960 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78816.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:36:37,303 INFO [train.py:898] (2/4) Epoch 22, batch 2550, loss[loss=0.1583, simple_loss=0.2378, pruned_loss=0.03934, over 18515.00 frames. ], tot_loss[loss=0.1625, simple_loss=0.2523, pruned_loss=0.03639, over 3575490.23 frames. ], batch size: 44, lr: 5.10e-03, grad_scale: 4.0 +2023-03-09 15:36:47,947 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-03-09 15:36:56,999 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.832e+02 2.718e+02 3.165e+02 3.980e+02 2.438e+03, threshold=6.330e+02, percent-clipped=4.0 +2023-03-09 15:36:58,390 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2473, 5.2202, 5.5130, 5.6514, 5.1835, 6.1051, 5.7876, 5.3471], + device='cuda:2'), covar=tensor([0.1293, 0.0619, 0.0797, 0.0806, 0.1484, 0.0686, 0.0600, 0.1653], + device='cuda:2'), in_proj_covar=tensor([0.0362, 0.0288, 0.0318, 0.0319, 0.0333, 0.0430, 0.0287, 0.0423], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 15:37:03,700 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78888.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:37:30,384 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78912.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:37:30,794 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-09 15:37:34,107 INFO [train.py:898] (2/4) Epoch 22, batch 2600, loss[loss=0.1605, simple_loss=0.2591, pruned_loss=0.03092, over 18301.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2522, pruned_loss=0.03625, over 3581328.00 frames. ], batch size: 54, lr: 5.10e-03, grad_scale: 4.0 +2023-03-09 15:37:38,606 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 15:37:45,161 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7177, 3.7534, 3.5634, 3.2098, 3.5272, 2.9203, 2.7923, 3.7315], + device='cuda:2'), covar=tensor([0.0068, 0.0074, 0.0082, 0.0134, 0.0099, 0.0187, 0.0216, 0.0073], + device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0162, 0.0136, 0.0189, 0.0144, 0.0180, 0.0182, 0.0123], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 15:37:58,480 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=78936.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:38:31,481 INFO [train.py:898] (2/4) Epoch 22, batch 2650, loss[loss=0.1842, simple_loss=0.2741, pruned_loss=0.04714, over 17026.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2524, pruned_loss=0.0362, over 3589221.75 frames. 
], batch size: 78, lr: 5.10e-03, grad_scale: 4.0 +2023-03-09 15:38:31,768 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78965.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:38:51,749 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8350, 3.5835, 2.2758, 4.5657, 3.3611, 4.4105, 2.5377, 4.1620], + device='cuda:2'), covar=tensor([0.0563, 0.0744, 0.1452, 0.0494, 0.0784, 0.0277, 0.1188, 0.0364], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0226, 0.0192, 0.0287, 0.0193, 0.0263, 0.0204, 0.0201], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 15:38:52,453 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.743e+02 2.628e+02 3.171e+02 3.718e+02 9.310e+02, threshold=6.343e+02, percent-clipped=1.0 +2023-03-09 15:39:28,932 INFO [train.py:898] (2/4) Epoch 22, batch 2700, loss[loss=0.1683, simple_loss=0.254, pruned_loss=0.04125, over 18378.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2526, pruned_loss=0.03614, over 3590977.16 frames. ], batch size: 56, lr: 5.09e-03, grad_scale: 4.0 +2023-03-09 15:39:42,943 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79026.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:40:27,090 INFO [train.py:898] (2/4) Epoch 22, batch 2750, loss[loss=0.1489, simple_loss=0.2391, pruned_loss=0.02937, over 18249.00 frames. ], tot_loss[loss=0.1621, simple_loss=0.2521, pruned_loss=0.03608, over 3586996.74 frames. ], batch size: 47, lr: 5.09e-03, grad_scale: 4.0 +2023-03-09 15:40:48,103 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.755e+02 2.673e+02 3.245e+02 3.869e+02 1.413e+03, threshold=6.491e+02, percent-clipped=1.0 +2023-03-09 15:41:25,114 INFO [train.py:898] (2/4) Epoch 22, batch 2800, loss[loss=0.1523, simple_loss=0.2474, pruned_loss=0.02858, over 18401.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2526, pruned_loss=0.03609, over 3580998.52 frames. ], batch size: 52, lr: 5.09e-03, grad_scale: 8.0 +2023-03-09 15:41:26,606 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79116.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:42:22,285 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79164.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:42:23,246 INFO [train.py:898] (2/4) Epoch 22, batch 2850, loss[loss=0.1825, simple_loss=0.2776, pruned_loss=0.04368, over 18387.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2527, pruned_loss=0.03609, over 3593883.86 frames. 
], batch size: 52, lr: 5.09e-03, grad_scale: 8.0 +2023-03-09 15:42:45,531 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.599e+02 2.640e+02 3.106e+02 3.691e+02 7.833e+02, threshold=6.212e+02, percent-clipped=1.0 +2023-03-09 15:43:01,517 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9151, 5.4125, 5.3987, 5.3988, 4.8825, 5.3162, 4.7574, 5.3204], + device='cuda:2'), covar=tensor([0.0202, 0.0271, 0.0169, 0.0411, 0.0406, 0.0212, 0.0979, 0.0294], + device='cuda:2'), in_proj_covar=tensor([0.0221, 0.0266, 0.0261, 0.0339, 0.0277, 0.0277, 0.0314, 0.0266], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 15:43:15,198 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9987, 3.8767, 5.1834, 4.6239, 3.5417, 3.2328, 4.7047, 5.4456], + device='cuda:2'), covar=tensor([0.0768, 0.1541, 0.0196, 0.0382, 0.0861, 0.1106, 0.0350, 0.0296], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0279, 0.0159, 0.0184, 0.0194, 0.0193, 0.0198, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 15:43:19,628 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79212.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:43:20,787 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4798, 2.4193, 4.0041, 3.5224, 2.4100, 4.1427, 3.6714, 2.6437], + device='cuda:2'), covar=tensor([0.0516, 0.1684, 0.0379, 0.0382, 0.1651, 0.0229, 0.0627, 0.1039], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0239, 0.0213, 0.0166, 0.0224, 0.0211, 0.0249, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 15:43:22,658 INFO [train.py:898] (2/4) Epoch 22, batch 2900, loss[loss=0.1527, simple_loss=0.2435, pruned_loss=0.03093, over 18540.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2525, pruned_loss=0.036, over 3593200.53 frames. ], batch size: 49, lr: 5.09e-03, grad_scale: 4.0 +2023-03-09 15:43:52,378 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8781, 3.0516, 4.5888, 3.7494, 2.9219, 4.7797, 4.0749, 3.1399], + device='cuda:2'), covar=tensor([0.0455, 0.1390, 0.0248, 0.0468, 0.1358, 0.0172, 0.0508, 0.0833], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0240, 0.0214, 0.0166, 0.0224, 0.0212, 0.0249, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 15:44:15,606 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79260.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:44:21,262 INFO [train.py:898] (2/4) Epoch 22, batch 2950, loss[loss=0.1407, simple_loss=0.2258, pruned_loss=0.02778, over 18498.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2525, pruned_loss=0.03612, over 3580065.95 frames. ], batch size: 44, lr: 5.09e-03, grad_scale: 4.0 +2023-03-09 15:44:32,724 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.30 vs. 
limit=5.0 +2023-03-09 15:44:42,794 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.956e+02 2.705e+02 3.045e+02 3.689e+02 6.352e+02, threshold=6.090e+02, percent-clipped=1.0 +2023-03-09 15:44:44,817 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79285.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 15:45:10,088 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5515, 5.4678, 5.1397, 5.4579, 5.4015, 4.8407, 5.3240, 5.1120], + device='cuda:2'), covar=tensor([0.0375, 0.0437, 0.1240, 0.0728, 0.0647, 0.0419, 0.0442, 0.0992], + device='cuda:2'), in_proj_covar=tensor([0.0500, 0.0561, 0.0703, 0.0442, 0.0454, 0.0514, 0.0548, 0.0680], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 15:45:19,367 INFO [train.py:898] (2/4) Epoch 22, batch 3000, loss[loss=0.1568, simple_loss=0.2339, pruned_loss=0.03989, over 17687.00 frames. ], tot_loss[loss=0.1622, simple_loss=0.2525, pruned_loss=0.03592, over 3586735.51 frames. ], batch size: 39, lr: 5.09e-03, grad_scale: 4.0 +2023-03-09 15:45:19,367 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 15:45:31,239 INFO [train.py:932] (2/4) Epoch 22, validation: loss=0.1498, simple_loss=0.249, pruned_loss=0.02526, over 944034.00 frames. +2023-03-09 15:45:31,240 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 15:45:38,864 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79321.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:45:46,629 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2913, 5.2266, 5.5707, 5.5757, 5.1891, 6.0591, 5.7285, 5.3736], + device='cuda:2'), covar=tensor([0.0966, 0.0601, 0.0665, 0.0643, 0.1268, 0.0676, 0.0616, 0.1361], + device='cuda:2'), in_proj_covar=tensor([0.0361, 0.0285, 0.0316, 0.0316, 0.0330, 0.0428, 0.0285, 0.0420], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 15:45:53,347 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79333.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:46:08,226 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79346.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 15:46:13,831 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.74 vs. limit=2.0 +2023-03-09 15:46:28,605 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79364.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:46:29,399 INFO [train.py:898] (2/4) Epoch 22, batch 3050, loss[loss=0.1809, simple_loss=0.2709, pruned_loss=0.0455, over 18259.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2524, pruned_loss=0.03607, over 3593541.20 frames. 
], batch size: 60, lr: 5.08e-03, grad_scale: 4.0 +2023-03-09 15:46:41,867 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79375.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 15:46:48,482 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8405, 2.9930, 4.3962, 3.6968, 2.8894, 4.6590, 4.0310, 3.0257], + device='cuda:2'), covar=tensor([0.0496, 0.1521, 0.0298, 0.0463, 0.1458, 0.0208, 0.0523, 0.0926], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0237, 0.0213, 0.0165, 0.0223, 0.0210, 0.0247, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 15:46:52,689 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.974e+02 2.697e+02 3.145e+02 3.968e+02 7.194e+02, threshold=6.290e+02, percent-clipped=6.0 +2023-03-09 15:47:04,542 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79394.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:47:28,504 INFO [train.py:898] (2/4) Epoch 22, batch 3100, loss[loss=0.1534, simple_loss=0.232, pruned_loss=0.03739, over 18431.00 frames. ], tot_loss[loss=0.1625, simple_loss=0.2526, pruned_loss=0.03617, over 3592777.67 frames. ], batch size: 43, lr: 5.08e-03, grad_scale: 4.0 +2023-03-09 15:47:40,668 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79425.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:47:54,519 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79436.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 15:48:20,182 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8960, 3.5827, 4.7933, 2.7087, 4.2679, 2.5001, 2.9577, 1.8402], + device='cuda:2'), covar=tensor([0.1100, 0.0895, 0.0157, 0.1007, 0.0563, 0.2604, 0.2696, 0.2095], + device='cuda:2'), in_proj_covar=tensor([0.0221, 0.0245, 0.0198, 0.0199, 0.0258, 0.0273, 0.0321, 0.0236], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 15:48:27,402 INFO [train.py:898] (2/4) Epoch 22, batch 3150, loss[loss=0.1732, simple_loss=0.2671, pruned_loss=0.03963, over 18273.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2528, pruned_loss=0.03602, over 3595680.30 frames. ], batch size: 57, lr: 5.08e-03, grad_scale: 4.0 +2023-03-09 15:48:50,537 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.854e+02 2.702e+02 3.215e+02 3.838e+02 6.492e+02, threshold=6.430e+02, percent-clipped=3.0 +2023-03-09 15:49:25,662 INFO [train.py:898] (2/4) Epoch 22, batch 3200, loss[loss=0.1454, simple_loss=0.2292, pruned_loss=0.03077, over 18401.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2532, pruned_loss=0.03612, over 3587874.42 frames. ], batch size: 42, lr: 5.08e-03, grad_scale: 8.0 +2023-03-09 15:49:55,941 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7920, 3.1571, 4.5259, 3.7532, 2.8033, 4.7560, 3.9382, 3.0408], + device='cuda:2'), covar=tensor([0.0481, 0.1328, 0.0240, 0.0458, 0.1492, 0.0177, 0.0626, 0.0927], + device='cuda:2'), in_proj_covar=tensor([0.0213, 0.0237, 0.0211, 0.0165, 0.0223, 0.0211, 0.0248, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 15:50:24,052 INFO [train.py:898] (2/4) Epoch 22, batch 3250, loss[loss=0.1898, simple_loss=0.2709, pruned_loss=0.05433, over 12119.00 frames. 
], tot_loss[loss=0.1617, simple_loss=0.2521, pruned_loss=0.0357, over 3576186.18 frames. ], batch size: 129, lr: 5.08e-03, grad_scale: 8.0 +2023-03-09 15:50:46,087 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.828e+02 2.603e+02 3.051e+02 3.520e+02 6.535e+02, threshold=6.103e+02, percent-clipped=1.0 +2023-03-09 15:50:47,564 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79585.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:51:03,250 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-09 15:51:21,984 INFO [train.py:898] (2/4) Epoch 22, batch 3300, loss[loss=0.1562, simple_loss=0.2375, pruned_loss=0.0374, over 18559.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2508, pruned_loss=0.03545, over 3586603.32 frames. ], batch size: 45, lr: 5.08e-03, grad_scale: 8.0 +2023-03-09 15:51:28,943 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79621.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:51:52,900 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79641.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 15:51:58,675 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79646.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:52:10,794 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79657.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:52:19,965 INFO [train.py:898] (2/4) Epoch 22, batch 3350, loss[loss=0.1795, simple_loss=0.2699, pruned_loss=0.04456, over 18357.00 frames. ], tot_loss[loss=0.161, simple_loss=0.2509, pruned_loss=0.0355, over 3598403.78 frames. ], batch size: 56, lr: 5.07e-03, grad_scale: 4.0 +2023-03-09 15:52:24,606 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79669.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:52:42,673 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.916e+02 2.736e+02 3.253e+02 3.881e+02 1.172e+03, threshold=6.507e+02, percent-clipped=7.0 +2023-03-09 15:52:47,556 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79689.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:53:18,221 INFO [train.py:898] (2/4) Epoch 22, batch 3400, loss[loss=0.1361, simple_loss=0.2215, pruned_loss=0.02532, over 18544.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.251, pruned_loss=0.03561, over 3584468.31 frames. ], batch size: 49, lr: 5.07e-03, grad_scale: 4.0 +2023-03-09 15:53:22,118 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79718.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:53:24,116 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79720.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:53:36,283 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79731.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 15:54:12,612 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79762.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:54:15,569 INFO [train.py:898] (2/4) Epoch 22, batch 3450, loss[loss=0.1529, simple_loss=0.2464, pruned_loss=0.02969, over 18552.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2506, pruned_loss=0.03562, over 3590011.59 frames. 
], batch size: 54, lr: 5.07e-03, grad_scale: 4.0 +2023-03-09 15:54:38,366 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.822e+02 2.532e+02 2.897e+02 3.710e+02 6.365e+02, threshold=5.795e+02, percent-clipped=0.0 +2023-03-09 15:55:14,088 INFO [train.py:898] (2/4) Epoch 22, batch 3500, loss[loss=0.1662, simple_loss=0.2505, pruned_loss=0.04101, over 18544.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.2504, pruned_loss=0.03555, over 3593461.45 frames. ], batch size: 49, lr: 5.07e-03, grad_scale: 4.0 +2023-03-09 15:55:24,027 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79823.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:56:09,539 INFO [train.py:898] (2/4) Epoch 22, batch 3550, loss[loss=0.1764, simple_loss=0.2696, pruned_loss=0.04156, over 18217.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2501, pruned_loss=0.0355, over 3586533.56 frames. ], batch size: 60, lr: 5.07e-03, grad_scale: 4.0 +2023-03-09 15:56:31,862 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-09 15:56:32,290 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.670e+02 2.485e+02 2.929e+02 3.619e+02 1.143e+03, threshold=5.859e+02, percent-clipped=2.0 +2023-03-09 15:57:05,064 INFO [train.py:898] (2/4) Epoch 22, batch 3600, loss[loss=0.1695, simple_loss=0.2622, pruned_loss=0.03846, over 18299.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.25, pruned_loss=0.03566, over 3572468.19 frames. ], batch size: 57, lr: 5.07e-03, grad_scale: 8.0 +2023-03-09 15:57:26,871 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0788, 3.4222, 3.3457, 2.9038, 2.9851, 2.8106, 2.4815, 2.3557], + device='cuda:2'), covar=tensor([0.0232, 0.0142, 0.0124, 0.0282, 0.0320, 0.0249, 0.0583, 0.0659], + device='cuda:2'), in_proj_covar=tensor([0.0070, 0.0060, 0.0063, 0.0069, 0.0089, 0.0067, 0.0077, 0.0085], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 15:57:32,498 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79941.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:57:32,632 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79941.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 15:58:07,277 INFO [train.py:898] (2/4) Epoch 23, batch 0, loss[loss=0.1819, simple_loss=0.2744, pruned_loss=0.0447, over 18466.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2744, pruned_loss=0.0447, over 18466.00 frames. ], batch size: 59, lr: 4.95e-03, grad_scale: 8.0 +2023-03-09 15:58:07,278 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 15:58:18,934 INFO [train.py:932] (2/4) Epoch 23, validation: loss=0.1494, simple_loss=0.2493, pruned_loss=0.02473, over 944034.00 frames. 
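The [train.py:923]/[train.py:932]/[train.py:933] triple marks the periodic validation pass: training pauses, the dev sets are scored once (hence the identical 944034.00-frame total at every validation), and peak CUDA memory is printed. A sketch of that pattern; forward_pass is a hypothetical helper, not the actual train.py API:

```python
import torch

def compute_validation_loss(model, valid_dl, forward_pass, device):
    """Score the dev set once and report frame-weighted average loss.

    forward_pass(model, batch) is assumed to return (summed_loss,
    num_frames) for one batch. The logged frame count is the same at
    every validation because the same dev cuts are scored each time.
    """
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_dl:
            loss, num_frames = forward_pass(model, batch)
            tot_loss += float(loss)
            tot_frames += num_frames
    model.train()

    peak_mb = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
    return tot_loss / tot_frames, peak_mb
```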
+2023-03-09 15:58:18,935 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 15:59:01,339 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.492e+02 2.757e+02 3.307e+02 4.142e+02 8.059e+02, threshold=6.615e+02, percent-clipped=1.0 +2023-03-09 15:59:06,140 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=79989.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 15:59:06,182 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79989.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:59:17,067 INFO [train.py:898] (2/4) Epoch 23, batch 50, loss[loss=0.1716, simple_loss=0.2641, pruned_loss=0.03955, over 17787.00 frames. ], tot_loss[loss=0.1619, simple_loss=0.2522, pruned_loss=0.03577, over 811299.18 frames. ], batch size: 70, lr: 4.95e-03, grad_scale: 8.0 +2023-03-09 15:59:38,349 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80013.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:59:46,555 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80020.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 15:59:50,472 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3234, 5.3245, 5.5865, 5.5862, 5.1739, 6.0893, 5.7237, 5.3079], + device='cuda:2'), covar=tensor([0.1141, 0.0593, 0.0811, 0.0894, 0.1461, 0.0748, 0.0754, 0.1710], + device='cuda:2'), in_proj_covar=tensor([0.0365, 0.0287, 0.0317, 0.0316, 0.0329, 0.0434, 0.0287, 0.0422], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 16:00:00,059 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80031.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 16:00:06,743 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80037.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:00:20,050 INFO [train.py:898] (2/4) Epoch 23, batch 100, loss[loss=0.1804, simple_loss=0.2641, pruned_loss=0.04832, over 12329.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2538, pruned_loss=0.03638, over 1404606.77 frames. ], batch size: 129, lr: 4.95e-03, grad_scale: 8.0 +2023-03-09 16:00:42,471 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80068.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:00:56,572 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80079.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 16:01:02,964 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.638e+02 2.619e+02 2.981e+02 3.602e+02 9.245e+02, threshold=5.963e+02, percent-clipped=1.0 +2023-03-09 16:01:18,869 INFO [train.py:898] (2/4) Epoch 23, batch 150, loss[loss=0.16, simple_loss=0.2552, pruned_loss=0.03234, over 18303.00 frames. ], tot_loss[loss=0.1619, simple_loss=0.252, pruned_loss=0.03586, over 1891516.03 frames. ], batch size: 54, lr: 4.95e-03, grad_scale: 8.0 +2023-03-09 16:01:40,173 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80118.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:01:58,610 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. 
limit=2.0 +2023-03-09 16:02:04,973 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7266, 3.8047, 3.7004, 3.2587, 3.5161, 2.9900, 2.9373, 3.7961], + device='cuda:2'), covar=tensor([0.0072, 0.0106, 0.0086, 0.0149, 0.0100, 0.0189, 0.0209, 0.0071], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0164, 0.0137, 0.0191, 0.0145, 0.0181, 0.0184, 0.0124], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 16:02:07,369 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8779, 3.5565, 5.0440, 2.8810, 4.3139, 2.5504, 3.1513, 1.7242], + device='cuda:2'), covar=tensor([0.1201, 0.0997, 0.0186, 0.0995, 0.0589, 0.2747, 0.2551, 0.2331], + device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0249, 0.0203, 0.0203, 0.0262, 0.0275, 0.0328, 0.0241], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 16:02:10,760 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6438, 2.9007, 2.4605, 2.8940, 3.7123, 3.6666, 3.1735, 2.9217], + device='cuda:2'), covar=tensor([0.0185, 0.0260, 0.0578, 0.0385, 0.0178, 0.0138, 0.0387, 0.0380], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0142, 0.0168, 0.0164, 0.0136, 0.0122, 0.0159, 0.0163], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 16:02:17,083 INFO [train.py:898] (2/4) Epoch 23, batch 200, loss[loss=0.1576, simple_loss=0.2458, pruned_loss=0.03467, over 18263.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2519, pruned_loss=0.03582, over 2279838.43 frames. ], batch size: 47, lr: 4.95e-03, grad_scale: 8.0 +2023-03-09 16:02:59,531 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.861e+02 2.844e+02 3.404e+02 4.018e+02 9.589e+02, threshold=6.808e+02, percent-clipped=5.0 +2023-03-09 16:03:15,927 INFO [train.py:898] (2/4) Epoch 23, batch 250, loss[loss=0.1679, simple_loss=0.2593, pruned_loss=0.03819, over 17029.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2528, pruned_loss=0.03592, over 2569326.21 frames. ], batch size: 78, lr: 4.94e-03, grad_scale: 8.0 +2023-03-09 16:03:33,274 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7454, 4.7284, 4.4441, 4.6309, 4.6569, 4.0899, 4.5999, 4.4083], + device='cuda:2'), covar=tensor([0.0452, 0.0520, 0.1178, 0.0804, 0.0654, 0.0465, 0.0492, 0.1203], + device='cuda:2'), in_proj_covar=tensor([0.0500, 0.0567, 0.0708, 0.0444, 0.0459, 0.0518, 0.0550, 0.0687], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 16:04:03,421 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9777, 5.4227, 2.8417, 5.2593, 5.1751, 5.4663, 5.2430, 2.7603], + device='cuda:2'), covar=tensor([0.0235, 0.0099, 0.0803, 0.0074, 0.0081, 0.0085, 0.0124, 0.0984], + device='cuda:2'), in_proj_covar=tensor([0.0088, 0.0080, 0.0094, 0.0095, 0.0085, 0.0076, 0.0085, 0.0096], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 16:04:05,499 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80241.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:04:14,919 INFO [train.py:898] (2/4) Epoch 23, batch 300, loss[loss=0.1555, simple_loss=0.2354, pruned_loss=0.03784, over 17743.00 frames. 
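The [zipformer.py:1455] dumps are per-head attention diagnostics: a vector with one entropy value per attention head (eight heads in the tensors above), plus covariance summaries of the input/output projections. Entropy near zero flags a head that always attends to a single position; entropy near log(num_keys) flags a nearly uniform head. A sketch of the entropy part, assuming attention weights shaped (num_heads, num_queries, num_keys):

```python
import torch

def attn_weights_entropy(attn_weights, eps=1.0e-20):
    """Mean entropy of each head's attention distribution, in nats.

    attn_weights: (num_heads, num_queries, num_keys), rows summing to 1
    after softmax. Returns one value per head, like the first tensor in
    each [zipformer.py:1455] record. A sketch of the quantity being
    logged, not the actual implementation.
    """
    p = attn_weights.clamp(min=eps)
    entropy = -(p * p.log()).sum(dim=-1)   # (num_heads, num_queries)
    return entropy.mean(dim=-1)            # (num_heads,)
```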
], tot_loss[loss=0.1615, simple_loss=0.2515, pruned_loss=0.03575, over 2799615.12 frames. ], batch size: 39, lr: 4.94e-03, grad_scale: 8.0 +2023-03-09 16:04:52,929 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80283.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:04:54,788 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.957e+02 2.544e+02 3.151e+02 3.664e+02 8.600e+02, threshold=6.302e+02, percent-clipped=1.0 +2023-03-09 16:05:00,694 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80289.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:05:12,226 INFO [train.py:898] (2/4) Epoch 23, batch 350, loss[loss=0.149, simple_loss=0.2343, pruned_loss=0.03185, over 18528.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2525, pruned_loss=0.0361, over 2957833.88 frames. ], batch size: 49, lr: 4.94e-03, grad_scale: 8.0 +2023-03-09 16:05:21,614 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.3439, 2.0602, 1.9748, 2.0345, 2.4382, 2.4683, 2.3141, 2.1507], + device='cuda:2'), covar=tensor([0.0242, 0.0264, 0.0490, 0.0374, 0.0229, 0.0210, 0.0396, 0.0330], + device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0140, 0.0166, 0.0163, 0.0135, 0.0121, 0.0158, 0.0161], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 16:05:27,998 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80313.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:05:59,738 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1638, 5.2522, 5.5798, 5.6300, 5.1368, 6.0595, 5.6869, 5.4324], + device='cuda:2'), covar=tensor([0.1231, 0.0602, 0.0753, 0.0795, 0.1331, 0.0709, 0.0642, 0.1370], + device='cuda:2'), in_proj_covar=tensor([0.0364, 0.0287, 0.0317, 0.0315, 0.0330, 0.0432, 0.0285, 0.0422], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 16:06:04,402 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80344.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:06:09,756 INFO [train.py:898] (2/4) Epoch 23, batch 400, loss[loss=0.1558, simple_loss=0.2483, pruned_loss=0.03167, over 18195.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.252, pruned_loss=0.03629, over 3074336.26 frames. ], batch size: 60, lr: 4.94e-03, grad_scale: 8.0 +2023-03-09 16:06:23,709 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80361.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:06:50,967 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.659e+02 2.745e+02 3.129e+02 3.783e+02 6.813e+02, threshold=6.257e+02, percent-clipped=3.0 +2023-03-09 16:07:02,245 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80393.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:07:08,711 INFO [train.py:898] (2/4) Epoch 23, batch 450, loss[loss=0.1461, simple_loss=0.2402, pruned_loss=0.02603, over 18280.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2513, pruned_loss=0.03585, over 3186412.33 frames. 
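The three numbers in every loss record are tied together: throughout this section, loss = 0.5 * simple_loss + pruned_loss holds to the printed precision (for batch 350 just above, 0.5 * 0.2525 + 0.0361 = 0.1624). The printed loss is therefore the pruned-transducer objective with the simple (smoothed) term down-weighted by half; the 0.5 weight is inferred from the records themselves, not read from the code:

```python
def combined_loss(simple_loss, pruned_loss, simple_loss_scale=0.5):
    """Combine the two transducer loss terms as the records report them."""
    return simple_loss_scale * simple_loss + pruned_loss

# Epoch 23, batch 350 from the log above:
assert abs(combined_loss(0.2525, 0.0361) - 0.1624) < 5e-4
```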
], batch size: 47, lr: 4.94e-03, grad_scale: 8.0 +2023-03-09 16:07:31,470 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80418.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:08:07,201 INFO [train.py:898] (2/4) Epoch 23, batch 500, loss[loss=0.1856, simple_loss=0.2764, pruned_loss=0.04737, over 18472.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2509, pruned_loss=0.03561, over 3281384.59 frames. ], batch size: 59, lr: 4.94e-03, grad_scale: 8.0 +2023-03-09 16:08:13,212 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80454.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:08:23,723 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80463.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:08:26,787 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80466.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:08:33,858 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 16:08:47,895 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.857e+02 2.536e+02 3.027e+02 3.548e+02 8.560e+02, threshold=6.054e+02, percent-clipped=2.0 +2023-03-09 16:08:58,981 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80494.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:09:05,400 INFO [train.py:898] (2/4) Epoch 23, batch 550, loss[loss=0.161, simple_loss=0.2452, pruned_loss=0.0384, over 17786.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2518, pruned_loss=0.03583, over 3351400.55 frames. ], batch size: 70, lr: 4.93e-03, grad_scale: 8.0 +2023-03-09 16:09:34,069 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80524.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:10:02,835 INFO [train.py:898] (2/4) Epoch 23, batch 600, loss[loss=0.1653, simple_loss=0.2589, pruned_loss=0.03588, over 18208.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2509, pruned_loss=0.03548, over 3402592.94 frames. ], batch size: 60, lr: 4.93e-03, grad_scale: 8.0 +2023-03-09 16:10:05,903 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3626, 5.3414, 5.6707, 5.7138, 5.3336, 6.1921, 5.8394, 5.3898], + device='cuda:2'), covar=tensor([0.1147, 0.0557, 0.0763, 0.0741, 0.1393, 0.0639, 0.0603, 0.1910], + device='cuda:2'), in_proj_covar=tensor([0.0362, 0.0285, 0.0316, 0.0314, 0.0329, 0.0430, 0.0284, 0.0422], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 16:10:10,555 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80555.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:10:44,262 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.750e+02 2.691e+02 3.309e+02 4.060e+02 6.105e+02, threshold=6.618e+02, percent-clipped=2.0 +2023-03-09 16:10:59,903 INFO [train.py:898] (2/4) Epoch 23, batch 650, loss[loss=0.1543, simple_loss=0.2352, pruned_loss=0.03671, over 18401.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2516, pruned_loss=0.03598, over 3433109.09 frames. ], batch size: 42, lr: 4.93e-03, grad_scale: 8.0 +2023-03-09 16:11:14,968 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. 
limit=2.0 +2023-03-09 16:11:24,927 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6694, 2.9329, 2.8047, 2.9611, 3.7834, 3.7397, 3.2210, 3.1320], + device='cuda:2'), covar=tensor([0.0165, 0.0309, 0.0521, 0.0392, 0.0185, 0.0136, 0.0384, 0.0359], + device='cuda:2'), in_proj_covar=tensor([0.0142, 0.0140, 0.0164, 0.0161, 0.0135, 0.0120, 0.0157, 0.0160], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 16:11:35,144 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.3148, 3.7714, 2.2595, 3.6500, 4.4937, 2.6032, 3.2996, 3.3926], + device='cuda:2'), covar=tensor([0.0244, 0.1130, 0.1732, 0.0702, 0.0140, 0.1188, 0.0850, 0.0936], + device='cuda:2'), in_proj_covar=tensor([0.0170, 0.0270, 0.0204, 0.0196, 0.0130, 0.0182, 0.0215, 0.0225], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 16:11:47,300 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80639.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:11:58,521 INFO [train.py:898] (2/4) Epoch 23, batch 700, loss[loss=0.1392, simple_loss=0.2266, pruned_loss=0.02588, over 18369.00 frames. ], tot_loss[loss=0.1613, simple_loss=0.251, pruned_loss=0.03576, over 3466650.78 frames. ], batch size: 46, lr: 4.93e-03, grad_scale: 8.0 +2023-03-09 16:12:41,894 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.765e+02 2.683e+02 3.069e+02 3.814e+02 7.668e+02, threshold=6.138e+02, percent-clipped=2.0 +2023-03-09 16:12:46,772 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.6863, 6.1155, 5.7055, 5.9252, 5.7288, 5.6193, 6.2270, 6.1298], + device='cuda:2'), covar=tensor([0.1208, 0.0813, 0.0397, 0.0701, 0.1481, 0.0716, 0.0585, 0.0734], + device='cuda:2'), in_proj_covar=tensor([0.0623, 0.0541, 0.0387, 0.0565, 0.0765, 0.0560, 0.0774, 0.0589], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 16:12:57,648 INFO [train.py:898] (2/4) Epoch 23, batch 750, loss[loss=0.1368, simple_loss=0.225, pruned_loss=0.02425, over 18580.00 frames. ], tot_loss[loss=0.161, simple_loss=0.2505, pruned_loss=0.03576, over 3493108.78 frames. ], batch size: 45, lr: 4.93e-03, grad_scale: 8.0 +2023-03-09 16:13:41,971 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80736.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:13:56,516 INFO [train.py:898] (2/4) Epoch 23, batch 800, loss[loss=0.1581, simple_loss=0.2559, pruned_loss=0.03016, over 18322.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2506, pruned_loss=0.03562, over 3517435.44 frames. 
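The [scaling.py:679] records come from whitening checks on intermediate activations: each module computes a scalar metric over the channel covariance of its output, equal to 1.0 when the covariance is already white (all eigenvalues equal) and growing as a few directions dominate, then compares it with a module-specific limit (2.0 for the grouped 96/192-channel checks here, 5.0 for the num_groups=1, 384-channel ones); in this stretch the metric never exceeds its limit. An eigenvalue-based sketch of such a metric, offered as an interpretation of the logged numbers rather than the exact library code:

```python
import torch

def whitening_metric(x, num_groups):
    """1.0 when each group's channel covariance is white, larger otherwise.

    x: (num_frames, num_channels); channels are split into num_groups
    groups, as in the logged num_groups/num_channels pairs. Per group,
    the metric is mean(eigenvalues**2) / mean(eigenvalues)**2 of the
    covariance, averaged over groups.
    """
    num_frames, num_channels = x.shape
    groups = x.reshape(num_frames, num_groups, num_channels // num_groups)
    metrics = []
    for i in range(num_groups):
        xi = groups[:, i, :]
        cov = (xi.T @ xi) / num_frames
        ev = torch.linalg.eigvalsh(cov)          # ascending eigenvalues
        metrics.append((ev ** 2).mean() / ev.mean() ** 2)
    return torch.stack(metrics).mean()
```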
], batch size: 54, lr: 4.93e-03, grad_scale: 8.0 +2023-03-09 16:13:56,760 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80749.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:14:22,826 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8888, 5.4301, 5.4144, 5.4171, 4.8806, 5.3153, 4.7680, 5.2664], + device='cuda:2'), covar=tensor([0.0221, 0.0266, 0.0169, 0.0315, 0.0401, 0.0201, 0.0956, 0.0298], + device='cuda:2'), in_proj_covar=tensor([0.0218, 0.0260, 0.0256, 0.0332, 0.0272, 0.0268, 0.0305, 0.0261], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 16:14:25,111 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80773.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:14:27,584 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8880, 3.7774, 5.0643, 4.5456, 3.5331, 3.1710, 4.5704, 5.2863], + device='cuda:2'), covar=tensor([0.0809, 0.1614, 0.0229, 0.0401, 0.0924, 0.1162, 0.0387, 0.0228], + device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0279, 0.0160, 0.0184, 0.0193, 0.0193, 0.0199, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 16:14:38,919 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.739e+02 2.483e+02 2.864e+02 3.599e+02 5.476e+02, threshold=5.727e+02, percent-clipped=0.0 +2023-03-09 16:14:52,985 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80797.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:14:54,771 INFO [train.py:898] (2/4) Epoch 23, batch 850, loss[loss=0.1607, simple_loss=0.2516, pruned_loss=0.03494, over 18385.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2517, pruned_loss=0.0359, over 3539541.72 frames. ], batch size: 52, lr: 4.93e-03, grad_scale: 8.0 +2023-03-09 16:15:18,313 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80819.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:15:36,668 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80834.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:15:51,375 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 16:15:52,796 INFO [train.py:898] (2/4) Epoch 23, batch 900, loss[loss=0.16, simple_loss=0.2549, pruned_loss=0.03253, over 18304.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2515, pruned_loss=0.03594, over 3549896.79 frames. ], batch size: 54, lr: 4.92e-03, grad_scale: 8.0 +2023-03-09 16:15:54,146 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80850.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:16:34,845 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.923e+02 2.539e+02 2.932e+02 3.679e+02 1.116e+03, threshold=5.864e+02, percent-clipped=6.0 +2023-03-09 16:16:51,058 INFO [train.py:898] (2/4) Epoch 23, batch 950, loss[loss=0.1643, simple_loss=0.2618, pruned_loss=0.0334, over 16172.00 frames. ], tot_loss[loss=0.1614, simple_loss=0.2515, pruned_loss=0.03567, over 3563019.91 frames. 
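The tot_loss[...] entries are running statistics, not exact epoch sums: the frame totals are fractional (3549896.79 above) and saturate near 3.6M frames, while individual batches carry roughly 18k frames. Both facts fit an exponentially decayed accumulator with a decay of about 0.995 per batch: the steady state is then ~18e3 / 0.005 = 3.6e6 frames, and the early-epoch totals above (811299.18 at batch 50, 1404606.77 at batch 100) track 3.6e6 * (1 - 0.995^n) to within a couple of percent. A sketch of such a tracker; the decay constant is inferred from the numbers, not taken from the code:

```python
class RunningLoss:
    """Decayed running sums of (loss * frames, frames).

    Each update scales the accumulators by `decay` before adding the new
    batch, so old batches fade out; this reproduces the fractional,
    saturating frame totals in the tot_loss records.
    """

    def __init__(self, decay=0.995):
        self.decay = decay
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss, batch_frames):
        self.loss_sum = self.loss_sum * self.decay + batch_loss * batch_frames
        self.frames = self.frames * self.decay + batch_frames
        return self.loss_sum / self.frames  # the printed tot_loss
```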
], batch size: 94, lr: 4.92e-03, grad_scale: 8.0 +2023-03-09 16:17:37,161 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80939.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:17:48,636 INFO [train.py:898] (2/4) Epoch 23, batch 1000, loss[loss=0.1669, simple_loss=0.2576, pruned_loss=0.03807, over 16054.00 frames. ], tot_loss[loss=0.1614, simple_loss=0.2517, pruned_loss=0.03551, over 3574002.73 frames. ], batch size: 94, lr: 4.92e-03, grad_scale: 8.0 +2023-03-09 16:17:56,007 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-03-09 16:18:30,653 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.676e+02 2.676e+02 3.165e+02 3.583e+02 7.202e+02, threshold=6.331e+02, percent-clipped=5.0 +2023-03-09 16:18:33,152 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=80987.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:18:47,010 INFO [train.py:898] (2/4) Epoch 23, batch 1050, loss[loss=0.1568, simple_loss=0.2476, pruned_loss=0.03302, over 18371.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.2506, pruned_loss=0.03533, over 3581533.42 frames. ], batch size: 46, lr: 4.92e-03, grad_scale: 8.0 +2023-03-09 16:19:11,164 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7675, 3.6498, 4.9757, 4.3863, 3.2947, 2.9734, 4.4246, 5.1738], + device='cuda:2'), covar=tensor([0.0845, 0.1397, 0.0179, 0.0402, 0.0997, 0.1215, 0.0399, 0.0222], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0281, 0.0160, 0.0185, 0.0194, 0.0194, 0.0199, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 16:19:12,416 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7823, 2.5420, 2.8290, 2.9096, 3.4833, 4.9843, 4.8284, 3.4438], + device='cuda:2'), covar=tensor([0.1807, 0.2385, 0.2970, 0.1775, 0.2235, 0.0256, 0.0368, 0.0986], + device='cuda:2'), in_proj_covar=tensor([0.0309, 0.0352, 0.0389, 0.0283, 0.0393, 0.0253, 0.0299, 0.0260], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 16:19:45,399 INFO [train.py:898] (2/4) Epoch 23, batch 1100, loss[loss=0.1706, simple_loss=0.2669, pruned_loss=0.03714, over 18633.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2501, pruned_loss=0.03494, over 3598785.19 frames. ], batch size: 52, lr: 4.92e-03, grad_scale: 8.0 +2023-03-09 16:19:45,685 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81049.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:20:27,977 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.813e+02 2.500e+02 2.967e+02 3.515e+02 7.145e+02, threshold=5.934e+02, percent-clipped=1.0 +2023-03-09 16:20:35,941 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81092.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:20:41,604 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81097.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:20:43,684 INFO [train.py:898] (2/4) Epoch 23, batch 1150, loss[loss=0.1401, simple_loss=0.2193, pruned_loss=0.03044, over 18449.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.25, pruned_loss=0.03518, over 3591541.12 frames. ], batch size: 43, lr: 4.92e-03, grad_scale: 8.0 +2023-03-09 16:20:51,703 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. 
limit=2.0 +2023-03-09 16:21:03,475 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81116.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:21:06,586 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81119.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:21:18,132 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81129.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:21:41,486 INFO [train.py:898] (2/4) Epoch 23, batch 1200, loss[loss=0.1748, simple_loss=0.2655, pruned_loss=0.04204, over 17985.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2508, pruned_loss=0.03538, over 3594724.96 frames. ], batch size: 65, lr: 4.91e-03, grad_scale: 8.0 +2023-03-09 16:21:42,953 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81150.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:22:02,577 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81167.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:22:02,907 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6357, 2.3541, 2.6523, 2.6082, 3.1784, 4.7545, 4.6465, 3.3625], + device='cuda:2'), covar=tensor([0.1901, 0.2464, 0.2857, 0.1965, 0.2420, 0.0250, 0.0403, 0.0999], + device='cuda:2'), in_proj_covar=tensor([0.0309, 0.0353, 0.0390, 0.0284, 0.0393, 0.0253, 0.0299, 0.0261], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 16:22:13,718 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81177.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:22:14,901 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9422, 3.7703, 5.0647, 4.5797, 3.4066, 3.1397, 4.5158, 5.3123], + device='cuda:2'), covar=tensor([0.0800, 0.1422, 0.0198, 0.0378, 0.0942, 0.1152, 0.0384, 0.0201], + device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0279, 0.0160, 0.0185, 0.0193, 0.0192, 0.0199, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 16:22:22,852 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.778e+02 2.664e+02 3.163e+02 3.705e+02 6.920e+02, threshold=6.326e+02, percent-clipped=3.0 +2023-03-09 16:22:39,003 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81198.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:22:39,917 INFO [train.py:898] (2/4) Epoch 23, batch 1250, loss[loss=0.1401, simple_loss=0.2219, pruned_loss=0.02915, over 18437.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2499, pruned_loss=0.0351, over 3597834.91 frames. ], batch size: 43, lr: 4.91e-03, grad_scale: 8.0 +2023-03-09 16:23:39,293 INFO [train.py:898] (2/4) Epoch 23, batch 1300, loss[loss=0.1664, simple_loss=0.2619, pruned_loss=0.03545, over 16035.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2499, pruned_loss=0.03514, over 3589252.76 frames. 
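The lr field decays smoothly with the batch count (5.08e-03 around batch 79,400 in epoch 22 above, 4.91e-03 by batch ~81,300 here) and steps down at each epoch boundary (4.95e-03 at epoch 23, batch 0). Both behaviors match an Eden-style schedule in which the batch index and the completed-epoch count each contribute an inverse-fourth-root factor. The constants below are assumptions, but with them the formula reproduces the printed values at both points checked:

```python
def eden_lr(base_lr, batch, epoch, lr_batches=5000.0, lr_epochs=3.5):
    """Eden-style LR: smooth decay in both batches and completed epochs.

    All constants here (base_lr=0.05, lr_batches, lr_epochs) are assumed,
    not read from this log; the asserts below match the records to the
    printed precision.
    """
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

# Late epoch 22 (21 completed epochs) and start of epoch 23 (22 completed):
assert abs(eden_lr(0.05, 79400, 21) - 5.08e-3) < 5e-6
assert abs(eden_lr(0.05, 80000, 22) - 4.95e-3) < 5e-6
```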
], batch size: 94, lr: 4.91e-03, grad_scale: 8.0 +2023-03-09 16:24:22,200 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.771e+02 2.557e+02 2.964e+02 3.868e+02 7.836e+02, threshold=5.928e+02, percent-clipped=1.0 +2023-03-09 16:24:28,394 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7256, 3.6135, 4.8614, 4.3654, 3.2295, 3.0390, 4.3859, 5.0939], + device='cuda:2'), covar=tensor([0.0778, 0.1327, 0.0226, 0.0374, 0.0930, 0.1071, 0.0368, 0.0194], + device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0277, 0.0158, 0.0183, 0.0192, 0.0191, 0.0197, 0.0202], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 16:24:28,423 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7981, 3.5373, 5.0219, 2.9520, 4.3722, 2.6361, 3.0613, 1.8653], + device='cuda:2'), covar=tensor([0.1183, 0.0949, 0.0153, 0.0892, 0.0516, 0.2500, 0.2490, 0.2100], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0247, 0.0204, 0.0201, 0.0259, 0.0274, 0.0326, 0.0238], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 16:24:37,825 INFO [train.py:898] (2/4) Epoch 23, batch 1350, loss[loss=0.1668, simple_loss=0.2514, pruned_loss=0.04111, over 18266.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.2508, pruned_loss=0.03531, over 3589576.47 frames. ], batch size: 47, lr: 4.91e-03, grad_scale: 4.0 +2023-03-09 16:24:38,285 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81299.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:24:40,419 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81301.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:25:12,910 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7576, 4.3290, 4.3233, 3.3020, 3.5938, 3.3047, 2.5917, 2.4116], + device='cuda:2'), covar=tensor([0.0238, 0.0161, 0.0085, 0.0323, 0.0350, 0.0270, 0.0730, 0.0906], + device='cuda:2'), in_proj_covar=tensor([0.0072, 0.0061, 0.0064, 0.0070, 0.0091, 0.0068, 0.0078, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 16:25:36,790 INFO [train.py:898] (2/4) Epoch 23, batch 1400, loss[loss=0.1681, simple_loss=0.2595, pruned_loss=0.03835, over 18390.00 frames. ], tot_loss[loss=0.1612, simple_loss=0.2514, pruned_loss=0.03546, over 3587019.48 frames. 
], batch size: 52, lr: 4.91e-03, grad_scale: 4.0 +2023-03-09 16:25:49,586 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81360.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:25:51,955 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81362.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:26:16,141 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5574, 6.0307, 5.6725, 5.8302, 5.6799, 5.5011, 6.1084, 6.0632], + device='cuda:2'), covar=tensor([0.1109, 0.0747, 0.0380, 0.0693, 0.1357, 0.0682, 0.0573, 0.0670], + device='cuda:2'), in_proj_covar=tensor([0.0615, 0.0531, 0.0382, 0.0560, 0.0757, 0.0553, 0.0764, 0.0580], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 16:26:16,224 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9659, 5.0313, 5.0501, 4.7566, 4.8793, 4.8634, 5.1895, 5.1446], + device='cuda:2'), covar=tensor([0.0080, 0.0069, 0.0058, 0.0117, 0.0056, 0.0160, 0.0068, 0.0091], + device='cuda:2'), in_proj_covar=tensor([0.0095, 0.0071, 0.0075, 0.0094, 0.0076, 0.0105, 0.0089, 0.0087], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 16:26:19,227 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.872e+02 2.620e+02 3.077e+02 3.710e+02 7.565e+02, threshold=6.154e+02, percent-clipped=6.0 +2023-03-09 16:26:24,033 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-03-09 16:26:26,827 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81392.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:26:35,545 INFO [train.py:898] (2/4) Epoch 23, batch 1450, loss[loss=0.2057, simple_loss=0.2862, pruned_loss=0.06263, over 12539.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2517, pruned_loss=0.03564, over 3580380.11 frames. ], batch size: 129, lr: 4.91e-03, grad_scale: 4.0 +2023-03-09 16:26:49,030 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.39 vs. limit=5.0 +2023-03-09 16:27:10,714 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81429.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:27:22,049 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-03-09 16:27:23,600 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81440.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:27:34,478 INFO [train.py:898] (2/4) Epoch 23, batch 1500, loss[loss=0.1607, simple_loss=0.2443, pruned_loss=0.03857, over 18511.00 frames. ], tot_loss[loss=0.1616, simple_loss=0.2518, pruned_loss=0.03569, over 3570281.15 frames. ], batch size: 47, lr: 4.91e-03, grad_scale: 4.0 +2023-03-09 16:28:01,899 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81472.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:28:07,571 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81477.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:28:17,386 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.729e+02 2.620e+02 2.957e+02 3.344e+02 7.866e+02, threshold=5.914e+02, percent-clipped=3.0 +2023-03-09 16:28:32,687 INFO [train.py:898] (2/4) Epoch 23, batch 1550, loss[loss=0.1443, simple_loss=0.2377, pruned_loss=0.02539, over 18388.00 frames. 
], tot_loss[loss=0.161, simple_loss=0.2511, pruned_loss=0.03547, over 3580085.43 frames. ], batch size: 50, lr: 4.90e-03, grad_scale: 4.0 +2023-03-09 16:29:01,623 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3318, 5.3271, 4.9259, 5.2713, 5.2555, 4.6571, 5.1850, 4.9744], + device='cuda:2'), covar=tensor([0.0443, 0.0440, 0.1403, 0.0726, 0.0601, 0.0426, 0.0441, 0.0987], + device='cuda:2'), in_proj_covar=tensor([0.0498, 0.0569, 0.0703, 0.0440, 0.0459, 0.0519, 0.0546, 0.0681], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 16:29:31,014 INFO [train.py:898] (2/4) Epoch 23, batch 1600, loss[loss=0.1846, simple_loss=0.2678, pruned_loss=0.05072, over 13159.00 frames. ], tot_loss[loss=0.1613, simple_loss=0.2515, pruned_loss=0.03555, over 3584604.96 frames. ], batch size: 132, lr: 4.90e-03, grad_scale: 8.0 +2023-03-09 16:29:36,938 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-09 16:30:15,110 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.871e+02 2.795e+02 3.306e+02 4.129e+02 9.714e+02, threshold=6.611e+02, percent-clipped=7.0 +2023-03-09 16:30:29,179 INFO [train.py:898] (2/4) Epoch 23, batch 1650, loss[loss=0.1494, simple_loss=0.2364, pruned_loss=0.03117, over 18386.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2519, pruned_loss=0.03578, over 3573204.76 frames. ], batch size: 42, lr: 4.90e-03, grad_scale: 4.0 +2023-03-09 16:31:06,880 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-03-09 16:31:28,183 INFO [train.py:898] (2/4) Epoch 23, batch 1700, loss[loss=0.1795, simple_loss=0.2747, pruned_loss=0.04218, over 18290.00 frames. ], tot_loss[loss=0.1613, simple_loss=0.2515, pruned_loss=0.03555, over 3589026.21 frames. 
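The grad_scale field is the dynamic fp16 loss scale: in the records around this point it reads 8.0 at batch 1600, is halved to 4.0 by batch 1650 and to 2.0 by batch 1800, then is doubled back to 4.0 around batch 2000 and to 8.0 by batch 2400. Halving after a step whose scaled gradients overflow and doubling after a run of clean steps is the standard behavior of torch.cuda.amp.GradScaler; a sketch of that usage, where init_scale, the growth interval, and the compute_loss helper are assumptions:

```python
import torch

# The backoff/growth factors match the 2.0 <-> 4.0 <-> 8.0 moves in the
# surrounding records; init_scale and growth_interval are guesses.
scaler = torch.cuda.amp.GradScaler(
    init_scale=8.0,
    growth_factor=2.0,     # 2 -> 4 -> 8 after enough clean steps
    backoff_factor=0.5,    # 8 -> 4 -> 2 on inf/NaN gradients
    growth_interval=200,   # assumption: clean steps between doublings
)

def training_step(model, optimizer, batch, compute_loss):
    with torch.cuda.amp.autocast():
        loss = compute_loss(model, batch)  # hypothetical helper
    optimizer.zero_grad()
    scaler.scale(loss).backward()
    scaler.step(optimizer)  # skipped internally if gradients overflowed
    scaler.update()         # adjusts the scale: the logged grad_scale
    return loss.detach()
```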
], batch size: 57, lr: 4.90e-03, grad_scale: 4.0 +2023-03-09 16:31:35,757 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81655.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:31:35,933 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1177, 2.5731, 3.2163, 3.0505, 2.4639, 3.3802, 3.2414, 2.3905], + device='cuda:2'), covar=tensor([0.0497, 0.1249, 0.0483, 0.0473, 0.1410, 0.0358, 0.0726, 0.0986], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0239, 0.0215, 0.0167, 0.0227, 0.0213, 0.0250, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 16:31:38,672 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81657.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:31:42,176 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3231, 5.2410, 5.5068, 5.5325, 5.1788, 6.0541, 5.6782, 5.3870], + device='cuda:2'), covar=tensor([0.1041, 0.0656, 0.0758, 0.0745, 0.1240, 0.0684, 0.0601, 0.1661], + device='cuda:2'), in_proj_covar=tensor([0.0363, 0.0290, 0.0315, 0.0316, 0.0331, 0.0432, 0.0285, 0.0422], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 16:31:53,580 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81670.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:32:09,006 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8964, 4.0698, 2.4811, 4.0359, 5.0820, 2.3623, 3.6873, 3.9917], + device='cuda:2'), covar=tensor([0.0189, 0.1268, 0.1786, 0.0738, 0.0140, 0.1584, 0.0816, 0.0779], + device='cuda:2'), in_proj_covar=tensor([0.0174, 0.0277, 0.0208, 0.0201, 0.0134, 0.0188, 0.0219, 0.0230], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 16:32:13,038 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 2.453e+02 2.824e+02 3.386e+02 8.042e+02, threshold=5.649e+02, percent-clipped=1.0 +2023-03-09 16:32:26,757 INFO [train.py:898] (2/4) Epoch 23, batch 1750, loss[loss=0.1447, simple_loss=0.24, pruned_loss=0.0247, over 18324.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2512, pruned_loss=0.03549, over 3586782.59 frames. ], batch size: 54, lr: 4.90e-03, grad_scale: 4.0 +2023-03-09 16:32:35,440 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6902, 5.2337, 5.2088, 5.1582, 4.6984, 5.0783, 4.5195, 5.0678], + device='cuda:2'), covar=tensor([0.0281, 0.0301, 0.0199, 0.0456, 0.0433, 0.0267, 0.1131, 0.0337], + device='cuda:2'), in_proj_covar=tensor([0.0219, 0.0265, 0.0259, 0.0335, 0.0274, 0.0270, 0.0309, 0.0262], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 16:33:04,579 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81731.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 16:33:25,111 INFO [train.py:898] (2/4) Epoch 23, batch 1800, loss[loss=0.1557, simple_loss=0.2503, pruned_loss=0.03058, over 18555.00 frames. ], tot_loss[loss=0.1612, simple_loss=0.2512, pruned_loss=0.03556, over 3589528.35 frames. 
], batch size: 54, lr: 4.90e-03, grad_scale: 2.0 +2023-03-09 16:33:52,334 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81772.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:34:10,548 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.038e+02 2.659e+02 3.003e+02 3.499e+02 8.184e+02, threshold=6.005e+02, percent-clipped=4.0 +2023-03-09 16:34:23,256 INFO [train.py:898] (2/4) Epoch 23, batch 1850, loss[loss=0.1444, simple_loss=0.2348, pruned_loss=0.027, over 18244.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.2505, pruned_loss=0.03534, over 3584041.21 frames. ], batch size: 45, lr: 4.90e-03, grad_scale: 2.0 +2023-03-09 16:34:35,646 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8152, 3.6512, 4.9296, 4.3814, 3.4625, 3.0838, 4.4535, 5.1259], + device='cuda:2'), covar=tensor([0.0753, 0.1371, 0.0232, 0.0394, 0.0860, 0.1087, 0.0365, 0.0328], + device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0278, 0.0160, 0.0183, 0.0192, 0.0192, 0.0197, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 16:34:48,396 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=81820.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:34:48,862 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-03-09 16:35:21,109 INFO [train.py:898] (2/4) Epoch 23, batch 1900, loss[loss=0.139, simple_loss=0.2289, pruned_loss=0.02453, over 18164.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2502, pruned_loss=0.03511, over 3590674.16 frames. ], batch size: 44, lr: 4.89e-03, grad_scale: 2.0 +2023-03-09 16:36:07,197 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.831e+02 2.605e+02 3.105e+02 3.631e+02 5.941e+02, threshold=6.209e+02, percent-clipped=0.0 +2023-03-09 16:36:20,014 INFO [train.py:898] (2/4) Epoch 23, batch 1950, loss[loss=0.1617, simple_loss=0.2549, pruned_loss=0.03421, over 18361.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2504, pruned_loss=0.03508, over 3590741.30 frames. ], batch size: 55, lr: 4.89e-03, grad_scale: 2.0 +2023-03-09 16:36:21,595 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5310, 2.8017, 2.5756, 2.8082, 3.6449, 3.5182, 3.1253, 2.8960], + device='cuda:2'), covar=tensor([0.0262, 0.0289, 0.0519, 0.0370, 0.0173, 0.0180, 0.0348, 0.0382], + device='cuda:2'), in_proj_covar=tensor([0.0141, 0.0139, 0.0162, 0.0161, 0.0134, 0.0120, 0.0155, 0.0160], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 16:37:17,995 INFO [train.py:898] (2/4) Epoch 23, batch 2000, loss[loss=0.1542, simple_loss=0.2325, pruned_loss=0.03791, over 18369.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2509, pruned_loss=0.03537, over 3587676.69 frames. ], batch size: 42, lr: 4.89e-03, grad_scale: 4.0 +2023-03-09 16:37:23,120 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.48 vs. limit=2.0 +2023-03-09 16:37:23,365 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. 
limit=2.0 +2023-03-09 16:37:25,064 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81955.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:37:27,472 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81957.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:37:29,640 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3645, 5.3895, 5.6709, 5.6929, 5.1674, 6.1376, 5.8783, 5.3879], + device='cuda:2'), covar=tensor([0.1255, 0.0584, 0.0839, 0.0747, 0.1678, 0.0813, 0.0738, 0.1874], + device='cuda:2'), in_proj_covar=tensor([0.0364, 0.0289, 0.0314, 0.0318, 0.0333, 0.0431, 0.0285, 0.0422], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 16:38:03,386 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81987.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:38:04,116 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.810e+02 2.510e+02 2.896e+02 3.401e+02 7.542e+02, threshold=5.791e+02, percent-clipped=1.0 +2023-03-09 16:38:17,091 INFO [train.py:898] (2/4) Epoch 23, batch 2050, loss[loss=0.1685, simple_loss=0.2673, pruned_loss=0.03487, over 18503.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.251, pruned_loss=0.03523, over 3583073.10 frames. ], batch size: 51, lr: 4.89e-03, grad_scale: 4.0 +2023-03-09 16:38:26,634 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82003.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:38:28,883 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82005.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:38:54,188 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82026.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 16:39:02,552 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-09 16:39:19,055 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82048.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:39:19,765 INFO [train.py:898] (2/4) Epoch 23, batch 2100, loss[loss=0.1427, simple_loss=0.2378, pruned_loss=0.02376, over 18358.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2513, pruned_loss=0.0352, over 3586631.54 frames. ], batch size: 46, lr: 4.89e-03, grad_scale: 4.0 +2023-03-09 16:39:27,617 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82055.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:40:05,447 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.125e+02 2.645e+02 3.113e+02 3.776e+02 5.395e+02, threshold=6.226e+02, percent-clipped=0.0 +2023-03-09 16:40:18,026 INFO [train.py:898] (2/4) Epoch 23, batch 2150, loss[loss=0.1771, simple_loss=0.2705, pruned_loss=0.04182, over 18483.00 frames. ], tot_loss[loss=0.161, simple_loss=0.2513, pruned_loss=0.03539, over 3588631.15 frames. ], batch size: 53, lr: 4.89e-03, grad_scale: 4.0 +2023-03-09 16:40:38,322 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82116.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:41:16,330 INFO [train.py:898] (2/4) Epoch 23, batch 2200, loss[loss=0.1742, simple_loss=0.267, pruned_loss=0.04067, over 18255.00 frames. ], tot_loss[loss=0.1614, simple_loss=0.2517, pruned_loss=0.03552, over 3581694.62 frames. 
], batch size: 60, lr: 4.88e-03, grad_scale: 4.0 +2023-03-09 16:41:21,971 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4549, 3.3097, 2.0537, 4.2839, 2.9846, 4.1150, 2.5180, 3.8481], + device='cuda:2'), covar=tensor([0.0722, 0.0906, 0.1621, 0.0565, 0.0926, 0.0312, 0.1191, 0.0431], + device='cuda:2'), in_proj_covar=tensor([0.0217, 0.0230, 0.0191, 0.0290, 0.0194, 0.0269, 0.0206, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 16:41:56,995 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9435, 3.9358, 3.9873, 3.8790, 3.9006, 3.9147, 3.9783, 4.0195], + device='cuda:2'), covar=tensor([0.0086, 0.0082, 0.0075, 0.0097, 0.0069, 0.0128, 0.0073, 0.0084], + device='cuda:2'), in_proj_covar=tensor([0.0096, 0.0071, 0.0076, 0.0095, 0.0076, 0.0105, 0.0090, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 16:42:02,195 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.746e+02 2.684e+02 3.246e+02 4.259e+02 8.856e+02, threshold=6.492e+02, percent-clipped=7.0 +2023-03-09 16:42:14,751 INFO [train.py:898] (2/4) Epoch 23, batch 2250, loss[loss=0.1575, simple_loss=0.2469, pruned_loss=0.03407, over 18262.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2512, pruned_loss=0.03548, over 3576144.58 frames. ], batch size: 47, lr: 4.88e-03, grad_scale: 4.0 +2023-03-09 16:42:22,169 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6214, 2.2014, 2.4850, 2.4988, 3.0430, 4.6861, 4.5451, 3.3590], + device='cuda:2'), covar=tensor([0.1975, 0.2768, 0.3222, 0.2109, 0.2612, 0.0257, 0.0391, 0.0882], + device='cuda:2'), in_proj_covar=tensor([0.0307, 0.0349, 0.0386, 0.0281, 0.0391, 0.0249, 0.0297, 0.0259], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 16:43:07,038 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-03-09 16:43:13,053 INFO [train.py:898] (2/4) Epoch 23, batch 2300, loss[loss=0.1543, simple_loss=0.2402, pruned_loss=0.03416, over 18368.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2517, pruned_loss=0.03561, over 3582721.93 frames. ], batch size: 42, lr: 4.88e-03, grad_scale: 4.0 +2023-03-09 16:43:59,042 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.010e+02 2.750e+02 3.270e+02 3.982e+02 6.063e+02, threshold=6.540e+02, percent-clipped=0.0 +2023-03-09 16:44:11,842 INFO [train.py:898] (2/4) Epoch 23, batch 2350, loss[loss=0.1666, simple_loss=0.258, pruned_loss=0.03755, over 18565.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2517, pruned_loss=0.03579, over 3588038.98 frames. ], batch size: 54, lr: 4.88e-03, grad_scale: 4.0 +2023-03-09 16:44:43,721 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82326.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:45:04,059 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82343.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:45:07,670 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82346.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:45:10,722 INFO [train.py:898] (2/4) Epoch 23, batch 2400, loss[loss=0.188, simple_loss=0.2834, pruned_loss=0.04631, over 18308.00 frames. 
], tot_loss[loss=0.1618, simple_loss=0.2518, pruned_loss=0.03591, over 3588069.98 frames. ], batch size: 57, lr: 4.88e-03, grad_scale: 8.0 +2023-03-09 16:45:39,479 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82374.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:45:55,574 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.666e+02 2.631e+02 3.119e+02 3.737e+02 9.140e+02, threshold=6.238e+02, percent-clipped=2.0 +2023-03-09 16:45:58,356 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4726, 2.7576, 2.5650, 2.7251, 3.5974, 3.5509, 3.1033, 2.8011], + device='cuda:2'), covar=tensor([0.0226, 0.0315, 0.0521, 0.0423, 0.0169, 0.0136, 0.0374, 0.0399], + device='cuda:2'), in_proj_covar=tensor([0.0140, 0.0140, 0.0163, 0.0160, 0.0135, 0.0121, 0.0156, 0.0160], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 16:46:09,288 INFO [train.py:898] (2/4) Epoch 23, batch 2450, loss[loss=0.165, simple_loss=0.252, pruned_loss=0.03903, over 18143.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2518, pruned_loss=0.03558, over 3586602.39 frames. ], batch size: 62, lr: 4.88e-03, grad_scale: 8.0 +2023-03-09 16:46:18,900 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82407.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:46:23,284 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82411.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:46:37,852 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.13 vs. limit=5.0 +2023-03-09 16:46:50,883 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-09 16:47:08,010 INFO [train.py:898] (2/4) Epoch 23, batch 2500, loss[loss=0.1771, simple_loss=0.2802, pruned_loss=0.037, over 18285.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2519, pruned_loss=0.03584, over 3573511.09 frames. ], batch size: 54, lr: 4.88e-03, grad_scale: 8.0 +2023-03-09 16:47:52,904 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.741e+02 2.534e+02 3.083e+02 3.529e+02 5.828e+02, threshold=6.166e+02, percent-clipped=0.0 +2023-03-09 16:47:58,864 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82493.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 16:48:06,035 INFO [train.py:898] (2/4) Epoch 23, batch 2550, loss[loss=0.1562, simple_loss=0.246, pruned_loss=0.0332, over 18429.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2519, pruned_loss=0.03601, over 3567873.20 frames. 
], batch size: 48, lr: 4.87e-03, grad_scale: 8.0 +2023-03-09 16:48:31,851 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0633, 5.1973, 5.2493, 4.9192, 4.9282, 4.9418, 5.2639, 5.2719], + device='cuda:2'), covar=tensor([0.0072, 0.0058, 0.0046, 0.0102, 0.0056, 0.0184, 0.0067, 0.0084], + device='cuda:2'), in_proj_covar=tensor([0.0097, 0.0071, 0.0076, 0.0095, 0.0077, 0.0106, 0.0090, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 16:48:35,218 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4678, 5.9225, 5.4910, 5.7628, 5.5727, 5.4115, 6.0471, 5.9637], + device='cuda:2'), covar=tensor([0.1152, 0.0844, 0.0463, 0.0712, 0.1314, 0.0670, 0.0504, 0.0727], + device='cuda:2'), in_proj_covar=tensor([0.0620, 0.0541, 0.0386, 0.0566, 0.0763, 0.0556, 0.0773, 0.0584], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 16:48:39,032 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.98 vs. limit=5.0 +2023-03-09 16:49:03,667 INFO [train.py:898] (2/4) Epoch 23, batch 2600, loss[loss=0.1416, simple_loss=0.2221, pruned_loss=0.03057, over 18386.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2518, pruned_loss=0.03589, over 3573680.56 frames. ], batch size: 43, lr: 4.87e-03, grad_scale: 8.0 +2023-03-09 16:49:10,298 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82554.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 16:49:21,079 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-09 16:49:27,002 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82569.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:49:35,117 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4606, 2.7104, 3.7963, 3.4116, 2.5589, 3.9692, 3.6406, 2.5911], + device='cuda:2'), covar=tensor([0.0463, 0.1367, 0.0297, 0.0442, 0.1455, 0.0234, 0.0537, 0.0991], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0239, 0.0219, 0.0167, 0.0228, 0.0213, 0.0250, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 16:49:49,705 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.492e+02 2.620e+02 3.051e+02 3.714e+02 9.693e+02, threshold=6.103e+02, percent-clipped=7.0 +2023-03-09 16:50:01,838 INFO [train.py:898] (2/4) Epoch 23, batch 2650, loss[loss=0.1783, simple_loss=0.2668, pruned_loss=0.0449, over 18475.00 frames. ], tot_loss[loss=0.1619, simple_loss=0.2521, pruned_loss=0.03587, over 3575539.66 frames. ], batch size: 59, lr: 4.87e-03, grad_scale: 4.0 +2023-03-09 16:50:38,541 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82630.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:50:54,268 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82643.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:51:00,472 INFO [train.py:898] (2/4) Epoch 23, batch 2700, loss[loss=0.1438, simple_loss=0.2251, pruned_loss=0.03124, over 18384.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2512, pruned_loss=0.03549, over 3585926.33 frames. 
], batch size: 42, lr: 4.87e-03, grad_scale: 4.0 +2023-03-09 16:51:34,190 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8685, 4.6702, 2.5766, 4.5366, 4.4655, 4.7069, 4.5129, 2.5696], + device='cuda:2'), covar=tensor([0.0220, 0.0091, 0.0846, 0.0119, 0.0088, 0.0094, 0.0115, 0.1033], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0082, 0.0097, 0.0097, 0.0088, 0.0078, 0.0086, 0.0097], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 16:51:46,590 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.924e+02 2.721e+02 3.267e+02 3.832e+02 9.086e+02, threshold=6.534e+02, percent-clipped=2.0 +2023-03-09 16:51:48,252 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8917, 3.6360, 4.9566, 4.4523, 3.2481, 3.0140, 4.4308, 5.1640], + device='cuda:2'), covar=tensor([0.0798, 0.1422, 0.0204, 0.0394, 0.1011, 0.1221, 0.0434, 0.0258], + device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0279, 0.0160, 0.0185, 0.0194, 0.0193, 0.0198, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 16:51:49,671 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82691.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:51:58,556 INFO [train.py:898] (2/4) Epoch 23, batch 2750, loss[loss=0.18, simple_loss=0.2709, pruned_loss=0.04449, over 18495.00 frames. ], tot_loss[loss=0.1613, simple_loss=0.2513, pruned_loss=0.03564, over 3583536.15 frames. ], batch size: 59, lr: 4.87e-03, grad_scale: 4.0 +2023-03-09 16:52:02,231 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82702.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:52:13,037 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82711.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:52:38,562 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6430, 2.7379, 4.2618, 3.6956, 2.6134, 4.4242, 3.8388, 2.7785], + device='cuda:2'), covar=tensor([0.0515, 0.1601, 0.0289, 0.0459, 0.1704, 0.0252, 0.0595, 0.0990], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0238, 0.0218, 0.0166, 0.0227, 0.0213, 0.0250, 0.0196], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 16:52:55,994 INFO [train.py:898] (2/4) Epoch 23, batch 2800, loss[loss=0.1984, simple_loss=0.2826, pruned_loss=0.05706, over 12798.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.2508, pruned_loss=0.03521, over 3595122.28 frames. ], batch size: 129, lr: 4.87e-03, grad_scale: 8.0 +2023-03-09 16:53:08,590 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=82759.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:53:42,298 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.820e+02 2.582e+02 3.010e+02 3.495e+02 9.650e+02, threshold=6.020e+02, percent-clipped=2.0 +2023-03-09 16:53:54,215 INFO [train.py:898] (2/4) Epoch 23, batch 2850, loss[loss=0.1572, simple_loss=0.252, pruned_loss=0.03116, over 18648.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2507, pruned_loss=0.03516, over 3597496.74 frames. ], batch size: 52, lr: 4.87e-03, grad_scale: 8.0 +2023-03-09 16:54:52,997 INFO [train.py:898] (2/4) Epoch 23, batch 2900, loss[loss=0.1699, simple_loss=0.269, pruned_loss=0.03543, over 18494.00 frames. 
], tot_loss[loss=0.1606, simple_loss=0.2511, pruned_loss=0.03506, over 3595061.70 frames. ], batch size: 53, lr: 4.86e-03, grad_scale: 8.0 +2023-03-09 16:54:53,250 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82849.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 16:55:39,405 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.765e+02 2.643e+02 2.969e+02 3.593e+02 6.828e+02, threshold=5.939e+02, percent-clipped=2.0 +2023-03-09 16:55:51,348 INFO [train.py:898] (2/4) Epoch 23, batch 2950, loss[loss=0.1432, simple_loss=0.2333, pruned_loss=0.02653, over 18268.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.251, pruned_loss=0.03515, over 3583728.15 frames. ], batch size: 47, lr: 4.86e-03, grad_scale: 8.0 +2023-03-09 16:56:22,404 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82925.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:56:35,172 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8343, 4.3377, 4.3777, 3.2513, 3.6143, 3.4711, 2.4666, 2.2993], + device='cuda:2'), covar=tensor([0.0229, 0.0182, 0.0093, 0.0354, 0.0347, 0.0240, 0.0794, 0.1021], + device='cuda:2'), in_proj_covar=tensor([0.0072, 0.0062, 0.0065, 0.0070, 0.0091, 0.0068, 0.0078, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 16:56:49,688 INFO [train.py:898] (2/4) Epoch 23, batch 3000, loss[loss=0.1562, simple_loss=0.2393, pruned_loss=0.03659, over 17742.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.2511, pruned_loss=0.03506, over 3589920.04 frames. ], batch size: 39, lr: 4.86e-03, grad_scale: 8.0 +2023-03-09 16:56:49,688 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 16:56:56,239 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8813, 2.4877, 2.2634, 2.3940, 3.0568, 3.0328, 2.8233, 2.5880], + device='cuda:2'), covar=tensor([0.0215, 0.0301, 0.0591, 0.0434, 0.0218, 0.0179, 0.0406, 0.0401], + device='cuda:2'), in_proj_covar=tensor([0.0140, 0.0139, 0.0163, 0.0160, 0.0133, 0.0120, 0.0156, 0.0159], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 16:57:01,575 INFO [train.py:932] (2/4) Epoch 23, validation: loss=0.1503, simple_loss=0.2492, pruned_loss=0.02572, over 944034.00 frames. +2023-03-09 16:57:01,576 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 16:57:19,209 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82963.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:57:44,138 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7671, 3.7714, 3.5798, 3.2417, 3.5403, 2.8552, 2.8968, 3.7926], + device='cuda:2'), covar=tensor([0.0062, 0.0094, 0.0080, 0.0153, 0.0091, 0.0195, 0.0206, 0.0061], + device='cuda:2'), in_proj_covar=tensor([0.0145, 0.0165, 0.0137, 0.0191, 0.0146, 0.0181, 0.0185, 0.0125], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 16:57:48,596 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.819e+02 2.491e+02 2.993e+02 3.582e+02 9.758e+02, threshold=5.985e+02, percent-clipped=2.0 +2023-03-09 16:58:00,507 INFO [train.py:898] (2/4) Epoch 23, batch 3050, loss[loss=0.1479, simple_loss=0.2355, pruned_loss=0.03017, over 18425.00 frames. 
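The optim.py records print five gradient-norm statistics, apparently min / 25% / median / 75% / max over a window of recent steps, plus a clipping threshold. In every record here the threshold is Clipping_scale times the middle value (2 * 2.969e+02 = 5.938e+02 for the record just above, matching the logged 5.939e+02 up to rounding), and percent-clipped is the share of recent steps whose norm exceeded it. A sketch of such median-based clipping; the window size is an assumption, and only the threshold rule is inferred from the numbers:

    # Sketch: median-based gradient clipping inferred from these records.
    import collections
    import torch

    class MedianClipper:
        def __init__(self, clipping_scale=2.0, window=128):
            self.clipping_scale = clipping_scale
            self.norms = collections.deque(maxlen=window)  # window assumed

        def __call__(self, params):
            params = [p for p in params if p.grad is not None]
            norm = torch.norm(torch.stack([p.grad.norm() for p in params]))
            self.norms.append(norm.item())
            q = torch.quantile(torch.tensor(list(self.norms)),
                               torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
            threshold = self.clipping_scale * q[2]   # scale * median
            if norm > threshold:                     # counts toward
                for p in params:                     # percent-clipped
                    p.grad.mul_(threshold / norm)
            return q, threshold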
], tot_loss[loss=0.1605, simple_loss=0.251, pruned_loss=0.03498, over 3593761.25 frames. ], batch size: 48, lr: 4.86e-03, grad_scale: 8.0 +2023-03-09 16:58:04,574 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83002.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:58:20,222 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9437, 5.4498, 5.4413, 5.4331, 4.9070, 5.3471, 4.8046, 5.3151], + device='cuda:2'), covar=tensor([0.0262, 0.0263, 0.0184, 0.0403, 0.0391, 0.0222, 0.1010, 0.0304], + device='cuda:2'), in_proj_covar=tensor([0.0220, 0.0265, 0.0259, 0.0337, 0.0277, 0.0274, 0.0309, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 16:58:30,353 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83024.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:58:45,576 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.80 vs. limit=2.0 +2023-03-09 16:58:59,554 INFO [train.py:898] (2/4) Epoch 23, batch 3100, loss[loss=0.1619, simple_loss=0.249, pruned_loss=0.03742, over 18269.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2507, pruned_loss=0.03484, over 3595739.18 frames. ], batch size: 47, lr: 4.86e-03, grad_scale: 4.0 +2023-03-09 16:59:00,888 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83050.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 16:59:14,415 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5885, 5.5564, 5.1926, 5.4737, 5.5173, 4.9014, 5.4306, 5.1564], + device='cuda:2'), covar=tensor([0.0414, 0.0406, 0.1237, 0.0881, 0.0559, 0.0465, 0.0385, 0.0985], + device='cuda:2'), in_proj_covar=tensor([0.0499, 0.0569, 0.0710, 0.0439, 0.0458, 0.0518, 0.0546, 0.0685], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0004, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 16:59:46,643 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.874e+02 2.570e+02 3.037e+02 3.721e+02 1.351e+03, threshold=6.073e+02, percent-clipped=1.0 +2023-03-09 16:59:57,767 INFO [train.py:898] (2/4) Epoch 23, batch 3150, loss[loss=0.1503, simple_loss=0.2418, pruned_loss=0.02937, over 18620.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.251, pruned_loss=0.03488, over 3596426.65 frames. ], batch size: 52, lr: 4.86e-03, grad_scale: 4.0 +2023-03-09 17:00:55,291 INFO [train.py:898] (2/4) Epoch 23, batch 3200, loss[loss=0.1497, simple_loss=0.2393, pruned_loss=0.03001, over 18481.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2511, pruned_loss=0.03502, over 3602267.42 frames. 
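The attn_weights_entropy tensors are periodic diagnostics with eight entries per row, plausibly one per attention head: a value near 0 means a head attends to a single frame, while a value near log(T) means it spreads uniformly over T source frames, so entries around 5.5 above indicate very diffuse heads. A sketch of the underlying quantity; the averaging over target positions is an assumption:

    # Sketch: entropy of attention weights, one scalar per head.
    import torch

    def attn_weights_entropy(attn, eps=1.0e-20):
        # attn: (num_heads, tgt_len, src_len), each row a softmax distribution
        ent = -(attn * (attn + eps).log()).sum(dim=-1)  # per target position
        return ent.mean(dim=-1)                         # average per head

    attn = torch.softmax(torch.randn(8, 300, 300), dim=-1)
    print(attn_weights_entropy(attn))  # 8 values, like the rows logged above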
], batch size: 47, lr: 4.86e-03, grad_scale: 8.0 +2023-03-09 17:00:55,662 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83149.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 17:01:25,315 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7646, 3.5068, 2.4243, 4.5866, 3.2882, 4.4387, 2.6651, 4.1171], + device='cuda:2'), covar=tensor([0.0591, 0.0855, 0.1488, 0.0426, 0.0790, 0.0322, 0.1138, 0.0413], + device='cuda:2'), in_proj_covar=tensor([0.0216, 0.0227, 0.0190, 0.0288, 0.0192, 0.0267, 0.0204, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 17:01:43,024 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 2.631e+02 3.256e+02 4.142e+02 1.137e+03, threshold=6.512e+02, percent-clipped=5.0 +2023-03-09 17:01:49,053 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1781, 5.2706, 5.4863, 5.5713, 5.0970, 5.9988, 5.6170, 5.1460], + device='cuda:2'), covar=tensor([0.1283, 0.0624, 0.0728, 0.0685, 0.1375, 0.0685, 0.0661, 0.1858], + device='cuda:2'), in_proj_covar=tensor([0.0367, 0.0293, 0.0316, 0.0321, 0.0335, 0.0432, 0.0290, 0.0424], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 17:01:50,325 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83196.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:01:51,141 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83197.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 17:01:53,672 INFO [train.py:898] (2/4) Epoch 23, batch 3250, loss[loss=0.1514, simple_loss=0.2431, pruned_loss=0.02987, over 18288.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.251, pruned_loss=0.03522, over 3599772.53 frames. ], batch size: 49, lr: 4.85e-03, grad_scale: 8.0 +2023-03-09 17:02:25,228 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83225.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:02:51,887 INFO [train.py:898] (2/4) Epoch 23, batch 3300, loss[loss=0.1405, simple_loss=0.2202, pruned_loss=0.03044, over 18265.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2499, pruned_loss=0.03505, over 3607951.08 frames. ], batch size: 45, lr: 4.85e-03, grad_scale: 8.0 +2023-03-09 17:03:02,552 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83257.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:03:21,862 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83273.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:03:34,394 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83284.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:03:40,705 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.708e+02 2.590e+02 3.037e+02 3.666e+02 6.079e+02, threshold=6.073e+02, percent-clipped=0.0 +2023-03-09 17:03:50,690 INFO [train.py:898] (2/4) Epoch 23, batch 3350, loss[loss=0.1569, simple_loss=0.247, pruned_loss=0.03335, over 18400.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.2491, pruned_loss=0.03463, over 3606490.67 frames. ], batch size: 48, lr: 4.85e-03, grad_scale: 8.0 +2023-03-09 17:04:00,824 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. 
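The zipformer.py:625 records describe stochastic layer skipping: each encoder stack carries its own staggered warmup window in batch counts (warmup_begin values of 666.7, 1333.3, 2000.0, 2666.7 and 3333.3 all appear above), and on each batch a random subset of that stack's layers may be bypassed, only rarely by this point in training (num_to_drop is usually 0 here, occasionally 1). A sketch of such a schedule; the probabilities are assumptions, and only the shape of the mechanism is taken from the log:

    # Sketch: warmup-dependent random layer dropping; probabilities assumed.
    import random

    def layers_to_drop(batch_count, warmup_end, num_layers,
                       p_warm=0.075, p_after=0.01):
        # higher per-layer drop probability while the stack is warming up
        p = p_warm if batch_count < warmup_end else p_after
        return {i for i in range(num_layers) if random.random() < p}

    # e.g. batch_count=83149.0, warmup_end=4000.0 -> rare single-layer drops
    print(layers_to_drop(83149.0, 4000.0, num_layers=4))

The logged num_to_drop would then be the size of the returned set, and layers_to_drop its contents.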
limit=2.0 +2023-03-09 17:04:14,309 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83319.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:04:45,108 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83345.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:04:49,302 INFO [train.py:898] (2/4) Epoch 23, batch 3400, loss[loss=0.1416, simple_loss=0.2213, pruned_loss=0.03092, over 18516.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2502, pruned_loss=0.03524, over 3604310.77 frames. ], batch size: 44, lr: 4.85e-03, grad_scale: 8.0 +2023-03-09 17:05:37,375 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.783e+02 2.666e+02 3.258e+02 4.159e+02 1.161e+03, threshold=6.516e+02, percent-clipped=10.0 +2023-03-09 17:05:47,437 INFO [train.py:898] (2/4) Epoch 23, batch 3450, loss[loss=0.1366, simple_loss=0.2232, pruned_loss=0.02499, over 18424.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.2507, pruned_loss=0.03523, over 3600355.97 frames. ], batch size: 42, lr: 4.85e-03, grad_scale: 8.0 +2023-03-09 17:06:03,953 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.12 vs. limit=5.0 +2023-03-09 17:06:45,594 INFO [train.py:898] (2/4) Epoch 23, batch 3500, loss[loss=0.1413, simple_loss=0.2251, pruned_loss=0.02873, over 18572.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2504, pruned_loss=0.03503, over 3595397.39 frames. ], batch size: 45, lr: 4.85e-03, grad_scale: 8.0 +2023-03-09 17:07:31,223 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.962e+02 2.487e+02 3.071e+02 3.696e+02 9.873e+02, threshold=6.142e+02, percent-clipped=4.0 +2023-03-09 17:07:41,351 INFO [train.py:898] (2/4) Epoch 23, batch 3550, loss[loss=0.1419, simple_loss=0.2243, pruned_loss=0.02974, over 18146.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2498, pruned_loss=0.03475, over 3601574.61 frames. ], batch size: 44, lr: 4.85e-03, grad_scale: 8.0 +2023-03-09 17:08:21,820 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6395, 3.6882, 3.4360, 3.1406, 3.3273, 2.6576, 2.7347, 3.6379], + device='cuda:2'), covar=tensor([0.0064, 0.0098, 0.0085, 0.0132, 0.0105, 0.0196, 0.0203, 0.0072], + device='cuda:2'), in_proj_covar=tensor([0.0145, 0.0166, 0.0138, 0.0191, 0.0147, 0.0182, 0.0186, 0.0126], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 17:08:35,289 INFO [train.py:898] (2/4) Epoch 23, batch 3600, loss[loss=0.1792, simple_loss=0.2733, pruned_loss=0.04259, over 16176.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2497, pruned_loss=0.0349, over 3595645.13 frames. ], batch size: 95, lr: 4.84e-03, grad_scale: 8.0 +2023-03-09 17:08:38,662 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83552.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:09:39,854 INFO [train.py:898] (2/4) Epoch 24, batch 0, loss[loss=0.1806, simple_loss=0.2785, pruned_loss=0.04139, over 18364.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2785, pruned_loss=0.04139, over 18364.00 frames. 
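The scaling.py:679 records compare a whitening metric against a per-module limit (2.0 for the grouped cases such as num_groups=8, num_channels=96 above, 5.0 for the single-group 384-channel case); presumably a penalty activates only when the metric exceeds the limit, which is why most readings sit below it. A metric that equals 1.0 for perfectly white features is the eigenvalue-spread ratio of the per-group covariance; a sketch under that assumption:

    # Sketch: a whitening metric equal to 1.0 when the per-group feature
    # covariance is proportional to the identity, larger otherwise.
    import torch

    def whitening_metric(x, num_groups):
        # x: (num_frames, num_channels)
        n, c = x.shape
        d = c // num_groups
        x = x.reshape(n, num_groups, d).transpose(0, 1)   # (groups, n, d)
        x = x - x.mean(dim=1, keepdim=True)
        cov = x.transpose(1, 2) @ x / n                   # (groups, d, d)
        num = (cov * cov).sum(dim=(1, 2))                 # sum of eigvals**2
        den = cov.diagonal(dim1=1, dim2=2).sum(dim=1) ** 2 / d
        return (num / den).mean().item()

    print(whitening_metric(torch.randn(2000, 384), num_groups=1))  # ~1.0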
], batch size: 56, lr: 4.74e-03, grad_scale: 8.0 +2023-03-09 17:09:39,854 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 17:09:49,107 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8871, 2.3026, 3.0087, 2.6957, 2.2816, 3.1179, 3.0316, 2.2151], + device='cuda:2'), covar=tensor([0.0641, 0.1484, 0.0604, 0.0625, 0.1669, 0.0422, 0.0792, 0.1060], + device='cuda:2'), in_proj_covar=tensor([0.0212, 0.0238, 0.0219, 0.0166, 0.0226, 0.0212, 0.0251, 0.0195], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 17:09:51,561 INFO [train.py:932] (2/4) Epoch 24, validation: loss=0.1502, simple_loss=0.2499, pruned_loss=0.02529, over 944034.00 frames. +2023-03-09 17:09:51,562 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 17:10:00,559 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.679e+02 2.611e+02 3.172e+02 4.204e+02 1.377e+03, threshold=6.343e+02, percent-clipped=6.0 +2023-03-09 17:10:34,160 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83619.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:10:50,366 INFO [train.py:898] (2/4) Epoch 24, batch 50, loss[loss=0.1381, simple_loss=0.2247, pruned_loss=0.02574, over 18416.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2436, pruned_loss=0.03241, over 820506.58 frames. ], batch size: 43, lr: 4.74e-03, grad_scale: 8.0 +2023-03-09 17:10:58,619 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83640.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:11:18,223 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-03-09 17:11:30,057 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83667.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:11:48,508 INFO [train.py:898] (2/4) Epoch 24, batch 100, loss[loss=0.1538, simple_loss=0.2493, pruned_loss=0.0292, over 18475.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2487, pruned_loss=0.03459, over 1424650.31 frames. ], batch size: 59, lr: 4.74e-03, grad_scale: 8.0 +2023-03-09 17:11:58,112 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.623e+02 2.468e+02 2.945e+02 3.630e+02 7.467e+02, threshold=5.891e+02, percent-clipped=1.0 +2023-03-09 17:12:46,018 INFO [train.py:898] (2/4) Epoch 24, batch 150, loss[loss=0.1382, simple_loss=0.221, pruned_loss=0.02774, over 18154.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2475, pruned_loss=0.03424, over 1903929.04 frames. ], batch size: 44, lr: 4.73e-03, grad_scale: 8.0 +2023-03-09 17:13:18,121 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83760.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:13:30,021 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.51 vs. limit=5.0 +2023-03-09 17:13:39,289 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-09 17:13:43,908 INFO [train.py:898] (2/4) Epoch 24, batch 200, loss[loss=0.1656, simple_loss=0.2648, pruned_loss=0.03318, over 18351.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2491, pruned_loss=0.03479, over 2280970.35 frames. 
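Validation interleaves with training: the loop pauses ("Computing validation loss"), reports a frame-weighted loss over a fixed dev set (the same 944034.00 frames in the epoch-23 validation further up and here), and prints peak GPU memory, flat at 19987MB across both checks. A sketch of that bookkeeping; compute_loss is a hypothetical stand-in for the recipe's loss function:

    # Sketch: frame-weighted validation loss plus peak-memory report.
    import torch

    def compute_loss(model, batch):          # hypothetical stand-in
        out = model(batch)
        return out.sum().item(), float(out.numel())

    def validate(model, valid_dl, device="cuda:2"):
        model.eval()
        loss_sum, frames = 0.0, 0.0
        with torch.no_grad():
            for batch in valid_dl:
                l, f = compute_loss(model, batch)
                loss_sum, frames = loss_sum + l, frames + f
        model.train()
        mem = torch.cuda.max_memory_allocated(device) // (1024 * 1024)
        print(f"validation: loss={loss_sum / frames:.4f}, "
              f"over {frames:.2f} frames.")
        print(f"Maximum memory allocated so far is {mem}MB")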
], batch size: 56, lr: 4.73e-03, grad_scale: 4.0 +2023-03-09 17:13:53,810 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.895e+02 2.575e+02 2.989e+02 3.608e+02 5.254e+02, threshold=5.979e+02, percent-clipped=0.0 +2023-03-09 17:14:28,998 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83821.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:14:31,431 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.2731, 2.7892, 2.0803, 2.6751, 3.3235, 3.2871, 2.9044, 2.8169], + device='cuda:2'), covar=tensor([0.0186, 0.0221, 0.0728, 0.0320, 0.0151, 0.0148, 0.0352, 0.0276], + device='cuda:2'), in_proj_covar=tensor([0.0142, 0.0141, 0.0165, 0.0162, 0.0135, 0.0121, 0.0158, 0.0161], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 17:14:42,255 INFO [train.py:898] (2/4) Epoch 24, batch 250, loss[loss=0.1593, simple_loss=0.2557, pruned_loss=0.03143, over 17051.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2494, pruned_loss=0.03503, over 2569642.21 frames. ], batch size: 78, lr: 4.73e-03, grad_scale: 4.0 +2023-03-09 17:14:56,864 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0985, 5.5511, 5.5571, 5.5710, 5.0266, 5.4157, 4.9220, 5.4085], + device='cuda:2'), covar=tensor([0.0237, 0.0254, 0.0167, 0.0391, 0.0399, 0.0247, 0.0999, 0.0318], + device='cuda:2'), in_proj_covar=tensor([0.0221, 0.0266, 0.0259, 0.0338, 0.0278, 0.0274, 0.0310, 0.0266], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 17:15:04,759 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83852.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:15:40,423 INFO [train.py:898] (2/4) Epoch 24, batch 300, loss[loss=0.1692, simple_loss=0.2663, pruned_loss=0.03604, over 17830.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2497, pruned_loss=0.03499, over 2793380.88 frames. ], batch size: 70, lr: 4.73e-03, grad_scale: 4.0 +2023-03-09 17:15:50,703 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.883e+02 2.677e+02 3.133e+02 3.610e+02 5.796e+02, threshold=6.266e+02, percent-clipped=0.0 +2023-03-09 17:16:00,994 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83900.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:16:25,850 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6941, 2.9907, 2.6323, 2.9652, 3.7707, 3.6649, 3.2646, 3.0546], + device='cuda:2'), covar=tensor([0.0195, 0.0305, 0.0588, 0.0346, 0.0156, 0.0149, 0.0342, 0.0343], + device='cuda:2'), in_proj_covar=tensor([0.0142, 0.0141, 0.0165, 0.0162, 0.0135, 0.0121, 0.0157, 0.0161], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 17:16:38,586 INFO [train.py:898] (2/4) Epoch 24, batch 350, loss[loss=0.1655, simple_loss=0.2534, pruned_loss=0.03879, over 18268.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2491, pruned_loss=0.03499, over 2982214.52 frames. ], batch size: 49, lr: 4.73e-03, grad_scale: 2.0 +2023-03-09 17:16:46,910 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83940.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:16:47,220 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.00 vs. 
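The lr column decays smoothly within an epoch and steps down at each epoch boundary (4.87e-03 late in epoch 23, 4.74e-03 at epoch 24, batch 0, 4.73e-03 a few hundred batches later). Both values are reproduced by an Eden-style schedule with base_lr=0.05, lr_batches=5000 and lr_epochs=3.5; those constants, and the zero-based epoch index, are assumptions that happen to fit the logged numbers and nearby batch_count fields:

    # Sketch: Eden-style LR schedule; constants assumed, checked against
    # the lr values and batch_count fields in the surrounding records.
    def eden_lr(base_lr, batch, epoch, lr_batches=5000.0, lr_epochs=3.5):
        return (base_lr
                * ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
                * ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25)

    print(eden_lr(0.05, batch=82554, epoch=22))  # ~4.87e-03 (epoch 23 above)
    print(eden_lr(0.05, batch=83588, epoch=23))  # ~4.74e-03 (epoch 24 start)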
limit=5.0 +2023-03-09 17:16:56,923 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83949.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:17:34,101 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7274, 3.6476, 5.2811, 3.1758, 4.5437, 2.5672, 3.0187, 1.9200], + device='cuda:2'), covar=tensor([0.1324, 0.1010, 0.0127, 0.0883, 0.0535, 0.2601, 0.2838, 0.2277], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0246, 0.0206, 0.0202, 0.0259, 0.0274, 0.0328, 0.0240], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 17:17:36,995 INFO [train.py:898] (2/4) Epoch 24, batch 400, loss[loss=0.1619, simple_loss=0.2601, pruned_loss=0.03184, over 17592.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2482, pruned_loss=0.03471, over 3121136.39 frames. ], batch size: 70, lr: 4.73e-03, grad_scale: 4.0 +2023-03-09 17:17:39,568 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83985.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:17:42,830 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=83988.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:17:48,086 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.909e+02 2.479e+02 2.899e+02 3.539e+02 5.465e+02, threshold=5.798e+02, percent-clipped=0.0 +2023-03-09 17:18:13,521 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84010.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 17:18:39,725 INFO [train.py:898] (2/4) Epoch 24, batch 450, loss[loss=0.1392, simple_loss=0.2222, pruned_loss=0.02812, over 18264.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.249, pruned_loss=0.03519, over 3222804.01 frames. ], batch size: 45, lr: 4.73e-03, grad_scale: 4.0 +2023-03-09 17:18:53,149 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-03-09 17:18:55,082 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84046.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:19:17,588 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7495, 2.9880, 2.6534, 2.9197, 3.7542, 3.6432, 3.2973, 3.0783], + device='cuda:2'), covar=tensor([0.0172, 0.0298, 0.0553, 0.0375, 0.0163, 0.0158, 0.0352, 0.0363], + device='cuda:2'), in_proj_covar=tensor([0.0142, 0.0141, 0.0165, 0.0163, 0.0136, 0.0121, 0.0158, 0.0161], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 17:19:38,436 INFO [train.py:898] (2/4) Epoch 24, batch 500, loss[loss=0.1693, simple_loss=0.2644, pruned_loss=0.03708, over 17805.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2497, pruned_loss=0.03524, over 3301585.03 frames. 
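Each record carries the current batch's loss[...] and a tot_loss[...] over a frame count that grows from each epoch start with shrinking increments (820506.58 at batch 50, 1424650.31 at batch 100, 1903929.04 at batch 150, 2280970.35 at batch 200 above) and saturates near 3.6e6 frames: the signature of an exponentially decayed, frame-weighted running average rather than a plain sum, which also explains the fractional counts. A sketch; the decay of roughly 0.995 per batch is fitted by eye to those counts:

    # Sketch: decayed frame-weighted running loss, consistent with the
    # logged "over N frames" trajectory; decay=0.995 is a rough fit.
    class RunningLoss:
        def __init__(self, decay=0.995):
            self.decay, self.loss_sum, self.frames = decay, 0.0, 0.0

        def update(self, batch_loss, batch_frames):
            self.loss_sum = self.decay * self.loss_sum + batch_loss * batch_frames
            self.frames = self.decay * self.frames + batch_frames
            return self.loss_sum / self.frames, self.frames

    rl = RunningLoss()
    for _ in range(50):
        tot, frames = rl.update(0.16, 17000.0)
    print(frames)  # ~7.5e5, the same order as the batch-50 count above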
], batch size: 70, lr: 4.73e-03, grad_scale: 4.0 +2023-03-09 17:19:49,797 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.687e+02 2.790e+02 3.387e+02 4.220e+02 9.031e+02, threshold=6.774e+02, percent-clipped=2.0 +2023-03-09 17:19:54,565 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5287, 5.4673, 5.6891, 5.6950, 5.4106, 6.2177, 5.8906, 5.5088], + device='cuda:2'), covar=tensor([0.0976, 0.0622, 0.0662, 0.0761, 0.1338, 0.0663, 0.0669, 0.1638], + device='cuda:2'), in_proj_covar=tensor([0.0365, 0.0293, 0.0316, 0.0321, 0.0331, 0.0430, 0.0290, 0.0423], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 17:19:54,658 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84097.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:20:05,439 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8727, 5.1893, 2.4030, 5.0342, 4.8940, 5.1482, 5.0003, 2.3636], + device='cuda:2'), covar=tensor([0.0235, 0.0059, 0.0966, 0.0084, 0.0077, 0.0076, 0.0093, 0.1141], + device='cuda:2'), in_proj_covar=tensor([0.0091, 0.0082, 0.0098, 0.0097, 0.0088, 0.0078, 0.0087, 0.0098], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 17:20:16,915 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84116.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:20:36,462 INFO [train.py:898] (2/4) Epoch 24, batch 550, loss[loss=0.1694, simple_loss=0.2568, pruned_loss=0.04106, over 18364.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2499, pruned_loss=0.03515, over 3360917.65 frames. ], batch size: 56, lr: 4.72e-03, grad_scale: 4.0 +2023-03-09 17:21:05,017 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84158.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:21:33,938 INFO [train.py:898] (2/4) Epoch 24, batch 600, loss[loss=0.1554, simple_loss=0.2548, pruned_loss=0.02806, over 17826.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2502, pruned_loss=0.03491, over 3419229.72 frames. ], batch size: 70, lr: 4.72e-03, grad_scale: 4.0 +2023-03-09 17:21:45,737 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.833e+02 2.553e+02 3.031e+02 3.741e+02 6.860e+02, threshold=6.062e+02, percent-clipped=1.0 +2023-03-09 17:21:53,175 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84199.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:22:32,629 INFO [train.py:898] (2/4) Epoch 24, batch 650, loss[loss=0.1758, simple_loss=0.2719, pruned_loss=0.03989, over 18083.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.25, pruned_loss=0.0351, over 3452000.74 frames. ], batch size: 62, lr: 4.72e-03, grad_scale: 4.0 +2023-03-09 17:22:36,508 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84236.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:22:37,056 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. 
limit=2.0 +2023-03-09 17:23:04,146 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84260.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:23:12,782 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1227, 5.3492, 5.4030, 5.4531, 5.1308, 5.9610, 5.5900, 5.3226], + device='cuda:2'), covar=tensor([0.1156, 0.0621, 0.0786, 0.0789, 0.1335, 0.0672, 0.0681, 0.1523], + device='cuda:2'), in_proj_covar=tensor([0.0370, 0.0297, 0.0319, 0.0324, 0.0335, 0.0434, 0.0291, 0.0425], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 17:23:30,944 INFO [train.py:898] (2/4) Epoch 24, batch 700, loss[loss=0.1524, simple_loss=0.2513, pruned_loss=0.02674, over 17125.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.25, pruned_loss=0.03516, over 3476150.83 frames. ], batch size: 78, lr: 4.72e-03, grad_scale: 4.0 +2023-03-09 17:23:42,562 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.851e+02 2.577e+02 2.948e+02 3.805e+02 8.453e+02, threshold=5.897e+02, percent-clipped=4.0 +2023-03-09 17:23:47,505 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84297.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:23:54,328 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6666, 3.0478, 4.2760, 3.6345, 2.7502, 4.5307, 3.9249, 2.8418], + device='cuda:2'), covar=tensor([0.0514, 0.1391, 0.0321, 0.0478, 0.1611, 0.0231, 0.0574, 0.1020], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0242, 0.0220, 0.0168, 0.0227, 0.0216, 0.0254, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 17:23:56,346 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84305.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 17:24:06,599 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8641, 3.1785, 4.5828, 3.9354, 3.0294, 4.8445, 4.0816, 3.0980], + device='cuda:2'), covar=tensor([0.0490, 0.1457, 0.0307, 0.0443, 0.1466, 0.0216, 0.0609, 0.0953], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0243, 0.0221, 0.0168, 0.0227, 0.0216, 0.0255, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 17:24:29,575 INFO [train.py:898] (2/4) Epoch 24, batch 750, loss[loss=0.1719, simple_loss=0.2636, pruned_loss=0.04008, over 18253.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.25, pruned_loss=0.03514, over 3498348.66 frames. ], batch size: 60, lr: 4.72e-03, grad_scale: 4.0 +2023-03-09 17:24:33,191 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84336.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:24:38,587 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84341.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:25:27,069 INFO [train.py:898] (2/4) Epoch 24, batch 800, loss[loss=0.1779, simple_loss=0.2687, pruned_loss=0.04354, over 18479.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2504, pruned_loss=0.03502, over 3518913.66 frames. 
], batch size: 59, lr: 4.72e-03, grad_scale: 8.0 +2023-03-09 17:25:39,054 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.586e+02 2.623e+02 3.073e+02 3.761e+02 8.070e+02, threshold=6.147e+02, percent-clipped=7.0 +2023-03-09 17:25:43,859 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84397.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:26:05,756 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84416.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:26:25,010 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4244, 3.2304, 2.2382, 4.1255, 2.8464, 3.8270, 2.4082, 3.6303], + device='cuda:2'), covar=tensor([0.0611, 0.0852, 0.1425, 0.0528, 0.0847, 0.0349, 0.1205, 0.0439], + device='cuda:2'), in_proj_covar=tensor([0.0220, 0.0230, 0.0193, 0.0292, 0.0195, 0.0271, 0.0206, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 17:26:25,763 INFO [train.py:898] (2/4) Epoch 24, batch 850, loss[loss=0.1655, simple_loss=0.2629, pruned_loss=0.034, over 17724.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2504, pruned_loss=0.03503, over 3529506.65 frames. ], batch size: 70, lr: 4.72e-03, grad_scale: 8.0 +2023-03-09 17:26:36,823 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-09 17:26:49,152 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84453.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:27:01,412 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84464.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:27:23,392 INFO [train.py:898] (2/4) Epoch 24, batch 900, loss[loss=0.1403, simple_loss=0.2312, pruned_loss=0.02476, over 18259.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2505, pruned_loss=0.03508, over 3541784.96 frames. ], batch size: 45, lr: 4.71e-03, grad_scale: 8.0 +2023-03-09 17:27:35,100 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.772e+02 2.570e+02 3.063e+02 4.034e+02 9.733e+02, threshold=6.126e+02, percent-clipped=4.0 +2023-03-09 17:28:11,988 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84525.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:28:21,161 INFO [train.py:898] (2/4) Epoch 24, batch 950, loss[loss=0.1676, simple_loss=0.2601, pruned_loss=0.03759, over 17096.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2508, pruned_loss=0.03485, over 3565231.96 frames. ], batch size: 78, lr: 4.71e-03, grad_scale: 8.0 +2023-03-09 17:28:47,308 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84555.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:29:19,840 INFO [train.py:898] (2/4) Epoch 24, batch 1000, loss[loss=0.2153, simple_loss=0.2907, pruned_loss=0.06991, over 12680.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2507, pruned_loss=0.03496, over 3569817.72 frames. 
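Batch size and the per-batch frame count move inversely: the batch-1000 record here covers only 12680.00 frames with batch size 129 (many short cuts), while the size-43..59 batches elsewhere in this stretch cover around 18000 frames. If these counts are encoder frames at 40 ms each (10 ms features with 4x subsampling, our assumption), a typical batch packs roughly 739 s of audio, i.e. batches are filled close to a fixed duration budget, whereas the 129-cut batch of very short utterances comes in well under it, presumably because something other than total duration caps it. A quick check:

    # Rough duration check; the 40 ms-per-frame assumption is ours.
    print(18475 * 0.04)  # ~739 s of audio in a typical batch here
    print(12680 * 0.04)  # ~507 s in the 129-cut batch of short utterances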
], batch size: 129, lr: 4.71e-03, grad_scale: 8.0 +2023-03-09 17:29:24,258 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84586.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:29:30,781 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84592.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:29:31,651 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.810e+02 2.603e+02 3.017e+02 3.476e+02 5.470e+02, threshold=6.035e+02, percent-clipped=0.0 +2023-03-09 17:29:46,398 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84605.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:29:58,496 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. limit=2.0 +2023-03-09 17:30:11,656 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7465, 3.7896, 3.5497, 3.1945, 3.4397, 2.8090, 2.8433, 3.7654], + device='cuda:2'), covar=tensor([0.0067, 0.0085, 0.0080, 0.0143, 0.0100, 0.0201, 0.0208, 0.0065], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0168, 0.0140, 0.0192, 0.0149, 0.0183, 0.0188, 0.0126], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 17:30:17,819 INFO [train.py:898] (2/4) Epoch 24, batch 1050, loss[loss=0.1621, simple_loss=0.2571, pruned_loss=0.03352, over 17997.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2502, pruned_loss=0.03494, over 3574817.27 frames. ], batch size: 65, lr: 4.71e-03, grad_scale: 8.0 +2023-03-09 17:30:18,158 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84633.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:30:27,831 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84641.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:30:33,609 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0315, 3.8083, 5.1680, 4.5799, 3.6171, 3.2438, 4.5762, 5.3241], + device='cuda:2'), covar=tensor([0.0714, 0.1440, 0.0175, 0.0362, 0.0839, 0.1139, 0.0345, 0.0220], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0278, 0.0163, 0.0184, 0.0194, 0.0194, 0.0198, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 17:30:41,771 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84653.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:31:16,087 INFO [train.py:898] (2/4) Epoch 24, batch 1100, loss[loss=0.1562, simple_loss=0.2537, pruned_loss=0.02941, over 18386.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.2502, pruned_loss=0.03477, over 3585203.74 frames. 
], batch size: 52, lr: 4.71e-03, grad_scale: 8.0 +2023-03-09 17:31:23,089 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84689.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:31:27,968 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84692.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:31:28,857 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.674e+02 2.619e+02 3.034e+02 3.649e+02 7.124e+02, threshold=6.067e+02, percent-clipped=2.0 +2023-03-09 17:31:30,471 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84694.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:31:49,094 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7839, 3.6443, 5.0684, 3.0254, 4.4307, 2.6377, 3.1515, 1.7774], + device='cuda:2'), covar=tensor([0.1237, 0.0889, 0.0128, 0.0905, 0.0475, 0.2546, 0.2459, 0.2221], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0245, 0.0205, 0.0201, 0.0259, 0.0273, 0.0328, 0.0239], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 17:32:14,546 INFO [train.py:898] (2/4) Epoch 24, batch 1150, loss[loss=0.1529, simple_loss=0.2335, pruned_loss=0.03617, over 18501.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2503, pruned_loss=0.03497, over 3594658.17 frames. ], batch size: 47, lr: 4.71e-03, grad_scale: 8.0 +2023-03-09 17:32:36,174 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7061, 3.0810, 4.3740, 3.7168, 2.7674, 4.5165, 3.9533, 2.8900], + device='cuda:2'), covar=tensor([0.0483, 0.1294, 0.0325, 0.0477, 0.1537, 0.0244, 0.0549, 0.0955], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0242, 0.0220, 0.0169, 0.0227, 0.0216, 0.0254, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 17:32:38,328 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84753.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:33:06,564 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-03-09 17:33:12,318 INFO [train.py:898] (2/4) Epoch 24, batch 1200, loss[loss=0.144, simple_loss=0.2305, pruned_loss=0.02874, over 18560.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2501, pruned_loss=0.03498, over 3601849.81 frames. ], batch size: 45, lr: 4.71e-03, grad_scale: 8.0 +2023-03-09 17:33:24,631 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.879e+02 2.822e+02 3.199e+02 4.147e+02 1.139e+03, threshold=6.397e+02, percent-clipped=6.0 +2023-03-09 17:33:32,865 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84800.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:33:33,883 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84801.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:34:10,461 INFO [train.py:898] (2/4) Epoch 24, batch 1250, loss[loss=0.1451, simple_loss=0.2273, pruned_loss=0.03148, over 18428.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2504, pruned_loss=0.03517, over 3603731.99 frames. 
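For plotting, the tot_loss trajectory can be scraped directly from records in this format; a minimal sketch matching the layout of the lines above:

    # Sketch: extract (epoch, batch, tot_loss) from records like the above.
    import re

    PAT = re.compile(r"Epoch (\d+), batch (\d+), loss\[.*?"
                     r"tot_loss\[loss=([\d.]+)")

    def parse(lines):
        for line in lines:
            m = PAT.search(line)
            if m:
                yield int(m.group(1)), int(m.group(2)), float(m.group(3))

    demo = ("2023-03-09 17:31:16,087 INFO [train.py:898] (2/4) Epoch 24, "
            "batch 1100, loss[loss=0.1562, simple_loss=0.2537, "
            "pruned_loss=0.02941, over 18386.00 frames. ], "
            "tot_loss[loss=0.1599, simple_loss=0.2502, pruned_loss=0.03477, "
            "over 3585203.74 frames. ]")
    print(list(parse([demo])))  # [(24, 1100, 0.1599)]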
], batch size: 43, lr: 4.70e-03, grad_scale: 4.0 +2023-03-09 17:34:18,802 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0287, 5.8202, 5.3250, 5.7692, 5.2166, 5.5915, 6.0169, 5.8085], + device='cuda:2'), covar=tensor([0.2600, 0.1188, 0.0920, 0.1172, 0.2814, 0.1178, 0.1010, 0.1180], + device='cuda:2'), in_proj_covar=tensor([0.0625, 0.0551, 0.0389, 0.0570, 0.0770, 0.0561, 0.0784, 0.0596], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 17:34:27,891 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9918, 5.0901, 5.3340, 5.3765, 4.9684, 5.8872, 5.5087, 5.1859], + device='cuda:2'), covar=tensor([0.1289, 0.0816, 0.0796, 0.0868, 0.1593, 0.0783, 0.0690, 0.1784], + device='cuda:2'), in_proj_covar=tensor([0.0367, 0.0296, 0.0317, 0.0322, 0.0333, 0.0433, 0.0291, 0.0425], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 17:34:32,769 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8110, 3.7609, 5.1093, 2.8449, 4.4600, 2.5747, 3.1398, 1.8260], + device='cuda:2'), covar=tensor([0.1281, 0.0944, 0.0216, 0.1078, 0.0565, 0.3011, 0.2860, 0.2348], + device='cuda:2'), in_proj_covar=tensor([0.0227, 0.0250, 0.0210, 0.0205, 0.0265, 0.0278, 0.0335, 0.0244], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 17:34:36,651 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84855.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:34:43,635 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84861.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:35:06,904 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84881.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:35:08,996 INFO [train.py:898] (2/4) Epoch 24, batch 1300, loss[loss=0.1452, simple_loss=0.2321, pruned_loss=0.02913, over 18494.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2504, pruned_loss=0.03507, over 3599724.85 frames. ], batch size: 47, lr: 4.70e-03, grad_scale: 4.0 +2023-03-09 17:35:19,355 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84892.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:35:21,286 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.784e+02 2.717e+02 3.045e+02 3.603e+02 7.814e+02, threshold=6.090e+02, percent-clipped=3.0 +2023-03-09 17:35:22,333 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-09 17:35:31,911 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84903.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:35:56,405 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7089, 3.5910, 4.9620, 2.9580, 4.3264, 2.5390, 3.1145, 1.8267], + device='cuda:2'), covar=tensor([0.1232, 0.0962, 0.0150, 0.0931, 0.0514, 0.2588, 0.2636, 0.2202], + device='cuda:2'), in_proj_covar=tensor([0.0226, 0.0249, 0.0209, 0.0204, 0.0264, 0.0276, 0.0333, 0.0244], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 17:36:07,823 INFO [train.py:898] (2/4) Epoch 24, batch 1350, loss[loss=0.1778, simple_loss=0.2694, pruned_loss=0.04308, over 18447.00 frames. 
], tot_loss[loss=0.1597, simple_loss=0.2498, pruned_loss=0.0348, over 3600882.10 frames. ], batch size: 59, lr: 4.70e-03, grad_scale: 4.0 +2023-03-09 17:36:16,023 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=84940.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:36:25,438 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8940, 4.1438, 2.4380, 4.0299, 5.1979, 2.5880, 3.7185, 4.0171], + device='cuda:2'), covar=tensor([0.0219, 0.1352, 0.1740, 0.0753, 0.0106, 0.1288, 0.0748, 0.0795], + device='cuda:2'), in_proj_covar=tensor([0.0175, 0.0274, 0.0208, 0.0202, 0.0134, 0.0185, 0.0219, 0.0228], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 17:36:28,808 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84951.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:36:43,748 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.4404, 4.8415, 4.8441, 4.9382, 4.3561, 4.7466, 4.2002, 4.7214], + device='cuda:2'), covar=tensor([0.0268, 0.0357, 0.0247, 0.0443, 0.0422, 0.0296, 0.1219, 0.0392], + device='cuda:2'), in_proj_covar=tensor([0.0221, 0.0269, 0.0261, 0.0340, 0.0279, 0.0276, 0.0312, 0.0268], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 17:37:05,447 INFO [train.py:898] (2/4) Epoch 24, batch 1400, loss[loss=0.1488, simple_loss=0.2464, pruned_loss=0.02561, over 18311.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2492, pruned_loss=0.03451, over 3600626.29 frames. ], batch size: 54, lr: 4.70e-03, grad_scale: 4.0 +2023-03-09 17:37:13,030 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84989.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:37:15,461 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9394, 3.7705, 4.8375, 4.4582, 3.3101, 3.2018, 4.6075, 5.2257], + device='cuda:2'), covar=tensor([0.0725, 0.1505, 0.0314, 0.0400, 0.0952, 0.1128, 0.0343, 0.0252], + device='cuda:2'), in_proj_covar=tensor([0.0153, 0.0280, 0.0164, 0.0185, 0.0195, 0.0194, 0.0199, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 17:37:16,494 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84992.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:37:18,349 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.714e+02 2.377e+02 2.923e+02 3.851e+02 7.184e+02, threshold=5.846e+02, percent-clipped=2.0 +2023-03-09 17:37:39,016 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85012.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:37:41,791 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2232, 5.1572, 4.8550, 5.1386, 5.1065, 4.5573, 5.0528, 4.7859], + device='cuda:2'), covar=tensor([0.0430, 0.0554, 0.1316, 0.0702, 0.0637, 0.0435, 0.0438, 0.1116], + device='cuda:2'), in_proj_covar=tensor([0.0510, 0.0575, 0.0722, 0.0449, 0.0473, 0.0525, 0.0559, 0.0699], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 17:38:03,913 INFO [train.py:898] (2/4) Epoch 24, batch 1450, loss[loss=0.1601, simple_loss=0.2487, pruned_loss=0.03582, over 18483.00 frames. 
], tot_loss[loss=0.1599, simple_loss=0.2502, pruned_loss=0.03485, over 3600010.72 frames. ], batch size: 51, lr: 4.70e-03, grad_scale: 4.0 +2023-03-09 17:38:12,402 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85040.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:38:38,538 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6089, 3.4718, 4.6775, 4.1797, 3.1778, 2.9985, 4.1727, 4.8708], + device='cuda:2'), covar=tensor([0.0855, 0.1332, 0.0235, 0.0421, 0.0971, 0.1134, 0.0424, 0.0283], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0279, 0.0164, 0.0185, 0.0195, 0.0194, 0.0198, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 17:38:58,441 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8785, 2.9551, 2.1661, 3.2646, 2.5051, 2.9456, 2.3150, 2.9265], + device='cuda:2'), covar=tensor([0.0636, 0.0745, 0.1215, 0.0588, 0.0791, 0.0335, 0.1069, 0.0480], + device='cuda:2'), in_proj_covar=tensor([0.0220, 0.0230, 0.0193, 0.0292, 0.0196, 0.0274, 0.0206, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 17:39:01,471 INFO [train.py:898] (2/4) Epoch 24, batch 1500, loss[loss=0.1501, simple_loss=0.2298, pruned_loss=0.0352, over 18463.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2503, pruned_loss=0.03503, over 3593035.98 frames. ], batch size: 43, lr: 4.70e-03, grad_scale: 4.0 +2023-03-09 17:39:06,162 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9678, 5.5565, 2.7716, 5.3459, 5.2906, 5.5875, 5.4400, 2.9557], + device='cuda:2'), covar=tensor([0.0229, 0.0051, 0.0800, 0.0070, 0.0064, 0.0057, 0.0078, 0.0896], + device='cuda:2'), in_proj_covar=tensor([0.0090, 0.0082, 0.0096, 0.0096, 0.0087, 0.0077, 0.0086, 0.0097], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 17:39:14,117 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.797e+02 2.574e+02 3.009e+02 3.655e+02 5.814e+02, threshold=6.018e+02, percent-clipped=0.0 +2023-03-09 17:39:28,972 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85107.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:39:31,606 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-09 17:39:37,868 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85115.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:39:59,272 INFO [train.py:898] (2/4) Epoch 24, batch 1550, loss[loss=0.1591, simple_loss=0.2533, pruned_loss=0.03247, over 18491.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2499, pruned_loss=0.0348, over 3599036.03 frames. 
], batch size: 47, lr: 4.70e-03, grad_scale: 4.0 +2023-03-09 17:40:25,890 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85156.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:40:31,802 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2207, 4.3251, 2.9117, 4.2561, 5.4403, 3.0083, 4.0884, 4.0385], + device='cuda:2'), covar=tensor([0.0136, 0.1222, 0.1277, 0.0597, 0.0076, 0.0959, 0.0492, 0.0741], + device='cuda:2'), in_proj_covar=tensor([0.0175, 0.0272, 0.0206, 0.0201, 0.0133, 0.0185, 0.0218, 0.0227], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 17:40:39,432 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85168.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 17:40:48,499 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85176.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:40:55,239 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85181.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:40:57,801 INFO [train.py:898] (2/4) Epoch 24, batch 1600, loss[loss=0.1466, simple_loss=0.2458, pruned_loss=0.02366, over 18477.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2492, pruned_loss=0.03407, over 3600183.83 frames. ], batch size: 53, lr: 4.69e-03, grad_scale: 8.0 +2023-03-09 17:41:10,222 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-03-09 17:41:10,351 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.996e+02 2.511e+02 2.791e+02 3.550e+02 5.356e+02, threshold=5.582e+02, percent-clipped=0.0 +2023-03-09 17:41:51,991 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85229.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:41:57,473 INFO [train.py:898] (2/4) Epoch 24, batch 1650, loss[loss=0.1396, simple_loss=0.2255, pruned_loss=0.02689, over 18573.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2488, pruned_loss=0.03398, over 3597041.23 frames. ], batch size: 45, lr: 4.69e-03, grad_scale: 8.0 +2023-03-09 17:42:02,399 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5806, 2.2904, 2.4848, 2.5804, 3.1238, 4.3975, 4.3262, 3.2323], + device='cuda:2'), covar=tensor([0.1947, 0.2445, 0.3162, 0.1969, 0.2425, 0.0333, 0.0453, 0.0970], + device='cuda:2'), in_proj_covar=tensor([0.0313, 0.0354, 0.0393, 0.0284, 0.0394, 0.0252, 0.0300, 0.0264], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 17:42:22,185 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8140, 4.5873, 4.5071, 3.3043, 3.7864, 3.4340, 2.6590, 2.5387], + device='cuda:2'), covar=tensor([0.0243, 0.0156, 0.0095, 0.0339, 0.0347, 0.0243, 0.0733, 0.0860], + device='cuda:2'), in_proj_covar=tensor([0.0072, 0.0061, 0.0065, 0.0069, 0.0091, 0.0068, 0.0077, 0.0084], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 17:42:55,787 INFO [train.py:898] (2/4) Epoch 24, batch 1700, loss[loss=0.1768, simple_loss=0.2668, pruned_loss=0.0434, over 18252.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2484, pruned_loss=0.03408, over 3594081.30 frames. 
], batch size: 60, lr: 4.69e-03, grad_scale: 8.0 +2023-03-09 17:43:03,211 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85289.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:43:08,657 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.937e+02 2.815e+02 3.263e+02 3.900e+02 7.200e+02, threshold=6.526e+02, percent-clipped=2.0 +2023-03-09 17:43:23,860 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85307.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:43:41,508 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6119, 2.4143, 2.5631, 2.5884, 3.2101, 4.8268, 4.7053, 3.2711], + device='cuda:2'), covar=tensor([0.1984, 0.2403, 0.3287, 0.1991, 0.2509, 0.0234, 0.0374, 0.1034], + device='cuda:2'), in_proj_covar=tensor([0.0314, 0.0356, 0.0395, 0.0285, 0.0396, 0.0254, 0.0301, 0.0265], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 17:43:49,063 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7524, 5.1545, 5.1401, 5.2050, 4.6112, 5.0691, 4.5005, 5.0594], + device='cuda:2'), covar=tensor([0.0241, 0.0317, 0.0231, 0.0372, 0.0483, 0.0237, 0.1226, 0.0302], + device='cuda:2'), in_proj_covar=tensor([0.0221, 0.0268, 0.0262, 0.0342, 0.0279, 0.0275, 0.0312, 0.0267], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0007, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 17:43:52,198 INFO [train.py:898] (2/4) Epoch 24, batch 1750, loss[loss=0.1673, simple_loss=0.264, pruned_loss=0.03529, over 18143.00 frames. ], tot_loss[loss=0.159, simple_loss=0.2493, pruned_loss=0.03436, over 3589846.31 frames. ], batch size: 62, lr: 4.69e-03, grad_scale: 8.0 +2023-03-09 17:43:57,786 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85337.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:44:22,538 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-09 17:44:48,395 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8944, 4.1037, 2.4564, 4.2241, 5.2313, 2.8777, 3.9493, 4.0166], + device='cuda:2'), covar=tensor([0.0211, 0.1260, 0.1708, 0.0608, 0.0096, 0.1091, 0.0617, 0.0807], + device='cuda:2'), in_proj_covar=tensor([0.0175, 0.0274, 0.0206, 0.0201, 0.0134, 0.0185, 0.0219, 0.0227], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 17:44:49,066 INFO [train.py:898] (2/4) Epoch 24, batch 1800, loss[loss=0.1379, simple_loss=0.2275, pruned_loss=0.02417, over 18339.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.25, pruned_loss=0.03438, over 3589537.25 frames. 
], batch size: 46, lr: 4.69e-03, grad_scale: 8.0 +2023-03-09 17:45:01,654 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.3932, 4.7382, 4.3593, 4.6445, 4.4760, 4.4189, 4.8625, 4.7632], + device='cuda:2'), covar=tensor([0.1051, 0.0874, 0.1920, 0.0742, 0.1383, 0.0692, 0.0682, 0.0857], + device='cuda:2'), in_proj_covar=tensor([0.0624, 0.0547, 0.0391, 0.0573, 0.0768, 0.0560, 0.0782, 0.0594], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 17:45:02,454 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.840e+02 2.661e+02 3.005e+02 3.822e+02 6.057e+02, threshold=6.010e+02, percent-clipped=0.0 +2023-03-09 17:45:11,055 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9840, 4.2027, 2.6045, 4.2792, 5.2233, 2.8084, 4.0228, 4.1227], + device='cuda:2'), covar=tensor([0.0184, 0.1239, 0.1537, 0.0573, 0.0098, 0.1082, 0.0574, 0.0624], + device='cuda:2'), in_proj_covar=tensor([0.0175, 0.0275, 0.0206, 0.0202, 0.0134, 0.0186, 0.0220, 0.0228], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 17:45:27,462 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8756, 5.1662, 2.6675, 5.0709, 4.8151, 5.1456, 4.9509, 2.4428], + device='cuda:2'), covar=tensor([0.0236, 0.0082, 0.0831, 0.0105, 0.0091, 0.0129, 0.0122, 0.1211], + device='cuda:2'), in_proj_covar=tensor([0.0088, 0.0081, 0.0096, 0.0095, 0.0086, 0.0076, 0.0085, 0.0095], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 17:45:42,417 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85428.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:45:47,766 INFO [train.py:898] (2/4) Epoch 24, batch 1850, loss[loss=0.1682, simple_loss=0.2503, pruned_loss=0.04306, over 18416.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2505, pruned_loss=0.03476, over 3589320.35 frames. 
], batch size: 48, lr: 4.69e-03, grad_scale: 8.0 +2023-03-09 17:46:05,519 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3880, 5.8289, 5.5074, 5.6564, 5.4400, 5.2771, 5.9361, 5.8666], + device='cuda:2'), covar=tensor([0.1163, 0.0814, 0.0434, 0.0779, 0.1481, 0.0751, 0.0622, 0.0764], + device='cuda:2'), in_proj_covar=tensor([0.0622, 0.0544, 0.0389, 0.0571, 0.0765, 0.0559, 0.0780, 0.0593], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 17:46:15,486 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85456.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:46:23,634 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85463.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 17:46:30,400 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6798, 3.6701, 3.5111, 3.1913, 3.3871, 2.8532, 2.7710, 3.6661], + device='cuda:2'), covar=tensor([0.0065, 0.0101, 0.0081, 0.0139, 0.0109, 0.0194, 0.0235, 0.0066], + device='cuda:2'), in_proj_covar=tensor([0.0146, 0.0166, 0.0139, 0.0190, 0.0148, 0.0181, 0.0187, 0.0125], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 17:46:32,414 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85471.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:46:45,595 INFO [train.py:898] (2/4) Epoch 24, batch 1900, loss[loss=0.1615, simple_loss=0.26, pruned_loss=0.03151, over 18357.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.2506, pruned_loss=0.03463, over 3599366.42 frames. ], batch size: 50, lr: 4.69e-03, grad_scale: 8.0 +2023-03-09 17:46:46,000 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85483.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:46:52,927 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85489.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:46:58,608 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.851e+02 2.648e+02 3.187e+02 3.710e+02 7.319e+02, threshold=6.375e+02, percent-clipped=2.0 +2023-03-09 17:47:10,833 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85504.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:47:24,464 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9170, 4.1434, 2.4202, 4.1992, 5.1886, 2.6749, 4.0459, 4.2734], + device='cuda:2'), covar=tensor([0.0209, 0.1137, 0.1639, 0.0550, 0.0111, 0.1148, 0.0574, 0.0576], + device='cuda:2'), in_proj_covar=tensor([0.0176, 0.0275, 0.0207, 0.0202, 0.0135, 0.0187, 0.0221, 0.0228], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 17:47:40,203 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85530.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:47:43,284 INFO [train.py:898] (2/4) Epoch 24, batch 1950, loss[loss=0.1314, simple_loss=0.2101, pruned_loss=0.02639, over 18389.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2509, pruned_loss=0.03469, over 3598316.35 frames. 
], batch size: 43, lr: 4.68e-03, grad_scale: 8.0 +2023-03-09 17:47:51,430 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0965, 3.4870, 3.3577, 2.8723, 3.1155, 2.8105, 2.4682, 2.3186], + device='cuda:2'), covar=tensor([0.0305, 0.0186, 0.0145, 0.0322, 0.0341, 0.0272, 0.0644, 0.0765], + device='cuda:2'), in_proj_covar=tensor([0.0072, 0.0061, 0.0065, 0.0069, 0.0092, 0.0068, 0.0078, 0.0085], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 17:47:57,204 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85544.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:48:18,523 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.94 vs. limit=5.0 +2023-03-09 17:48:28,449 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7362, 4.4343, 4.3325, 3.2377, 3.7028, 3.3305, 2.4596, 2.6041], + device='cuda:2'), covar=tensor([0.0266, 0.0139, 0.0090, 0.0339, 0.0378, 0.0263, 0.0752, 0.0789], + device='cuda:2'), in_proj_covar=tensor([0.0073, 0.0061, 0.0065, 0.0069, 0.0091, 0.0068, 0.0078, 0.0085], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0004, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 17:48:41,759 INFO [train.py:898] (2/4) Epoch 24, batch 2000, loss[loss=0.1513, simple_loss=0.2358, pruned_loss=0.03337, over 18271.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2511, pruned_loss=0.03496, over 3588751.28 frames. ], batch size: 47, lr: 4.68e-03, grad_scale: 8.0 +2023-03-09 17:48:50,868 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85591.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:48:54,305 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5930, 6.0503, 5.5495, 5.9075, 5.6779, 5.5249, 6.1495, 6.0603], + device='cuda:2'), covar=tensor([0.1156, 0.0698, 0.0500, 0.0688, 0.1356, 0.0695, 0.0509, 0.0698], + device='cuda:2'), in_proj_covar=tensor([0.0629, 0.0547, 0.0394, 0.0575, 0.0773, 0.0567, 0.0786, 0.0600], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 17:48:55,651 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.729e+02 2.662e+02 3.281e+02 4.045e+02 7.861e+02, threshold=6.561e+02, percent-clipped=1.0 +2023-03-09 17:49:10,687 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85607.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:49:25,008 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7473, 3.4721, 5.3452, 3.1029, 4.6766, 2.5835, 3.1208, 1.8535], + device='cuda:2'), covar=tensor([0.1184, 0.1014, 0.0107, 0.0761, 0.0398, 0.2534, 0.2534, 0.2091], + device='cuda:2'), in_proj_covar=tensor([0.0226, 0.0248, 0.0208, 0.0203, 0.0261, 0.0275, 0.0332, 0.0243], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 17:49:31,490 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85625.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:49:40,113 INFO [train.py:898] (2/4) Epoch 24, batch 2050, loss[loss=0.1418, simple_loss=0.2283, pruned_loss=0.02767, over 18276.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2504, pruned_loss=0.03464, over 3585496.74 frames. 
], batch size: 45, lr: 4.68e-03, grad_scale: 8.0 +2023-03-09 17:50:06,869 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85655.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:50:08,615 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.79 vs. limit=5.0 +2023-03-09 17:50:38,496 INFO [train.py:898] (2/4) Epoch 24, batch 2100, loss[loss=0.168, simple_loss=0.2527, pruned_loss=0.04159, over 18395.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2503, pruned_loss=0.03497, over 3567403.53 frames. ], batch size: 50, lr: 4.68e-03, grad_scale: 8.0 +2023-03-09 17:50:42,198 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85686.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 17:50:52,034 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.890e+02 2.606e+02 3.124e+02 4.074e+02 9.215e+02, threshold=6.247e+02, percent-clipped=3.0 +2023-03-09 17:51:02,137 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7904, 4.1084, 2.3495, 4.1118, 5.1847, 2.5451, 3.7630, 3.9947], + device='cuda:2'), covar=tensor([0.0208, 0.1117, 0.1800, 0.0626, 0.0085, 0.1344, 0.0752, 0.0701], + device='cuda:2'), in_proj_covar=tensor([0.0175, 0.0274, 0.0206, 0.0201, 0.0134, 0.0186, 0.0219, 0.0227], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 17:51:37,109 INFO [train.py:898] (2/4) Epoch 24, batch 2150, loss[loss=0.1811, simple_loss=0.2688, pruned_loss=0.04668, over 13345.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2507, pruned_loss=0.03493, over 3565906.89 frames. ], batch size: 131, lr: 4.68e-03, grad_scale: 8.0 +2023-03-09 17:52:12,649 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1265, 5.2060, 5.2583, 4.9556, 4.9981, 5.0382, 5.3164, 5.2855], + device='cuda:2'), covar=tensor([0.0065, 0.0063, 0.0049, 0.0116, 0.0051, 0.0134, 0.0086, 0.0100], + device='cuda:2'), in_proj_covar=tensor([0.0097, 0.0072, 0.0077, 0.0097, 0.0077, 0.0107, 0.0090, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 17:52:12,663 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85763.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:52:21,462 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85771.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:52:35,318 INFO [train.py:898] (2/4) Epoch 24, batch 2200, loss[loss=0.1713, simple_loss=0.2671, pruned_loss=0.03773, over 18297.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.25, pruned_loss=0.03474, over 3583295.34 frames. 
], batch size: 57, lr: 4.68e-03, grad_scale: 4.0 +2023-03-09 17:52:36,665 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85784.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:52:37,941 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9551, 4.9289, 4.6066, 4.8956, 4.8930, 4.3050, 4.8208, 4.5287], + device='cuda:2'), covar=tensor([0.0482, 0.0530, 0.1340, 0.0794, 0.0613, 0.0492, 0.0444, 0.1131], + device='cuda:2'), in_proj_covar=tensor([0.0508, 0.0576, 0.0722, 0.0447, 0.0470, 0.0525, 0.0556, 0.0698], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 17:52:49,954 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.809e+02 2.630e+02 3.062e+02 3.735e+02 6.133e+02, threshold=6.125e+02, percent-clipped=0.0 +2023-03-09 17:53:08,617 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85811.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:53:17,771 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=85819.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:53:25,812 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9477, 5.4274, 3.0184, 5.2998, 5.1858, 5.4799, 5.3051, 3.0328], + device='cuda:2'), covar=tensor([0.0209, 0.0081, 0.0651, 0.0065, 0.0066, 0.0065, 0.0083, 0.0815], + device='cuda:2'), in_proj_covar=tensor([0.0089, 0.0082, 0.0097, 0.0096, 0.0087, 0.0077, 0.0086, 0.0097], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 17:53:33,954 INFO [train.py:898] (2/4) Epoch 24, batch 2250, loss[loss=0.1672, simple_loss=0.2625, pruned_loss=0.03588, over 18299.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2499, pruned_loss=0.0348, over 3591420.27 frames. ], batch size: 54, lr: 4.68e-03, grad_scale: 4.0 +2023-03-09 17:53:41,002 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85839.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:53:50,135 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5490, 5.5188, 5.1180, 5.5254, 5.4098, 4.8735, 5.3482, 5.0883], + device='cuda:2'), covar=tensor([0.0407, 0.0417, 0.1298, 0.0654, 0.0625, 0.0406, 0.0405, 0.1050], + device='cuda:2'), in_proj_covar=tensor([0.0510, 0.0577, 0.0725, 0.0447, 0.0473, 0.0526, 0.0559, 0.0700], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 17:54:23,523 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85875.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:54:28,164 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6037, 3.5316, 4.8744, 4.0746, 3.2073, 2.8969, 4.2427, 5.0910], + device='cuda:2'), covar=tensor([0.0822, 0.1438, 0.0195, 0.0503, 0.0968, 0.1226, 0.0407, 0.0281], + device='cuda:2'), in_proj_covar=tensor([0.0154, 0.0282, 0.0165, 0.0186, 0.0197, 0.0196, 0.0199, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 17:54:33,026 INFO [train.py:898] (2/4) Epoch 24, batch 2300, loss[loss=0.1659, simple_loss=0.2591, pruned_loss=0.03631, over 18292.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.25, pruned_loss=0.03493, over 3589897.17 frames. 
], batch size: 57, lr: 4.68e-03, grad_scale: 4.0 +2023-03-09 17:54:36,448 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85886.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:54:41,623 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 17:54:47,540 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.050e+02 2.637e+02 3.133e+02 3.716e+02 1.062e+03, threshold=6.266e+02, percent-clipped=3.0 +2023-03-09 17:55:23,145 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8009, 3.1358, 4.5769, 3.8481, 2.7913, 4.7432, 4.0032, 3.0251], + device='cuda:2'), covar=tensor([0.0445, 0.1273, 0.0241, 0.0419, 0.1466, 0.0199, 0.0502, 0.0940], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0239, 0.0219, 0.0167, 0.0226, 0.0215, 0.0254, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 17:55:27,559 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85930.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 17:55:30,548 INFO [train.py:898] (2/4) Epoch 24, batch 2350, loss[loss=0.1628, simple_loss=0.254, pruned_loss=0.0358, over 18473.00 frames. ], tot_loss[loss=0.1602, simple_loss=0.2501, pruned_loss=0.03515, over 3594884.42 frames. ], batch size: 53, lr: 4.67e-03, grad_scale: 4.0 +2023-03-09 17:55:34,688 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85936.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:55:42,504 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.6227, 5.5620, 5.2272, 5.5662, 5.5090, 4.9458, 5.4211, 5.1443], + device='cuda:2'), covar=tensor([0.0380, 0.0375, 0.1143, 0.0656, 0.0497, 0.0362, 0.0400, 0.1022], + device='cuda:2'), in_proj_covar=tensor([0.0510, 0.0577, 0.0723, 0.0445, 0.0473, 0.0525, 0.0559, 0.0697], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 17:55:58,178 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85957.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:56:00,507 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7764, 4.8476, 4.9201, 4.6155, 4.6937, 4.6706, 4.9771, 4.9460], + device='cuda:2'), covar=tensor([0.0076, 0.0070, 0.0064, 0.0126, 0.0064, 0.0169, 0.0100, 0.0106], + device='cuda:2'), in_proj_covar=tensor([0.0096, 0.0071, 0.0076, 0.0096, 0.0076, 0.0106, 0.0089, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 17:56:26,823 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85981.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 17:56:28,820 INFO [train.py:898] (2/4) Epoch 24, batch 2400, loss[loss=0.178, simple_loss=0.2665, pruned_loss=0.04473, over 18351.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.2505, pruned_loss=0.03551, over 3588824.51 frames. 
], batch size: 56, lr: 4.67e-03, grad_scale: 8.0 +2023-03-09 17:56:38,986 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85991.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 17:56:44,284 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.063e+02 2.665e+02 3.240e+02 3.852e+02 9.011e+02, threshold=6.481e+02, percent-clipped=2.0 +2023-03-09 17:56:56,557 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86002.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:57:15,746 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86018.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:57:32,751 INFO [train.py:898] (2/4) Epoch 24, batch 2450, loss[loss=0.185, simple_loss=0.2775, pruned_loss=0.04625, over 18319.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.2509, pruned_loss=0.03519, over 3593001.30 frames. ], batch size: 57, lr: 4.67e-03, grad_scale: 8.0 +2023-03-09 17:57:54,813 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86052.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:58:07,270 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86063.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:58:14,923 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86069.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:58:31,046 INFO [train.py:898] (2/4) Epoch 24, batch 2500, loss[loss=0.1421, simple_loss=0.2304, pruned_loss=0.02693, over 18170.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2508, pruned_loss=0.03506, over 3598790.14 frames. ], batch size: 44, lr: 4.67e-03, grad_scale: 4.0 +2023-03-09 17:58:32,386 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86084.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:58:46,797 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.916e+02 2.583e+02 3.100e+02 3.790e+02 6.020e+02, threshold=6.201e+02, percent-clipped=0.0 +2023-03-09 17:58:55,316 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-03-09 17:59:04,977 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86113.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:59:22,740 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0488, 4.2744, 2.3665, 4.0921, 5.3563, 2.7792, 3.9607, 4.0658], + device='cuda:2'), covar=tensor([0.0175, 0.1155, 0.1765, 0.0651, 0.0067, 0.1274, 0.0633, 0.0709], + device='cuda:2'), in_proj_covar=tensor([0.0174, 0.0273, 0.0206, 0.0200, 0.0133, 0.0185, 0.0218, 0.0227], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 17:59:25,975 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86130.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:59:27,920 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86132.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:59:28,884 INFO [train.py:898] (2/4) Epoch 24, batch 2550, loss[loss=0.147, simple_loss=0.2398, pruned_loss=0.02711, over 18366.00 frames. ], tot_loss[loss=0.1605, simple_loss=0.2508, pruned_loss=0.03515, over 3592730.59 frames. 
], batch size: 46, lr: 4.67e-03, grad_scale: 4.0 +2023-03-09 17:59:35,723 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86139.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 17:59:37,309 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.96 vs. limit=5.0 +2023-03-09 18:00:26,690 INFO [train.py:898] (2/4) Epoch 24, batch 2600, loss[loss=0.1724, simple_loss=0.2686, pruned_loss=0.03807, over 18513.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.252, pruned_loss=0.03546, over 3584393.92 frames. ], batch size: 53, lr: 4.67e-03, grad_scale: 4.0 +2023-03-09 18:00:30,337 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86186.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:00:31,346 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86187.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:00:42,999 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.777e+02 2.596e+02 3.106e+02 3.737e+02 7.665e+02, threshold=6.211e+02, percent-clipped=2.0 +2023-03-09 18:01:23,371 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86231.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:01:25,445 INFO [train.py:898] (2/4) Epoch 24, batch 2650, loss[loss=0.1539, simple_loss=0.2485, pruned_loss=0.02965, over 18342.00 frames. ], tot_loss[loss=0.161, simple_loss=0.2516, pruned_loss=0.0352, over 3568757.03 frames. ], batch size: 55, lr: 4.67e-03, grad_scale: 4.0 +2023-03-09 18:01:26,649 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86234.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:01:41,028 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86246.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:02:22,554 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86281.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:02:24,600 INFO [train.py:898] (2/4) Epoch 24, batch 2700, loss[loss=0.1598, simple_loss=0.2465, pruned_loss=0.03659, over 18381.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2512, pruned_loss=0.03525, over 3558550.69 frames. ], batch size: 42, lr: 4.66e-03, grad_scale: 4.0 +2023-03-09 18:02:28,144 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86286.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:02:41,077 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.842e+02 2.486e+02 2.896e+02 3.686e+02 7.740e+02, threshold=5.792e+02, percent-clipped=4.0 +2023-03-09 18:02:52,852 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86307.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:02:59,471 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86313.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:02:59,664 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5345, 3.3884, 2.2978, 4.3838, 2.9001, 4.1288, 2.2313, 3.8887], + device='cuda:2'), covar=tensor([0.0670, 0.0880, 0.1466, 0.0443, 0.0952, 0.0311, 0.1368, 0.0433], + device='cuda:2'), in_proj_covar=tensor([0.0218, 0.0227, 0.0191, 0.0289, 0.0195, 0.0269, 0.0203, 0.0203], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 18:03:15,567 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. 
limit=2.0 +2023-03-09 18:03:18,480 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86329.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:03:23,319 INFO [train.py:898] (2/4) Epoch 24, batch 2750, loss[loss=0.1991, simple_loss=0.2884, pruned_loss=0.0549, over 18076.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.2512, pruned_loss=0.03512, over 3570995.58 frames. ], batch size: 62, lr: 4.66e-03, grad_scale: 4.0 +2023-03-09 18:03:52,864 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86358.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:03:54,134 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3450, 5.3037, 4.9808, 5.2711, 5.2740, 4.7190, 5.2094, 4.8858], + device='cuda:2'), covar=tensor([0.0439, 0.0453, 0.1194, 0.0752, 0.0584, 0.0425, 0.0401, 0.1083], + device='cuda:2'), in_proj_covar=tensor([0.0511, 0.0578, 0.0721, 0.0443, 0.0471, 0.0525, 0.0556, 0.0691], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 18:04:05,264 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1170, 5.0936, 4.7533, 5.0618, 5.0554, 4.4936, 4.9772, 4.7493], + device='cuda:2'), covar=tensor([0.0441, 0.0507, 0.1295, 0.0756, 0.0606, 0.0452, 0.0439, 0.1084], + device='cuda:2'), in_proj_covar=tensor([0.0511, 0.0579, 0.0722, 0.0443, 0.0471, 0.0526, 0.0557, 0.0692], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 18:04:21,061 INFO [train.py:898] (2/4) Epoch 24, batch 2800, loss[loss=0.1662, simple_loss=0.2605, pruned_loss=0.03599, over 18315.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2502, pruned_loss=0.03491, over 3581677.46 frames. ], batch size: 54, lr: 4.66e-03, grad_scale: 8.0 +2023-03-09 18:04:37,476 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.618e+02 2.577e+02 3.017e+02 3.554e+02 5.358e+02, threshold=6.034e+02, percent-clipped=0.0 +2023-03-09 18:04:50,982 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86408.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:05:10,669 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86425.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:05:19,743 INFO [train.py:898] (2/4) Epoch 24, batch 2850, loss[loss=0.1428, simple_loss=0.2275, pruned_loss=0.02905, over 18248.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2498, pruned_loss=0.03455, over 3585697.20 frames. 
], batch size: 45, lr: 4.66e-03, grad_scale: 8.0 +2023-03-09 18:05:29,122 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86440.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:05:31,383 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0262, 5.0991, 5.1260, 4.8080, 4.8599, 4.9398, 5.1594, 5.1186], + device='cuda:2'), covar=tensor([0.0078, 0.0072, 0.0068, 0.0127, 0.0064, 0.0171, 0.0088, 0.0114], + device='cuda:2'), in_proj_covar=tensor([0.0098, 0.0072, 0.0078, 0.0097, 0.0078, 0.0108, 0.0091, 0.0090], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 18:05:34,595 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86445.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:06:18,032 INFO [train.py:898] (2/4) Epoch 24, batch 2900, loss[loss=0.1767, simple_loss=0.2725, pruned_loss=0.04041, over 18039.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2488, pruned_loss=0.03423, over 3587574.38 frames. ], batch size: 65, lr: 4.66e-03, grad_scale: 4.0 +2023-03-09 18:06:36,144 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.815e+02 2.792e+02 3.220e+02 3.955e+02 8.779e+02, threshold=6.440e+02, percent-clipped=4.0 +2023-03-09 18:06:39,811 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8480, 4.4605, 4.4795, 3.4728, 3.7564, 3.4982, 2.4804, 2.4918], + device='cuda:2'), covar=tensor([0.0208, 0.0155, 0.0095, 0.0292, 0.0316, 0.0220, 0.0774, 0.0880], + device='cuda:2'), in_proj_covar=tensor([0.0073, 0.0062, 0.0065, 0.0069, 0.0091, 0.0068, 0.0078, 0.0085], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 18:06:39,871 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86501.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:06:45,482 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86506.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 18:06:53,683 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.3941, 4.7771, 4.3224, 4.6623, 4.4842, 4.3938, 4.8967, 4.7992], + device='cuda:2'), covar=tensor([0.1196, 0.0813, 0.1865, 0.0756, 0.1337, 0.0789, 0.0656, 0.0733], + device='cuda:2'), in_proj_covar=tensor([0.0617, 0.0540, 0.0386, 0.0567, 0.0759, 0.0560, 0.0773, 0.0590], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 18:07:14,293 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86531.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:07:16,278 INFO [train.py:898] (2/4) Epoch 24, batch 2950, loss[loss=0.1519, simple_loss=0.2407, pruned_loss=0.03156, over 18492.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2485, pruned_loss=0.03407, over 3592320.74 frames. ], batch size: 47, lr: 4.66e-03, grad_scale: 4.0 +2023-03-09 18:08:10,861 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86579.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:08:12,180 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86580.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:08:15,201 INFO [train.py:898] (2/4) Epoch 24, batch 3000, loss[loss=0.1724, simple_loss=0.2674, pruned_loss=0.03869, over 18278.00 frames. 
], tot_loss[loss=0.1583, simple_loss=0.2485, pruned_loss=0.03407, over 3588029.37 frames. ], batch size: 57, lr: 4.66e-03, grad_scale: 4.0 +2023-03-09 18:08:15,202 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 18:08:27,160 INFO [train.py:932] (2/4) Epoch 24, validation: loss=0.1501, simple_loss=0.2489, pruned_loss=0.02569, over 944034.00 frames. +2023-03-09 18:08:27,161 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 18:08:31,503 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86586.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:08:41,516 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6331, 3.9838, 2.5232, 3.8179, 4.8637, 2.5196, 3.4863, 3.6777], + device='cuda:2'), covar=tensor([0.0214, 0.1155, 0.1625, 0.0691, 0.0102, 0.1313, 0.0844, 0.0845], + device='cuda:2'), in_proj_covar=tensor([0.0176, 0.0277, 0.0207, 0.0202, 0.0135, 0.0187, 0.0220, 0.0229], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 18:08:44,561 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.708e+02 2.585e+02 3.043e+02 3.671e+02 7.667e+02, threshold=6.086e+02, percent-clipped=3.0 +2023-03-09 18:08:47,825 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-09 18:08:50,191 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86602.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:09:02,768 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86613.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:09:06,728 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8847, 5.3598, 5.3605, 5.3495, 4.8244, 5.2354, 4.7199, 5.2688], + device='cuda:2'), covar=tensor([0.0272, 0.0327, 0.0179, 0.0387, 0.0386, 0.0243, 0.1096, 0.0293], + device='cuda:2'), in_proj_covar=tensor([0.0222, 0.0269, 0.0264, 0.0344, 0.0279, 0.0275, 0.0312, 0.0267], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 18:09:26,132 INFO [train.py:898] (2/4) Epoch 24, batch 3050, loss[loss=0.1421, simple_loss=0.2358, pruned_loss=0.02416, over 18286.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2498, pruned_loss=0.03448, over 3585528.88 frames. ], batch size: 49, lr: 4.66e-03, grad_scale: 4.0 +2023-03-09 18:09:27,463 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86634.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:09:35,945 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86641.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:09:55,823 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86658.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:09:59,252 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86661.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:10:24,348 INFO [train.py:898] (2/4) Epoch 24, batch 3100, loss[loss=0.172, simple_loss=0.255, pruned_loss=0.0445, over 18376.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2496, pruned_loss=0.03455, over 3588959.85 frames. 
], batch size: 50, lr: 4.65e-03, grad_scale: 4.0 +2023-03-09 18:10:41,642 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.778e+02 2.436e+02 2.944e+02 3.607e+02 6.796e+02, threshold=5.887e+02, percent-clipped=2.0 +2023-03-09 18:10:51,066 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86706.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:10:54,049 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86708.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:11:13,492 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86725.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:11:22,863 INFO [train.py:898] (2/4) Epoch 24, batch 3150, loss[loss=0.1577, simple_loss=0.2556, pruned_loss=0.02992, over 18402.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2499, pruned_loss=0.03462, over 3580423.39 frames. ], batch size: 52, lr: 4.65e-03, grad_scale: 4.0 +2023-03-09 18:11:49,644 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86756.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:12:09,639 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86773.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:12:13,772 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-09 18:12:20,968 INFO [train.py:898] (2/4) Epoch 24, batch 3200, loss[loss=0.2133, simple_loss=0.292, pruned_loss=0.06727, over 12671.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2505, pruned_loss=0.03483, over 3573440.49 frames. ], batch size: 129, lr: 4.65e-03, grad_scale: 8.0 +2023-03-09 18:12:22,661 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8715, 3.7545, 5.0936, 2.9253, 4.4048, 2.6007, 3.1319, 1.6732], + device='cuda:2'), covar=tensor([0.1246, 0.0958, 0.0155, 0.0990, 0.0519, 0.2701, 0.2747, 0.2387], + device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0248, 0.0211, 0.0203, 0.0261, 0.0277, 0.0331, 0.0243], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 18:12:36,043 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86796.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:12:37,986 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.596e+02 2.507e+02 2.980e+02 3.406e+02 8.664e+02, threshold=5.960e+02, percent-clipped=4.0 +2023-03-09 18:12:42,542 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86801.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:12:57,938 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86814.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:13:19,212 INFO [train.py:898] (2/4) Epoch 24, batch 3250, loss[loss=0.1768, simple_loss=0.2638, pruned_loss=0.04491, over 17888.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.2503, pruned_loss=0.03475, over 3576765.33 frames. 
], batch size: 70, lr: 4.65e-03, grad_scale: 8.0 +2023-03-09 18:13:51,315 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8572, 5.3451, 5.3324, 5.2940, 4.7991, 5.2026, 4.7270, 5.2361], + device='cuda:2'), covar=tensor([0.0251, 0.0315, 0.0175, 0.0453, 0.0386, 0.0213, 0.1020, 0.0294], + device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0273, 0.0268, 0.0350, 0.0284, 0.0279, 0.0317, 0.0272], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0006, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 18:14:08,876 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86875.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:14:12,222 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86878.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:14:14,993 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2640, 5.2504, 4.8818, 5.2016, 5.1837, 4.6424, 5.1159, 4.8804], + device='cuda:2'), covar=tensor([0.0477, 0.0465, 0.1434, 0.0794, 0.0709, 0.0425, 0.0439, 0.1092], + device='cuda:2'), in_proj_covar=tensor([0.0513, 0.0579, 0.0725, 0.0445, 0.0476, 0.0530, 0.0562, 0.0696], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 18:14:18,047 INFO [train.py:898] (2/4) Epoch 24, batch 3300, loss[loss=0.1467, simple_loss=0.2406, pruned_loss=0.02641, over 18490.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2498, pruned_loss=0.03464, over 3576652.77 frames. ], batch size: 51, lr: 4.65e-03, grad_scale: 8.0 +2023-03-09 18:14:35,712 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.783e+02 2.678e+02 3.309e+02 3.868e+02 1.091e+03, threshold=6.618e+02, percent-clipped=4.0 +2023-03-09 18:14:40,545 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86902.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:14:50,276 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.11 vs. limit=5.0 +2023-03-09 18:15:16,767 INFO [train.py:898] (2/4) Epoch 24, batch 3350, loss[loss=0.1449, simple_loss=0.2315, pruned_loss=0.02917, over 18381.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2495, pruned_loss=0.03458, over 3571474.19 frames. ], batch size: 50, lr: 4.65e-03, grad_scale: 8.0 +2023-03-09 18:15:20,489 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86936.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:15:24,155 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86939.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:15:36,571 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=86950.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:16:14,777 INFO [train.py:898] (2/4) Epoch 24, batch 3400, loss[loss=0.1498, simple_loss=0.2471, pruned_loss=0.02629, over 18633.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2493, pruned_loss=0.0344, over 3576451.45 frames. 
], batch size: 52, lr: 4.65e-03, grad_scale: 8.0 +2023-03-09 18:16:32,664 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.872e+02 2.525e+02 2.880e+02 3.383e+02 6.366e+02, threshold=5.759e+02, percent-clipped=0.0 +2023-03-09 18:16:38,101 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87002.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:16:56,957 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-03-09 18:17:13,561 INFO [train.py:898] (2/4) Epoch 24, batch 3450, loss[loss=0.1637, simple_loss=0.2548, pruned_loss=0.03631, over 18219.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.2484, pruned_loss=0.03397, over 3587417.29 frames. ], batch size: 60, lr: 4.64e-03, grad_scale: 8.0 +2023-03-09 18:17:19,990 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5761, 6.0436, 5.6025, 5.8823, 5.6638, 5.5575, 6.1585, 6.0845], + device='cuda:2'), covar=tensor([0.1266, 0.0732, 0.0412, 0.0709, 0.1415, 0.0678, 0.0582, 0.0696], + device='cuda:2'), in_proj_covar=tensor([0.0625, 0.0549, 0.0395, 0.0573, 0.0773, 0.0564, 0.0783, 0.0597], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 18:17:48,481 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87063.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 18:17:56,860 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6954, 4.0781, 5.3056, 4.5526, 2.9113, 2.9523, 4.4580, 5.5415], + device='cuda:2'), covar=tensor([0.0859, 0.1430, 0.0195, 0.0390, 0.1207, 0.1277, 0.0431, 0.0180], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0281, 0.0165, 0.0185, 0.0194, 0.0194, 0.0197, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 18:17:59,420 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-03-09 18:18:03,467 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6134, 3.2511, 4.3686, 3.9683, 3.0699, 2.9806, 3.8680, 4.5072], + device='cuda:2'), covar=tensor([0.0845, 0.1539, 0.0284, 0.0423, 0.1043, 0.1225, 0.0474, 0.0254], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0280, 0.0165, 0.0185, 0.0194, 0.0193, 0.0197, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 18:18:11,266 INFO [train.py:898] (2/4) Epoch 24, batch 3500, loss[loss=0.1336, simple_loss=0.2187, pruned_loss=0.02429, over 18418.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2484, pruned_loss=0.03421, over 3587085.69 frames. ], batch size: 43, lr: 4.64e-03, grad_scale: 8.0 +2023-03-09 18:18:26,453 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87096.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:18:28,425 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.855e+02 2.643e+02 3.056e+02 3.679e+02 7.050e+02, threshold=6.112e+02, percent-clipped=3.0 +2023-03-09 18:18:32,029 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87101.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 18:19:06,666 INFO [train.py:898] (2/4) Epoch 24, batch 3550, loss[loss=0.1606, simple_loss=0.252, pruned_loss=0.03464, over 17075.00 frames. ], tot_loss[loss=0.159, simple_loss=0.2492, pruned_loss=0.03435, over 3588977.92 frames. 
], batch size: 78, lr: 4.64e-03, grad_scale: 8.0 +2023-03-09 18:19:18,593 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87144.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:19:23,970 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87149.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:19:46,344 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87170.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:19:54,795 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.55 vs. limit=5.0 +2023-03-09 18:19:58,734 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6624, 3.6833, 3.5214, 3.1114, 3.4446, 2.7635, 2.8167, 3.7736], + device='cuda:2'), covar=tensor([0.0067, 0.0088, 0.0087, 0.0139, 0.0094, 0.0193, 0.0210, 0.0058], + device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0168, 0.0142, 0.0193, 0.0149, 0.0184, 0.0189, 0.0127], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 18:20:00,498 INFO [train.py:898] (2/4) Epoch 24, batch 3600, loss[loss=0.159, simple_loss=0.2589, pruned_loss=0.02954, over 18506.00 frames. ], tot_loss[loss=0.1585, simple_loss=0.249, pruned_loss=0.03402, over 3589135.62 frames. ], batch size: 51, lr: 4.64e-03, grad_scale: 8.0 +2023-03-09 18:20:16,549 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.830e+02 2.465e+02 3.001e+02 3.686e+02 6.054e+02, threshold=6.002e+02, percent-clipped=0.0 +2023-03-09 18:21:02,999 INFO [train.py:898] (2/4) Epoch 25, batch 0, loss[loss=0.1387, simple_loss=0.2251, pruned_loss=0.02614, over 18501.00 frames. ], tot_loss[loss=0.1387, simple_loss=0.2251, pruned_loss=0.02614, over 18501.00 frames. ], batch size: 44, lr: 4.54e-03, grad_scale: 8.0 +2023-03-09 18:21:03,000 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 18:21:14,846 INFO [train.py:932] (2/4) Epoch 25, validation: loss=0.1499, simple_loss=0.2489, pruned_loss=0.0255, over 944034.00 frames. 
+2023-03-09 18:21:14,847 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 18:21:30,498 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87230.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:21:32,958 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9807, 3.7724, 5.1518, 3.0477, 4.4746, 2.6073, 3.1461, 1.8562], + device='cuda:2'), covar=tensor([0.1155, 0.0962, 0.0175, 0.0968, 0.0511, 0.2726, 0.2796, 0.2359], + device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0248, 0.0213, 0.0203, 0.0262, 0.0277, 0.0331, 0.0243], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 18:21:32,990 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6984, 2.3389, 2.6932, 2.6664, 3.2230, 4.7981, 4.7638, 3.4161], + device='cuda:2'), covar=tensor([0.1941, 0.2643, 0.2817, 0.1987, 0.2462, 0.0289, 0.0381, 0.0991], + device='cuda:2'), in_proj_covar=tensor([0.0312, 0.0352, 0.0391, 0.0282, 0.0389, 0.0252, 0.0297, 0.0263], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 18:21:34,866 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87234.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:21:37,265 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87236.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:22:14,601 INFO [train.py:898] (2/4) Epoch 25, batch 50, loss[loss=0.159, simple_loss=0.2517, pruned_loss=0.03321, over 18402.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2503, pruned_loss=0.03443, over 814813.65 frames. ], batch size: 52, lr: 4.54e-03, grad_scale: 8.0 +2023-03-09 18:22:34,855 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87284.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:22:43,699 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87291.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:22:52,235 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.879e+02 2.489e+02 2.896e+02 3.619e+02 7.206e+02, threshold=5.791e+02, percent-clipped=1.0 +2023-03-09 18:23:14,360 INFO [train.py:898] (2/4) Epoch 25, batch 100, loss[loss=0.1609, simple_loss=0.2577, pruned_loss=0.03205, over 18135.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.25, pruned_loss=0.03429, over 1436486.79 frames. ], batch size: 62, lr: 4.54e-03, grad_scale: 8.0 +2023-03-09 18:23:19,346 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9711, 4.6774, 4.7044, 3.5338, 3.8095, 3.4818, 2.9280, 2.7486], + device='cuda:2'), covar=tensor([0.0206, 0.0134, 0.0080, 0.0306, 0.0316, 0.0231, 0.0626, 0.0765], + device='cuda:2'), in_proj_covar=tensor([0.0073, 0.0062, 0.0065, 0.0070, 0.0092, 0.0068, 0.0078, 0.0085], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 18:23:21,994 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 18:23:56,888 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. 
limit=2.0 +2023-03-09 18:24:03,155 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87358.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 18:24:08,397 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87362.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:24:13,624 INFO [train.py:898] (2/4) Epoch 25, batch 150, loss[loss=0.165, simple_loss=0.2594, pruned_loss=0.0353, over 17192.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2499, pruned_loss=0.03466, over 1919167.82 frames. ], batch size: 78, lr: 4.54e-03, grad_scale: 8.0 +2023-03-09 18:24:49,024 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.772e+02 2.588e+02 3.217e+02 3.788e+02 6.268e+02, threshold=6.435e+02, percent-clipped=1.0 +2023-03-09 18:25:12,523 INFO [train.py:898] (2/4) Epoch 25, batch 200, loss[loss=0.1461, simple_loss=0.23, pruned_loss=0.0311, over 18431.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2498, pruned_loss=0.0347, over 2298681.77 frames. ], batch size: 43, lr: 4.54e-03, grad_scale: 8.0 +2023-03-09 18:25:19,873 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87423.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:25:32,411 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-03-09 18:25:37,067 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3650, 5.3526, 4.9138, 5.3336, 5.3047, 4.7956, 5.2269, 4.8809], + device='cuda:2'), covar=tensor([0.0533, 0.0632, 0.1601, 0.0859, 0.0734, 0.0575, 0.0566, 0.1313], + device='cuda:2'), in_proj_covar=tensor([0.0508, 0.0577, 0.0718, 0.0443, 0.0471, 0.0526, 0.0558, 0.0690], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 18:25:42,682 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8628, 4.9181, 5.0079, 4.6667, 4.7406, 4.7228, 5.0283, 5.0179], + device='cuda:2'), covar=tensor([0.0078, 0.0075, 0.0058, 0.0122, 0.0064, 0.0152, 0.0094, 0.0108], + device='cuda:2'), in_proj_covar=tensor([0.0098, 0.0072, 0.0078, 0.0096, 0.0077, 0.0107, 0.0090, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 18:25:56,387 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2869, 5.3400, 5.5769, 5.5833, 5.1602, 6.0980, 5.6979, 5.3105], + device='cuda:2'), covar=tensor([0.1187, 0.0610, 0.0778, 0.0843, 0.1401, 0.0711, 0.0634, 0.1672], + device='cuda:2'), in_proj_covar=tensor([0.0360, 0.0294, 0.0317, 0.0322, 0.0328, 0.0430, 0.0287, 0.0422], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 18:26:04,035 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87461.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:26:10,405 INFO [train.py:898] (2/4) Epoch 25, batch 250, loss[loss=0.1746, simple_loss=0.2619, pruned_loss=0.0437, over 18364.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2491, pruned_loss=0.03448, over 2584337.82 frames. ], batch size: 56, lr: 4.54e-03, grad_scale: 8.0 +2023-03-09 18:26:14,780 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87470.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:26:18,686 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. 
limit=2.0 +2023-03-09 18:26:46,939 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.865e+02 2.586e+02 2.892e+02 3.324e+02 7.017e+02, threshold=5.784e+02, percent-clipped=1.0 +2023-03-09 18:26:55,122 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87505.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:27:09,426 INFO [train.py:898] (2/4) Epoch 25, batch 300, loss[loss=0.1595, simple_loss=0.2484, pruned_loss=0.03534, over 18283.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.25, pruned_loss=0.03459, over 2815580.05 frames. ], batch size: 49, lr: 4.54e-03, grad_scale: 8.0 +2023-03-09 18:27:10,679 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87518.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:27:16,055 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87522.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:27:29,495 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87534.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:27:53,241 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-09 18:28:07,723 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87566.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 18:28:08,434 INFO [train.py:898] (2/4) Epoch 25, batch 350, loss[loss=0.1594, simple_loss=0.2516, pruned_loss=0.03358, over 18274.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2506, pruned_loss=0.03435, over 3001153.13 frames. ], batch size: 49, lr: 4.53e-03, grad_scale: 8.0 +2023-03-09 18:28:26,401 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87582.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:28:28,911 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87584.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:28:30,949 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87586.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:28:34,513 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87589.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:28:44,689 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.762e+02 2.689e+02 3.189e+02 3.817e+02 6.961e+02, threshold=6.379e+02, percent-clipped=1.0 +2023-03-09 18:28:53,108 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87605.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:29:06,923 INFO [train.py:898] (2/4) Epoch 25, batch 400, loss[loss=0.1593, simple_loss=0.2469, pruned_loss=0.03588, over 17987.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2502, pruned_loss=0.03443, over 3126766.80 frames. 
], batch size: 65, lr: 4.53e-03, grad_scale: 8.0 +2023-03-09 18:29:40,359 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87645.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:29:46,520 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87650.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:29:55,299 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87658.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 18:30:04,958 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87666.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 18:30:05,645 INFO [train.py:898] (2/4) Epoch 25, batch 450, loss[loss=0.1453, simple_loss=0.2367, pruned_loss=0.02695, over 18174.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2498, pruned_loss=0.03444, over 3228604.19 frames. ], batch size: 44, lr: 4.53e-03, grad_scale: 8.0 +2023-03-09 18:30:42,035 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.987e+02 2.570e+02 3.208e+02 4.155e+02 1.022e+03, threshold=6.417e+02, percent-clipped=4.0 +2023-03-09 18:30:51,849 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87706.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:31:03,994 INFO [train.py:898] (2/4) Epoch 25, batch 500, loss[loss=0.1686, simple_loss=0.2636, pruned_loss=0.03675, over 15955.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2491, pruned_loss=0.03428, over 3310545.45 frames. ], batch size: 94, lr: 4.53e-03, grad_scale: 8.0 +2023-03-09 18:31:06,073 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87718.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:31:15,011 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.91 vs. limit=5.0 +2023-03-09 18:31:55,265 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.6140, 5.5785, 5.2408, 5.5302, 5.5351, 4.9833, 5.4296, 5.1828], + device='cuda:2'), covar=tensor([0.0404, 0.0373, 0.1231, 0.0769, 0.0490, 0.0400, 0.0434, 0.1029], + device='cuda:2'), in_proj_covar=tensor([0.0509, 0.0575, 0.0722, 0.0447, 0.0468, 0.0526, 0.0560, 0.0692], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 18:32:03,143 INFO [train.py:898] (2/4) Epoch 25, batch 550, loss[loss=0.1569, simple_loss=0.2533, pruned_loss=0.03023, over 18403.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2485, pruned_loss=0.0341, over 3370677.62 frames. ], batch size: 52, lr: 4.53e-03, grad_scale: 8.0 +2023-03-09 18:32:12,007 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9327, 5.0781, 5.1084, 4.7536, 4.8547, 4.8733, 5.1343, 5.1540], + device='cuda:2'), covar=tensor([0.0079, 0.0055, 0.0053, 0.0104, 0.0056, 0.0141, 0.0070, 0.0089], + device='cuda:2'), in_proj_covar=tensor([0.0097, 0.0072, 0.0077, 0.0095, 0.0077, 0.0106, 0.0090, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 18:32:18,656 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.57 vs. 
limit=2.0 +2023-03-09 18:32:39,326 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.934e+02 2.781e+02 3.111e+02 3.709e+02 9.396e+02, threshold=6.222e+02, percent-clipped=2.0 +2023-03-09 18:32:43,466 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9334, 4.1335, 2.6601, 4.0269, 5.2352, 2.5792, 3.9303, 4.1859], + device='cuda:2'), covar=tensor([0.0196, 0.1311, 0.1481, 0.0667, 0.0078, 0.1214, 0.0647, 0.0676], + device='cuda:2'), in_proj_covar=tensor([0.0176, 0.0275, 0.0207, 0.0201, 0.0135, 0.0186, 0.0220, 0.0228], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 18:33:02,207 INFO [train.py:898] (2/4) Epoch 25, batch 600, loss[loss=0.1402, simple_loss=0.2307, pruned_loss=0.02489, over 18338.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2478, pruned_loss=0.03361, over 3425754.12 frames. ], batch size: 46, lr: 4.53e-03, grad_scale: 8.0 +2023-03-09 18:33:02,427 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87817.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:33:13,773 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-03-09 18:33:54,586 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87861.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 18:34:01,472 INFO [train.py:898] (2/4) Epoch 25, batch 650, loss[loss=0.1516, simple_loss=0.2419, pruned_loss=0.03066, over 18536.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2473, pruned_loss=0.03375, over 3466832.64 frames. ], batch size: 49, lr: 4.53e-03, grad_scale: 8.0 +2023-03-09 18:34:23,964 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87886.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:34:33,699 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-09 18:34:38,403 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.755e+02 2.498e+02 2.822e+02 3.446e+02 7.564e+02, threshold=5.643e+02, percent-clipped=1.0 +2023-03-09 18:34:41,160 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9761, 3.9635, 3.7982, 3.4039, 3.6441, 2.9757, 3.1319, 3.9908], + device='cuda:2'), covar=tensor([0.0055, 0.0079, 0.0080, 0.0146, 0.0089, 0.0193, 0.0195, 0.0051], + device='cuda:2'), in_proj_covar=tensor([0.0148, 0.0166, 0.0141, 0.0191, 0.0147, 0.0182, 0.0187, 0.0126], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 18:35:00,456 INFO [train.py:898] (2/4) Epoch 25, batch 700, loss[loss=0.1489, simple_loss=0.2334, pruned_loss=0.03215, over 17383.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2467, pruned_loss=0.03365, over 3497087.41 frames. 
], batch size: 38, lr: 4.53e-03, grad_scale: 8.0 +2023-03-09 18:35:20,550 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=87934.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:35:28,947 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87940.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:35:34,753 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87945.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:35:52,402 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87961.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 18:35:59,586 INFO [train.py:898] (2/4) Epoch 25, batch 750, loss[loss=0.1755, simple_loss=0.2584, pruned_loss=0.04629, over 12847.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2482, pruned_loss=0.03391, over 3507783.83 frames. ], batch size: 130, lr: 4.52e-03, grad_scale: 8.0 +2023-03-09 18:36:07,024 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7318, 3.6392, 3.5377, 3.0882, 3.3636, 2.5103, 2.5199, 3.7885], + device='cuda:2'), covar=tensor([0.0071, 0.0117, 0.0089, 0.0184, 0.0110, 0.0282, 0.0330, 0.0059], + device='cuda:2'), in_proj_covar=tensor([0.0149, 0.0167, 0.0141, 0.0191, 0.0148, 0.0183, 0.0187, 0.0127], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 18:36:16,087 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.11 vs. limit=5.0 +2023-03-09 18:36:35,766 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.893e+02 2.719e+02 3.180e+02 3.674e+02 1.066e+03, threshold=6.360e+02, percent-clipped=6.0 +2023-03-09 18:36:52,457 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-09 18:36:59,862 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8971, 2.9735, 2.6451, 3.1222, 3.8534, 3.7421, 3.3227, 3.0579], + device='cuda:2'), covar=tensor([0.0153, 0.0328, 0.0570, 0.0333, 0.0153, 0.0162, 0.0390, 0.0409], + device='cuda:2'), in_proj_covar=tensor([0.0142, 0.0143, 0.0166, 0.0164, 0.0136, 0.0123, 0.0159, 0.0163], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 18:37:01,727 INFO [train.py:898] (2/4) Epoch 25, batch 800, loss[loss=0.1693, simple_loss=0.2657, pruned_loss=0.03646, over 18388.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.248, pruned_loss=0.03392, over 3534801.61 frames. 
], batch size: 52, lr: 4.52e-03, grad_scale: 8.0 +2023-03-09 18:37:03,699 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88018.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:37:10,473 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6444, 3.0502, 4.4041, 3.6973, 2.8891, 4.6898, 3.9759, 3.0056], + device='cuda:2'), covar=tensor([0.0572, 0.1385, 0.0301, 0.0503, 0.1432, 0.0215, 0.0561, 0.0890], + device='cuda:2'), in_proj_covar=tensor([0.0219, 0.0245, 0.0228, 0.0171, 0.0229, 0.0220, 0.0257, 0.0201], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 18:37:58,657 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88066.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:37:59,621 INFO [train.py:898] (2/4) Epoch 25, batch 850, loss[loss=0.1524, simple_loss=0.2478, pruned_loss=0.02844, over 18628.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.249, pruned_loss=0.03415, over 3549190.87 frames. ], batch size: 52, lr: 4.52e-03, grad_scale: 8.0 +2023-03-09 18:38:25,159 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9174, 2.9579, 2.1978, 3.3193, 2.5854, 2.9286, 2.3548, 2.9410], + device='cuda:2'), covar=tensor([0.0644, 0.0791, 0.1272, 0.0607, 0.0814, 0.0342, 0.1074, 0.0475], + device='cuda:2'), in_proj_covar=tensor([0.0219, 0.0228, 0.0192, 0.0290, 0.0194, 0.0269, 0.0203, 0.0204], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 18:38:35,401 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.735e+02 2.499e+02 3.015e+02 3.591e+02 1.108e+03, threshold=6.031e+02, percent-clipped=1.0 +2023-03-09 18:38:39,166 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88100.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:38:50,796 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.88 vs. limit=5.0 +2023-03-09 18:38:58,093 INFO [train.py:898] (2/4) Epoch 25, batch 900, loss[loss=0.1393, simple_loss=0.2245, pruned_loss=0.02709, over 18425.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.2484, pruned_loss=0.03391, over 3550414.18 frames. 
], batch size: 43, lr: 4.52e-03, grad_scale: 8.0 +2023-03-09 18:38:58,443 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88117.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:39:39,225 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7588, 5.2200, 5.1650, 5.2030, 4.7056, 5.1279, 4.5972, 5.0712], + device='cuda:2'), covar=tensor([0.0240, 0.0281, 0.0202, 0.0467, 0.0419, 0.0226, 0.1091, 0.0330], + device='cuda:2'), in_proj_covar=tensor([0.0227, 0.0273, 0.0270, 0.0349, 0.0283, 0.0279, 0.0315, 0.0271], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 18:39:50,469 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88161.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:39:50,574 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88161.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:39:55,052 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88165.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:39:57,060 INFO [train.py:898] (2/4) Epoch 25, batch 950, loss[loss=0.184, simple_loss=0.2768, pruned_loss=0.04562, over 17903.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2483, pruned_loss=0.03385, over 3541772.51 frames. ], batch size: 70, lr: 4.52e-03, grad_scale: 8.0 +2023-03-09 18:40:26,504 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.06 vs. limit=5.0 +2023-03-09 18:40:33,071 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.747e+02 2.618e+02 3.009e+02 3.619e+02 7.498e+02, threshold=6.018e+02, percent-clipped=2.0 +2023-03-09 18:40:34,568 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0896, 5.1505, 5.1521, 4.8854, 4.8853, 4.9776, 5.2498, 5.2401], + device='cuda:2'), covar=tensor([0.0078, 0.0066, 0.0066, 0.0111, 0.0057, 0.0142, 0.0092, 0.0097], + device='cuda:2'), in_proj_covar=tensor([0.0098, 0.0072, 0.0078, 0.0096, 0.0077, 0.0108, 0.0090, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 18:40:47,077 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88209.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:40:55,846 INFO [train.py:898] (2/4) Epoch 25, batch 1000, loss[loss=0.1604, simple_loss=0.2552, pruned_loss=0.03277, over 18626.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2482, pruned_loss=0.03371, over 3563061.90 frames. 
], batch size: 52, lr: 4.52e-03, grad_scale: 8.0 +2023-03-09 18:41:22,525 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88240.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:41:28,245 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88245.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:41:43,496 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6708, 3.9447, 2.3970, 3.9348, 4.9783, 2.4908, 3.7962, 3.9345], + device='cuda:2'), covar=tensor([0.0189, 0.0926, 0.1524, 0.0592, 0.0103, 0.1157, 0.0676, 0.0637], + device='cuda:2'), in_proj_covar=tensor([0.0178, 0.0279, 0.0208, 0.0203, 0.0136, 0.0188, 0.0223, 0.0231], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 18:41:48,134 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88261.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 18:41:50,408 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1549, 3.7955, 5.2118, 2.9305, 4.5907, 2.7235, 3.2703, 1.9967], + device='cuda:2'), covar=tensor([0.1054, 0.0862, 0.0170, 0.1024, 0.0464, 0.2522, 0.2568, 0.2173], + device='cuda:2'), in_proj_covar=tensor([0.0229, 0.0252, 0.0217, 0.0207, 0.0266, 0.0281, 0.0336, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 18:41:54,327 INFO [train.py:898] (2/4) Epoch 25, batch 1050, loss[loss=0.1437, simple_loss=0.2315, pruned_loss=0.02798, over 18350.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2484, pruned_loss=0.03379, over 3559822.68 frames. ], batch size: 46, lr: 4.52e-03, grad_scale: 8.0 +2023-03-09 18:42:18,489 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88288.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:42:22,084 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88291.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 18:42:24,176 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88293.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:42:29,463 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.898e+02 2.574e+02 3.147e+02 3.590e+02 6.876e+02, threshold=6.293e+02, percent-clipped=1.0 +2023-03-09 18:42:42,896 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88309.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:42:52,766 INFO [train.py:898] (2/4) Epoch 25, batch 1100, loss[loss=0.1501, simple_loss=0.2427, pruned_loss=0.0287, over 18406.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2483, pruned_loss=0.03401, over 3570509.75 frames. 
], batch size: 52, lr: 4.52e-03, grad_scale: 8.0 +2023-03-09 18:43:32,842 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88352.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:43:38,919 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8550, 3.8728, 3.7237, 3.3406, 3.5720, 2.9608, 3.0404, 3.8370], + device='cuda:2'), covar=tensor([0.0060, 0.0095, 0.0082, 0.0149, 0.0100, 0.0193, 0.0178, 0.0079], + device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0169, 0.0142, 0.0194, 0.0150, 0.0184, 0.0189, 0.0128], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 18:43:50,009 INFO [train.py:898] (2/4) Epoch 25, batch 1150, loss[loss=0.1831, simple_loss=0.2672, pruned_loss=0.0495, over 12907.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2484, pruned_loss=0.03412, over 3568903.38 frames. ], batch size: 130, lr: 4.51e-03, grad_scale: 8.0 +2023-03-09 18:44:04,388 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8417, 5.2625, 5.2133, 5.2743, 4.7168, 5.1665, 4.6294, 5.1426], + device='cuda:2'), covar=tensor([0.0204, 0.0259, 0.0187, 0.0432, 0.0427, 0.0221, 0.1008, 0.0300], + device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0270, 0.0267, 0.0346, 0.0282, 0.0277, 0.0312, 0.0269], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 18:44:25,855 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.694e+02 2.731e+02 3.095e+02 3.646e+02 6.544e+02, threshold=6.189e+02, percent-clipped=1.0 +2023-03-09 18:44:45,291 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8181, 2.6368, 2.7996, 2.8984, 3.3911, 4.9738, 4.9709, 3.4751], + device='cuda:2'), covar=tensor([0.1929, 0.2452, 0.3083, 0.1798, 0.2366, 0.0236, 0.0340, 0.1029], + device='cuda:2'), in_proj_covar=tensor([0.0317, 0.0357, 0.0397, 0.0286, 0.0394, 0.0255, 0.0300, 0.0266], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 18:44:47,539 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6288, 2.3243, 2.5446, 2.5456, 3.1096, 4.7307, 4.6255, 3.2081], + device='cuda:2'), covar=tensor([0.2050, 0.2595, 0.3251, 0.2115, 0.2591, 0.0279, 0.0410, 0.1082], + device='cuda:2'), in_proj_covar=tensor([0.0317, 0.0357, 0.0397, 0.0286, 0.0394, 0.0255, 0.0300, 0.0266], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 18:44:48,694 INFO [train.py:898] (2/4) Epoch 25, batch 1200, loss[loss=0.154, simple_loss=0.2518, pruned_loss=0.02814, over 18561.00 frames. ], tot_loss[loss=0.1585, simple_loss=0.2487, pruned_loss=0.03416, over 3567481.43 frames. ], batch size: 54, lr: 4.51e-03, grad_scale: 8.0 +2023-03-09 18:45:21,064 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-03-09 18:45:25,362 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-03-09 18:45:34,633 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=88456.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:45:46,401 INFO [train.py:898] (2/4) Epoch 25, batch 1250, loss[loss=0.1681, simple_loss=0.2673, pruned_loss=0.03443, over 18318.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.2496, pruned_loss=0.03433, over 3571592.32 frames. 
], batch size: 49, lr: 4.51e-03, grad_scale: 16.0 +2023-03-09 18:46:22,885 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.846e+02 2.624e+02 3.040e+02 3.727e+02 1.203e+03, threshold=6.079e+02, percent-clipped=2.0 +2023-03-09 18:46:44,554 INFO [train.py:898] (2/4) Epoch 25, batch 1300, loss[loss=0.1648, simple_loss=0.2551, pruned_loss=0.03726, over 18348.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2496, pruned_loss=0.03459, over 3579522.85 frames. ], batch size: 56, lr: 4.51e-03, grad_scale: 16.0 +2023-03-09 18:47:12,788 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-09 18:47:42,863 INFO [train.py:898] (2/4) Epoch 25, batch 1350, loss[loss=0.1321, simple_loss=0.2226, pruned_loss=0.02081, over 18503.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2492, pruned_loss=0.03431, over 3583455.32 frames. ], batch size: 47, lr: 4.51e-03, grad_scale: 16.0 +2023-03-09 18:48:16,817 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4979, 2.8009, 4.2964, 3.6984, 2.5308, 4.4979, 3.8161, 2.7968], + device='cuda:2'), covar=tensor([0.0564, 0.1525, 0.0298, 0.0446, 0.1681, 0.0222, 0.0580, 0.0972], + device='cuda:2'), in_proj_covar=tensor([0.0217, 0.0245, 0.0227, 0.0170, 0.0228, 0.0218, 0.0257, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 18:48:19,809 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.775e+02 2.523e+02 2.984e+02 3.616e+02 6.843e+02, threshold=5.967e+02, percent-clipped=1.0 +2023-03-09 18:48:25,484 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-03-09 18:48:41,230 INFO [train.py:898] (2/4) Epoch 25, batch 1400, loss[loss=0.1312, simple_loss=0.2204, pruned_loss=0.02102, over 18393.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.249, pruned_loss=0.03415, over 3581496.78 frames. ], batch size: 42, lr: 4.51e-03, grad_scale: 16.0 +2023-03-09 18:49:01,741 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3683, 2.5282, 3.9943, 3.5891, 2.2677, 4.1712, 3.6439, 2.5313], + device='cuda:2'), covar=tensor([0.0539, 0.1659, 0.0335, 0.0414, 0.1868, 0.0270, 0.0663, 0.1163], + device='cuda:2'), in_proj_covar=tensor([0.0217, 0.0245, 0.0227, 0.0170, 0.0229, 0.0219, 0.0257, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 18:49:17,095 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=88647.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 18:49:39,308 INFO [train.py:898] (2/4) Epoch 25, batch 1450, loss[loss=0.1742, simple_loss=0.2688, pruned_loss=0.03976, over 18293.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2486, pruned_loss=0.03411, over 3570550.59 frames. ], batch size: 57, lr: 4.51e-03, grad_scale: 8.0 +2023-03-09 18:50:03,375 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-09 18:50:17,349 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.835e+02 2.449e+02 2.886e+02 3.614e+02 7.165e+02, threshold=5.771e+02, percent-clipped=1.0 +2023-03-09 18:50:37,487 INFO [train.py:898] (2/4) Epoch 25, batch 1500, loss[loss=0.1355, simple_loss=0.2193, pruned_loss=0.02589, over 18464.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2491, pruned_loss=0.03436, over 3563111.66 frames. 
], batch size: 44, lr: 4.51e-03, grad_scale: 8.0 +2023-03-09 18:51:08,695 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-03-09 18:51:23,371 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88756.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:51:25,983 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.93 vs. limit=5.0 +2023-03-09 18:51:35,258 INFO [train.py:898] (2/4) Epoch 25, batch 1550, loss[loss=0.1865, simple_loss=0.272, pruned_loss=0.05052, over 13220.00 frames. ], tot_loss[loss=0.1585, simple_loss=0.2488, pruned_loss=0.03407, over 3568134.12 frames. ], batch size: 131, lr: 4.50e-03, grad_scale: 8.0 +2023-03-09 18:51:40,591 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. limit=2.0 +2023-03-09 18:52:06,728 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-03-09 18:52:13,192 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.757e+02 2.636e+02 3.082e+02 3.617e+02 8.587e+02, threshold=6.165e+02, percent-clipped=5.0 +2023-03-09 18:52:20,081 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88804.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:52:34,352 INFO [train.py:898] (2/4) Epoch 25, batch 1600, loss[loss=0.1447, simple_loss=0.2264, pruned_loss=0.03155, over 18450.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2486, pruned_loss=0.03397, over 3556487.54 frames. ], batch size: 43, lr: 4.50e-03, grad_scale: 8.0 +2023-03-09 18:53:32,608 INFO [train.py:898] (2/4) Epoch 25, batch 1650, loss[loss=0.1633, simple_loss=0.259, pruned_loss=0.03376, over 18132.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2492, pruned_loss=0.03407, over 3554399.05 frames. ], batch size: 62, lr: 4.50e-03, grad_scale: 8.0 +2023-03-09 18:54:09,628 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.772e+02 2.755e+02 3.165e+02 3.755e+02 1.372e+03, threshold=6.330e+02, percent-clipped=6.0 +2023-03-09 18:54:14,327 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 18:54:30,547 INFO [train.py:898] (2/4) Epoch 25, batch 1700, loss[loss=0.1657, simple_loss=0.2634, pruned_loss=0.03396, over 18300.00 frames. ], tot_loss[loss=0.1585, simple_loss=0.249, pruned_loss=0.03397, over 3566038.60 frames. ], batch size: 49, lr: 4.50e-03, grad_scale: 8.0 +2023-03-09 18:55:04,226 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.77 vs. limit=5.0 +2023-03-09 18:55:04,786 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88947.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 18:55:24,714 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5058, 2.8489, 2.4778, 2.8792, 3.5681, 3.4978, 2.9738, 2.8333], + device='cuda:2'), covar=tensor([0.0179, 0.0272, 0.0577, 0.0368, 0.0187, 0.0156, 0.0407, 0.0443], + device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0142, 0.0166, 0.0164, 0.0138, 0.0123, 0.0160, 0.0162], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 18:55:28,681 INFO [train.py:898] (2/4) Epoch 25, batch 1750, loss[loss=0.156, simple_loss=0.2521, pruned_loss=0.02997, over 18403.00 frames. ], tot_loss[loss=0.1585, simple_loss=0.2489, pruned_loss=0.03406, over 3576365.67 frames. 
], batch size: 52, lr: 4.50e-03, grad_scale: 8.0 +2023-03-09 18:55:42,442 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9719, 5.2646, 2.8364, 5.1137, 4.9869, 5.2846, 5.1000, 2.8623], + device='cuda:2'), covar=tensor([0.0219, 0.0094, 0.0781, 0.0091, 0.0082, 0.0073, 0.0101, 0.0945], + device='cuda:2'), in_proj_covar=tensor([0.0092, 0.0083, 0.0098, 0.0098, 0.0089, 0.0078, 0.0086, 0.0098], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 18:55:45,616 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5673, 3.3508, 2.2915, 4.2647, 3.0864, 4.1074, 2.3666, 3.8596], + device='cuda:2'), covar=tensor([0.0664, 0.0883, 0.1411, 0.0552, 0.0804, 0.0283, 0.1274, 0.0441], + device='cuda:2'), in_proj_covar=tensor([0.0219, 0.0230, 0.0193, 0.0292, 0.0195, 0.0269, 0.0205, 0.0205], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 18:56:00,685 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=88995.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 18:56:04,899 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.658e+02 2.478e+02 2.880e+02 3.518e+02 7.008e+02, threshold=5.760e+02, percent-clipped=1.0 +2023-03-09 18:56:27,126 INFO [train.py:898] (2/4) Epoch 25, batch 1800, loss[loss=0.2133, simple_loss=0.2981, pruned_loss=0.06429, over 12930.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.2482, pruned_loss=0.03403, over 3586800.46 frames. ], batch size: 130, lr: 4.50e-03, grad_scale: 4.0 +2023-03-09 18:56:27,420 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3441, 5.3316, 4.9734, 5.2787, 5.2840, 4.6849, 5.1797, 4.9073], + device='cuda:2'), covar=tensor([0.0495, 0.0470, 0.1413, 0.0779, 0.0643, 0.0457, 0.0436, 0.1150], + device='cuda:2'), in_proj_covar=tensor([0.0514, 0.0581, 0.0721, 0.0453, 0.0476, 0.0526, 0.0565, 0.0702], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 18:56:34,295 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7432, 3.7963, 3.5395, 3.1828, 3.4390, 2.8333, 2.8702, 3.7566], + device='cuda:2'), covar=tensor([0.0075, 0.0088, 0.0095, 0.0143, 0.0102, 0.0198, 0.0199, 0.0071], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0167, 0.0139, 0.0191, 0.0147, 0.0180, 0.0186, 0.0127], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 18:56:38,814 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89027.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:57:25,693 INFO [train.py:898] (2/4) Epoch 25, batch 1850, loss[loss=0.1653, simple_loss=0.255, pruned_loss=0.03784, over 18489.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2485, pruned_loss=0.03394, over 3588540.37 frames. 
], batch size: 59, lr: 4.50e-03, grad_scale: 4.0 +2023-03-09 18:57:28,372 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7915, 2.9769, 2.7260, 3.1040, 3.7688, 3.7495, 3.2698, 3.0605], + device='cuda:2'), covar=tensor([0.0169, 0.0296, 0.0500, 0.0376, 0.0191, 0.0138, 0.0368, 0.0403], + device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0143, 0.0166, 0.0165, 0.0139, 0.0123, 0.0160, 0.0163], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 18:57:42,963 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0074, 5.0393, 5.1368, 4.7915, 4.7259, 4.7913, 5.1661, 5.1981], + device='cuda:2'), covar=tensor([0.0063, 0.0060, 0.0052, 0.0105, 0.0067, 0.0169, 0.0069, 0.0090], + device='cuda:2'), in_proj_covar=tensor([0.0097, 0.0072, 0.0078, 0.0096, 0.0077, 0.0107, 0.0089, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 18:57:49,941 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89088.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:58:03,153 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.649e+02 2.662e+02 3.134e+02 3.843e+02 1.322e+03, threshold=6.269e+02, percent-clipped=2.0 +2023-03-09 18:58:11,913 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8175, 3.5875, 4.8082, 4.2284, 3.4081, 2.9611, 4.3196, 5.0319], + device='cuda:2'), covar=tensor([0.0778, 0.1355, 0.0236, 0.0464, 0.0904, 0.1243, 0.0396, 0.0320], + device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0278, 0.0164, 0.0184, 0.0194, 0.0192, 0.0197, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 18:58:15,124 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8203, 5.2357, 5.2438, 5.2261, 4.6695, 5.1060, 4.6287, 5.1663], + device='cuda:2'), covar=tensor([0.0245, 0.0277, 0.0189, 0.0448, 0.0425, 0.0272, 0.1069, 0.0304], + device='cuda:2'), in_proj_covar=tensor([0.0228, 0.0272, 0.0272, 0.0348, 0.0284, 0.0282, 0.0316, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 18:58:23,379 INFO [train.py:898] (2/4) Epoch 25, batch 1900, loss[loss=0.1556, simple_loss=0.2411, pruned_loss=0.03509, over 18499.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2488, pruned_loss=0.03434, over 3565635.53 frames. ], batch size: 47, lr: 4.50e-03, grad_scale: 4.0 +2023-03-09 18:59:22,368 INFO [train.py:898] (2/4) Epoch 25, batch 1950, loss[loss=0.1498, simple_loss=0.2321, pruned_loss=0.03376, over 18440.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.2494, pruned_loss=0.03447, over 3579066.35 frames. 
], batch size: 43, lr: 4.49e-03, grad_scale: 4.0 +2023-03-09 18:59:25,443 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89169.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 18:59:49,558 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.6110, 6.1457, 5.5941, 5.9731, 5.7381, 5.6271, 6.2391, 6.1832], + device='cuda:2'), covar=tensor([0.1253, 0.0708, 0.0462, 0.0685, 0.1376, 0.0796, 0.0597, 0.0668], + device='cuda:2'), in_proj_covar=tensor([0.0622, 0.0542, 0.0395, 0.0574, 0.0766, 0.0565, 0.0781, 0.0591], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 18:59:51,423 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6978, 2.5410, 2.5279, 2.7281, 2.9361, 3.7406, 3.6811, 3.0163], + device='cuda:2'), covar=tensor([0.1812, 0.2376, 0.2718, 0.1777, 0.2250, 0.0477, 0.0571, 0.0975], + device='cuda:2'), in_proj_covar=tensor([0.0319, 0.0358, 0.0400, 0.0288, 0.0395, 0.0257, 0.0302, 0.0267], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 19:00:00,689 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.906e+02 2.686e+02 3.109e+02 3.749e+02 7.120e+02, threshold=6.217e+02, percent-clipped=3.0 +2023-03-09 19:00:20,481 INFO [train.py:898] (2/4) Epoch 25, batch 2000, loss[loss=0.1707, simple_loss=0.2643, pruned_loss=0.03854, over 18560.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.249, pruned_loss=0.03439, over 3576647.77 frames. ], batch size: 54, lr: 4.49e-03, grad_scale: 8.0 +2023-03-09 19:00:36,516 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89230.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:01:18,565 INFO [train.py:898] (2/4) Epoch 25, batch 2050, loss[loss=0.1613, simple_loss=0.2557, pruned_loss=0.03338, over 17095.00 frames. ], tot_loss[loss=0.159, simple_loss=0.2491, pruned_loss=0.03444, over 3582190.79 frames. ], batch size: 78, lr: 4.49e-03, grad_scale: 8.0 +2023-03-09 19:01:42,993 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4398, 5.3885, 5.0346, 5.3908, 5.3451, 4.7673, 5.2715, 5.0268], + device='cuda:2'), covar=tensor([0.0422, 0.0475, 0.1281, 0.0722, 0.0587, 0.0439, 0.0397, 0.0984], + device='cuda:2'), in_proj_covar=tensor([0.0514, 0.0578, 0.0718, 0.0450, 0.0472, 0.0524, 0.0563, 0.0698], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 19:01:57,828 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.761e+02 2.663e+02 3.151e+02 3.828e+02 7.716e+02, threshold=6.301e+02, percent-clipped=2.0 +2023-03-09 19:02:17,080 INFO [train.py:898] (2/4) Epoch 25, batch 2100, loss[loss=0.1446, simple_loss=0.2309, pruned_loss=0.02919, over 18262.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2492, pruned_loss=0.0348, over 3568681.62 frames. 
], batch size: 45, lr: 4.49e-03, grad_scale: 8.0 +2023-03-09 19:02:28,130 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7595, 2.5585, 2.7353, 2.8542, 3.3230, 5.0114, 4.9484, 3.4007], + device='cuda:2'), covar=tensor([0.1993, 0.2422, 0.2950, 0.1800, 0.2382, 0.0251, 0.0325, 0.1013], + device='cuda:2'), in_proj_covar=tensor([0.0319, 0.0357, 0.0399, 0.0287, 0.0394, 0.0257, 0.0302, 0.0266], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 19:02:50,052 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.61 vs. limit=5.0 +2023-03-09 19:03:15,258 INFO [train.py:898] (2/4) Epoch 25, batch 2150, loss[loss=0.1618, simple_loss=0.2538, pruned_loss=0.03489, over 18499.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.249, pruned_loss=0.03468, over 3570119.98 frames. ], batch size: 47, lr: 4.49e-03, grad_scale: 8.0 +2023-03-09 19:03:24,292 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89374.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:03:35,638 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89383.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:03:38,244 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8837, 4.2237, 2.3296, 4.1541, 5.2072, 2.6753, 3.9396, 4.0650], + device='cuda:2'), covar=tensor([0.0182, 0.1017, 0.1687, 0.0638, 0.0100, 0.1184, 0.0642, 0.0675], + device='cuda:2'), in_proj_covar=tensor([0.0177, 0.0277, 0.0207, 0.0201, 0.0136, 0.0187, 0.0221, 0.0230], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 19:03:54,955 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.788e+02 2.675e+02 3.172e+02 3.693e+02 8.174e+02, threshold=6.344e+02, percent-clipped=3.0 +2023-03-09 19:04:15,105 INFO [train.py:898] (2/4) Epoch 25, batch 2200, loss[loss=0.1559, simple_loss=0.2541, pruned_loss=0.02886, over 18396.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.2492, pruned_loss=0.03458, over 3579253.87 frames. ], batch size: 52, lr: 4.49e-03, grad_scale: 8.0 +2023-03-09 19:04:25,020 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89425.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:04:36,868 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89435.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:04:41,878 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89439.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 19:04:46,702 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-03-09 19:05:13,787 INFO [train.py:898] (2/4) Epoch 25, batch 2250, loss[loss=0.171, simple_loss=0.2651, pruned_loss=0.03843, over 18294.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2496, pruned_loss=0.0346, over 3581762.28 frames. 
], batch size: 57, lr: 4.49e-03, grad_scale: 8.0 +2023-03-09 19:05:38,385 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89486.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:05:53,609 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.616e+02 2.577e+02 2.992e+02 3.589e+02 8.023e+02, threshold=5.985e+02, percent-clipped=1.0 +2023-03-09 19:05:53,876 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89500.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 19:06:04,236 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89509.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:06:12,829 INFO [train.py:898] (2/4) Epoch 25, batch 2300, loss[loss=0.1602, simple_loss=0.2445, pruned_loss=0.03798, over 18416.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2496, pruned_loss=0.03469, over 3580963.64 frames. ], batch size: 48, lr: 4.49e-03, grad_scale: 8.0 +2023-03-09 19:06:20,037 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9039, 5.3408, 2.8426, 5.1235, 5.0378, 5.3298, 5.1443, 2.6119], + device='cuda:2'), covar=tensor([0.0231, 0.0058, 0.0733, 0.0084, 0.0074, 0.0069, 0.0088, 0.1005], + device='cuda:2'), in_proj_covar=tensor([0.0092, 0.0083, 0.0098, 0.0098, 0.0089, 0.0079, 0.0086, 0.0098], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 19:06:22,688 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89525.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:06:47,631 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5846, 6.1211, 5.6617, 6.0064, 5.7470, 5.6046, 6.2047, 6.1342], + device='cuda:2'), covar=tensor([0.1161, 0.0755, 0.0420, 0.0637, 0.1294, 0.0678, 0.0542, 0.0695], + device='cuda:2'), in_proj_covar=tensor([0.0625, 0.0551, 0.0400, 0.0578, 0.0773, 0.0572, 0.0788, 0.0595], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 19:06:53,389 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5863, 2.9106, 2.5189, 2.8922, 3.6534, 3.5413, 3.1809, 2.8617], + device='cuda:2'), covar=tensor([0.0189, 0.0317, 0.0566, 0.0373, 0.0190, 0.0163, 0.0365, 0.0423], + device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0142, 0.0166, 0.0162, 0.0137, 0.0123, 0.0159, 0.0161], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 19:06:54,527 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8334, 3.1166, 2.6979, 3.1472, 3.8319, 3.7357, 3.3818, 3.0704], + device='cuda:2'), covar=tensor([0.0167, 0.0267, 0.0543, 0.0349, 0.0159, 0.0155, 0.0337, 0.0383], + device='cuda:2'), in_proj_covar=tensor([0.0143, 0.0142, 0.0165, 0.0162, 0.0137, 0.0123, 0.0159, 0.0161], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 19:07:11,313 INFO [train.py:898] (2/4) Epoch 25, batch 2350, loss[loss=0.18, simple_loss=0.2687, pruned_loss=0.04568, over 18172.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2499, pruned_loss=0.03448, over 3588982.74 frames. 
], batch size: 62, lr: 4.48e-03, grad_scale: 8.0 +2023-03-09 19:07:15,053 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89570.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:07:36,140 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5534, 3.5897, 3.4099, 2.9996, 3.3159, 2.7086, 2.6868, 3.5714], + device='cuda:2'), covar=tensor([0.0078, 0.0093, 0.0088, 0.0180, 0.0110, 0.0208, 0.0227, 0.0077], + device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0168, 0.0140, 0.0193, 0.0149, 0.0183, 0.0186, 0.0127], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 19:07:38,026 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6391, 2.3314, 2.5728, 2.7123, 3.2473, 4.7570, 4.6994, 3.2609], + device='cuda:2'), covar=tensor([0.2040, 0.2613, 0.3101, 0.1962, 0.2386, 0.0282, 0.0380, 0.1051], + device='cuda:2'), in_proj_covar=tensor([0.0321, 0.0359, 0.0401, 0.0289, 0.0396, 0.0258, 0.0303, 0.0268], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 19:07:43,508 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1900, 5.2255, 5.3541, 5.0352, 5.0334, 5.0445, 5.3684, 5.4313], + device='cuda:2'), covar=tensor([0.0063, 0.0054, 0.0045, 0.0099, 0.0053, 0.0149, 0.0072, 0.0088], + device='cuda:2'), in_proj_covar=tensor([0.0098, 0.0073, 0.0078, 0.0097, 0.0078, 0.0108, 0.0090, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 19:07:51,628 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.602e+02 2.634e+02 3.010e+02 3.571e+02 1.034e+03, threshold=6.019e+02, percent-clipped=2.0 +2023-03-09 19:08:09,867 INFO [train.py:898] (2/4) Epoch 25, batch 2400, loss[loss=0.1707, simple_loss=0.2604, pruned_loss=0.04045, over 17784.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2503, pruned_loss=0.03468, over 3591940.03 frames. ], batch size: 70, lr: 4.48e-03, grad_scale: 8.0 +2023-03-09 19:09:07,616 INFO [train.py:898] (2/4) Epoch 25, batch 2450, loss[loss=0.1409, simple_loss=0.228, pruned_loss=0.02694, over 18404.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.2496, pruned_loss=0.03441, over 3590521.39 frames. ], batch size: 48, lr: 4.48e-03, grad_scale: 4.0 +2023-03-09 19:09:23,475 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89680.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:09:26,963 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89683.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:09:49,624 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.687e+02 2.547e+02 3.016e+02 3.415e+02 5.579e+02, threshold=6.031e+02, percent-clipped=0.0 +2023-03-09 19:10:02,390 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8684, 4.7824, 4.9315, 4.5566, 4.5886, 4.7748, 5.0290, 4.9047], + device='cuda:2'), covar=tensor([0.0093, 0.0114, 0.0110, 0.0173, 0.0109, 0.0225, 0.0121, 0.0158], + device='cuda:2'), in_proj_covar=tensor([0.0099, 0.0074, 0.0079, 0.0098, 0.0078, 0.0108, 0.0091, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 19:10:06,506 INFO [train.py:898] (2/4) Epoch 25, batch 2500, loss[loss=0.2033, simple_loss=0.2804, pruned_loss=0.06308, over 12056.00 frames. 
], tot_loss[loss=0.1593, simple_loss=0.2497, pruned_loss=0.03441, over 3592597.22 frames. ], batch size: 129, lr: 4.48e-03, grad_scale: 4.0 +2023-03-09 19:10:21,696 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89730.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:10:22,815 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89731.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:10:34,990 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89741.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:10:53,007 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 19:11:04,383 INFO [train.py:898] (2/4) Epoch 25, batch 2550, loss[loss=0.1637, simple_loss=0.2637, pruned_loss=0.03187, over 18579.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2499, pruned_loss=0.03457, over 3594552.71 frames. ], batch size: 54, lr: 4.48e-03, grad_scale: 4.0 +2023-03-09 19:11:20,654 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89781.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:11:37,072 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89795.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 19:11:45,427 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.598e+02 2.460e+02 2.989e+02 3.597e+02 5.800e+02, threshold=5.978e+02, percent-clipped=0.0 +2023-03-09 19:12:03,019 INFO [train.py:898] (2/4) Epoch 25, batch 2600, loss[loss=0.1485, simple_loss=0.2396, pruned_loss=0.02869, over 18282.00 frames. ], tot_loss[loss=0.1599, simple_loss=0.2503, pruned_loss=0.03473, over 3600365.56 frames. ], batch size: 49, lr: 4.48e-03, grad_scale: 4.0 +2023-03-09 19:12:12,966 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89825.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:12:19,340 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-03-09 19:12:59,249 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89865.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:12:59,400 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89865.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:13:01,342 INFO [train.py:898] (2/4) Epoch 25, batch 2650, loss[loss=0.15, simple_loss=0.2479, pruned_loss=0.02605, over 18360.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.25, pruned_loss=0.03444, over 3609608.49 frames. 
], batch size: 55, lr: 4.48e-03, grad_scale: 4.0 +2023-03-09 19:13:08,337 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=89873.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:13:35,499 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6067, 2.3343, 2.5854, 2.7346, 3.1525, 4.9394, 4.8343, 3.5967], + device='cuda:2'), covar=tensor([0.2096, 0.2743, 0.3132, 0.1991, 0.2626, 0.0262, 0.0362, 0.0914], + device='cuda:2'), in_proj_covar=tensor([0.0321, 0.0358, 0.0400, 0.0288, 0.0395, 0.0258, 0.0302, 0.0268], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 19:13:41,753 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.822e+02 2.660e+02 3.253e+02 3.912e+02 1.582e+03, threshold=6.506e+02, percent-clipped=3.0 +2023-03-09 19:13:56,702 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3158, 3.3202, 3.1662, 2.9702, 3.1442, 2.6366, 2.5936, 3.2825], + device='cuda:2'), covar=tensor([0.0079, 0.0092, 0.0094, 0.0123, 0.0099, 0.0176, 0.0206, 0.0087], + device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0169, 0.0142, 0.0193, 0.0150, 0.0184, 0.0188, 0.0128], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 19:13:59,661 INFO [train.py:898] (2/4) Epoch 25, batch 2700, loss[loss=0.1587, simple_loss=0.251, pruned_loss=0.03314, over 18362.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.2498, pruned_loss=0.03428, over 3606004.96 frames. ], batch size: 55, lr: 4.48e-03, grad_scale: 4.0 +2023-03-09 19:14:06,696 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4167, 5.2757, 5.6849, 5.6954, 5.4082, 6.1616, 5.9074, 5.3754], + device='cuda:2'), covar=tensor([0.1140, 0.0639, 0.0714, 0.0670, 0.1213, 0.0695, 0.0522, 0.1677], + device='cuda:2'), in_proj_covar=tensor([0.0369, 0.0299, 0.0325, 0.0327, 0.0339, 0.0440, 0.0292, 0.0430], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 19:14:10,048 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89926.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 19:14:41,678 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4095, 2.7710, 2.4243, 2.7781, 3.4595, 3.3799, 3.0208, 2.7218], + device='cuda:2'), covar=tensor([0.0204, 0.0285, 0.0601, 0.0403, 0.0216, 0.0177, 0.0371, 0.0423], + device='cuda:2'), in_proj_covar=tensor([0.0142, 0.0142, 0.0165, 0.0163, 0.0138, 0.0123, 0.0158, 0.0161], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 19:14:42,772 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4464, 2.3001, 4.2275, 3.7541, 2.2668, 4.3719, 3.8229, 2.6337], + device='cuda:2'), covar=tensor([0.0531, 0.2127, 0.0301, 0.0409, 0.2105, 0.0308, 0.0595, 0.1259], + device='cuda:2'), in_proj_covar=tensor([0.0220, 0.0246, 0.0229, 0.0171, 0.0228, 0.0220, 0.0259, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 19:14:58,044 INFO [train.py:898] (2/4) Epoch 25, batch 2750, loss[loss=0.1521, simple_loss=0.248, pruned_loss=0.02805, over 17734.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2496, pruned_loss=0.03445, over 3603341.42 frames. 
], batch size: 70, lr: 4.47e-03, grad_scale: 4.0 +2023-03-09 19:15:43,121 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.861e+02 2.579e+02 2.972e+02 3.453e+02 7.499e+02, threshold=5.944e+02, percent-clipped=1.0 +2023-03-09 19:16:00,220 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4454, 2.9290, 4.2980, 3.6453, 2.7413, 4.4515, 3.8221, 2.8514], + device='cuda:2'), covar=tensor([0.0585, 0.1404, 0.0283, 0.0481, 0.1591, 0.0236, 0.0609, 0.0990], + device='cuda:2'), in_proj_covar=tensor([0.0220, 0.0246, 0.0229, 0.0171, 0.0229, 0.0220, 0.0258, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 19:16:00,873 INFO [train.py:898] (2/4) Epoch 25, batch 2800, loss[loss=0.181, simple_loss=0.277, pruned_loss=0.0425, over 18084.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2497, pruned_loss=0.03439, over 3610811.16 frames. ], batch size: 62, lr: 4.47e-03, grad_scale: 8.0 +2023-03-09 19:16:02,314 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1469, 5.2296, 5.3051, 5.0051, 5.0326, 4.9830, 5.3265, 5.3945], + device='cuda:2'), covar=tensor([0.0060, 0.0058, 0.0044, 0.0094, 0.0047, 0.0173, 0.0063, 0.0066], + device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0074, 0.0079, 0.0098, 0.0078, 0.0109, 0.0091, 0.0090], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 19:16:15,976 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90030.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:16:22,599 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90036.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:16:49,483 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7953, 4.4662, 4.4488, 3.2975, 3.6370, 3.4408, 2.8032, 2.5378], + device='cuda:2'), covar=tensor([0.0245, 0.0154, 0.0094, 0.0346, 0.0357, 0.0246, 0.0694, 0.0857], + device='cuda:2'), in_proj_covar=tensor([0.0075, 0.0063, 0.0068, 0.0071, 0.0093, 0.0071, 0.0080, 0.0086], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 19:16:57,960 INFO [train.py:898] (2/4) Epoch 25, batch 2850, loss[loss=0.1783, simple_loss=0.2689, pruned_loss=0.0439, over 17969.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2501, pruned_loss=0.03447, over 3616909.96 frames. ], batch size: 65, lr: 4.47e-03, grad_scale: 8.0 +2023-03-09 19:17:11,248 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90078.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:17:14,790 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90081.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:17:31,025 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90095.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 19:17:38,648 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.912e+02 2.603e+02 3.094e+02 3.729e+02 6.556e+02, threshold=6.188e+02, percent-clipped=2.0 +2023-03-09 19:17:56,097 INFO [train.py:898] (2/4) Epoch 25, batch 2900, loss[loss=0.1675, simple_loss=0.2612, pruned_loss=0.03691, over 18400.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2494, pruned_loss=0.03443, over 3617854.83 frames. 
], batch size: 52, lr: 4.47e-03, grad_scale: 8.0 +2023-03-09 19:18:10,711 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90129.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:18:12,017 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90130.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:18:26,753 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90143.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 19:18:52,962 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90165.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:18:54,894 INFO [train.py:898] (2/4) Epoch 25, batch 2950, loss[loss=0.1585, simple_loss=0.2587, pruned_loss=0.02916, over 16138.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2492, pruned_loss=0.03445, over 3601423.63 frames. ], batch size: 94, lr: 4.47e-03, grad_scale: 8.0 +2023-03-09 19:19:01,035 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-09 19:19:01,284 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.63 vs. limit=5.0 +2023-03-09 19:19:23,379 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90191.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:19:36,176 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.680e+02 2.539e+02 2.924e+02 3.398e+02 8.084e+02, threshold=5.847e+02, percent-clipped=3.0 +2023-03-09 19:19:48,869 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90213.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:19:53,268 INFO [train.py:898] (2/4) Epoch 25, batch 3000, loss[loss=0.1261, simple_loss=0.2092, pruned_loss=0.02146, over 18464.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2491, pruned_loss=0.03483, over 3589993.99 frames. ], batch size: 43, lr: 4.47e-03, grad_scale: 8.0 +2023-03-09 19:19:53,269 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 19:20:05,341 INFO [train.py:932] (2/4) Epoch 25, validation: loss=0.1501, simple_loss=0.2485, pruned_loss=0.02584, over 944034.00 frames. +2023-03-09 19:20:05,342 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 19:20:10,001 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90221.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 19:20:22,544 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90232.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:21:03,799 INFO [train.py:898] (2/4) Epoch 25, batch 3050, loss[loss=0.158, simple_loss=0.2517, pruned_loss=0.03218, over 17713.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2493, pruned_loss=0.03489, over 3582406.74 frames. ], batch size: 70, lr: 4.47e-03, grad_scale: 8.0 +2023-03-09 19:21:34,155 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90293.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:21:44,553 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.813e+02 2.699e+02 3.154e+02 3.887e+02 1.496e+03, threshold=6.309e+02, percent-clipped=8.0 +2023-03-09 19:22:02,751 INFO [train.py:898] (2/4) Epoch 25, batch 3100, loss[loss=0.1546, simple_loss=0.2393, pruned_loss=0.03498, over 18504.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2489, pruned_loss=0.03467, over 3578465.12 frames. 
], batch size: 47, lr: 4.47e-03, grad_scale: 8.0 +2023-03-09 19:22:24,189 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90336.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:22:42,322 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90351.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:23:00,976 INFO [train.py:898] (2/4) Epoch 25, batch 3150, loss[loss=0.1533, simple_loss=0.2432, pruned_loss=0.03172, over 18488.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2489, pruned_loss=0.03464, over 3580837.74 frames. ], batch size: 51, lr: 4.46e-03, grad_scale: 4.0 +2023-03-09 19:23:19,315 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9806, 4.6191, 4.6101, 3.4996, 3.8252, 3.7138, 2.9712, 2.6359], + device='cuda:2'), covar=tensor([0.0196, 0.0128, 0.0080, 0.0287, 0.0288, 0.0200, 0.0615, 0.0790], + device='cuda:2'), in_proj_covar=tensor([0.0074, 0.0062, 0.0067, 0.0070, 0.0092, 0.0070, 0.0079, 0.0085], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 19:23:20,278 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90384.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:23:33,340 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4983, 2.8205, 4.2110, 3.6362, 2.6585, 4.3796, 3.7845, 2.5673], + device='cuda:2'), covar=tensor([0.0497, 0.1362, 0.0261, 0.0423, 0.1466, 0.0211, 0.0595, 0.1109], + device='cuda:2'), in_proj_covar=tensor([0.0217, 0.0244, 0.0227, 0.0169, 0.0225, 0.0218, 0.0256, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 19:23:42,939 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.817e+02 2.713e+02 3.340e+02 4.151e+02 5.769e+02, threshold=6.681e+02, percent-clipped=0.0 +2023-03-09 19:23:44,817 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-09 19:23:53,115 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90412.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:23:59,427 INFO [train.py:898] (2/4) Epoch 25, batch 3200, loss[loss=0.1942, simple_loss=0.2817, pruned_loss=0.05337, over 12062.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.2485, pruned_loss=0.03432, over 3585497.41 frames. ], batch size: 130, lr: 4.46e-03, grad_scale: 8.0 +2023-03-09 19:24:13,283 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5792, 2.8972, 4.3924, 3.6067, 2.7974, 4.6077, 3.8948, 2.6917], + device='cuda:2'), covar=tensor([0.0535, 0.1393, 0.0280, 0.0489, 0.1379, 0.0199, 0.0566, 0.1031], + device='cuda:2'), in_proj_covar=tensor([0.0218, 0.0244, 0.0229, 0.0170, 0.0226, 0.0219, 0.0258, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 19:24:48,305 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90459.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:24:57,515 INFO [train.py:898] (2/4) Epoch 25, batch 3250, loss[loss=0.1599, simple_loss=0.2538, pruned_loss=0.03301, over 17190.00 frames. ], tot_loss[loss=0.159, simple_loss=0.2492, pruned_loss=0.03439, over 3585889.64 frames. 
], batch size: 78, lr: 4.46e-03, grad_scale: 8.0 +2023-03-09 19:25:19,138 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90486.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:25:31,141 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90496.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:25:38,659 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.740e+02 2.752e+02 3.132e+02 3.659e+02 1.386e+03, threshold=6.263e+02, percent-clipped=2.0 +2023-03-09 19:25:54,837 INFO [train.py:898] (2/4) Epoch 25, batch 3300, loss[loss=0.1425, simple_loss=0.2317, pruned_loss=0.02669, over 18498.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2491, pruned_loss=0.0344, over 3587815.03 frames. ], batch size: 47, lr: 4.46e-03, grad_scale: 8.0 +2023-03-09 19:25:58,362 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.48 vs. limit=2.0 +2023-03-09 19:25:59,807 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90520.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:26:00,803 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90521.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 19:26:42,200 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90557.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:26:52,890 INFO [train.py:898] (2/4) Epoch 25, batch 3350, loss[loss=0.1774, simple_loss=0.2742, pruned_loss=0.04033, over 18354.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2487, pruned_loss=0.03429, over 3592592.45 frames. ], batch size: 56, lr: 4.46e-03, grad_scale: 8.0 +2023-03-09 19:26:55,394 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90569.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:27:17,361 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90588.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:27:34,779 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.869e+02 2.603e+02 3.023e+02 3.526e+02 7.217e+02, threshold=6.047e+02, percent-clipped=2.0 +2023-03-09 19:27:42,727 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-03-09 19:27:50,772 INFO [train.py:898] (2/4) Epoch 25, batch 3400, loss[loss=0.1353, simple_loss=0.2191, pruned_loss=0.02579, over 18497.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2493, pruned_loss=0.03447, over 3587514.28 frames. ], batch size: 44, lr: 4.46e-03, grad_scale: 8.0 +2023-03-09 19:28:18,031 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4471, 6.0850, 5.4915, 5.8784, 5.6802, 5.4413, 6.1407, 6.0882], + device='cuda:2'), covar=tensor([0.1113, 0.0636, 0.0587, 0.0668, 0.1283, 0.0745, 0.0515, 0.0607], + device='cuda:2'), in_proj_covar=tensor([0.0627, 0.0557, 0.0404, 0.0580, 0.0778, 0.0575, 0.0795, 0.0599], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 19:28:49,669 INFO [train.py:898] (2/4) Epoch 25, batch 3450, loss[loss=0.1706, simple_loss=0.2633, pruned_loss=0.03901, over 18487.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2492, pruned_loss=0.03447, over 3591810.58 frames. ], batch size: 51, lr: 4.46e-03, grad_scale: 8.0 +2023-03-09 19:28:50,529 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.89 vs. 
limit=5.0 +2023-03-09 19:29:17,431 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6263, 3.6069, 3.4580, 3.1025, 3.4525, 2.8553, 2.7423, 3.6362], + device='cuda:2'), covar=tensor([0.0066, 0.0097, 0.0089, 0.0135, 0.0091, 0.0183, 0.0199, 0.0065], + device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0170, 0.0143, 0.0194, 0.0150, 0.0185, 0.0188, 0.0130], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 19:29:31,657 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.836e+02 2.461e+02 2.925e+02 3.622e+02 7.224e+02, threshold=5.850e+02, percent-clipped=2.0 +2023-03-09 19:29:37,280 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90707.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:29:48,693 INFO [train.py:898] (2/4) Epoch 25, batch 3500, loss[loss=0.1697, simple_loss=0.2615, pruned_loss=0.03892, over 18496.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2482, pruned_loss=0.03414, over 3592285.67 frames. ], batch size: 51, lr: 4.46e-03, grad_scale: 8.0 +2023-03-09 19:30:44,126 INFO [train.py:898] (2/4) Epoch 25, batch 3550, loss[loss=0.1711, simple_loss=0.2656, pruned_loss=0.03832, over 18301.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.2483, pruned_loss=0.03397, over 3597610.59 frames. ], batch size: 54, lr: 4.45e-03, grad_scale: 8.0 +2023-03-09 19:30:55,321 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-09 19:31:04,157 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90786.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:31:22,415 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.855e+02 2.570e+02 3.030e+02 3.622e+02 8.486e+02, threshold=6.060e+02, percent-clipped=3.0 +2023-03-09 19:31:35,459 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90815.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:31:37,536 INFO [train.py:898] (2/4) Epoch 25, batch 3600, loss[loss=0.1603, simple_loss=0.2533, pruned_loss=0.03367, over 17941.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2486, pruned_loss=0.0341, over 3592503.01 frames. ], batch size: 65, lr: 4.45e-03, grad_scale: 8.0 +2023-03-09 19:31:56,452 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90834.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:31:58,713 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8183, 3.2041, 4.5475, 3.7520, 2.7706, 4.7555, 4.0320, 3.1588], + device='cuda:2'), covar=tensor([0.0416, 0.1133, 0.0245, 0.0472, 0.1474, 0.0205, 0.0493, 0.0799], + device='cuda:2'), in_proj_covar=tensor([0.0215, 0.0243, 0.0226, 0.0169, 0.0226, 0.0217, 0.0254, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 19:32:11,877 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90849.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:32:40,272 INFO [train.py:898] (2/4) Epoch 26, batch 0, loss[loss=0.1407, simple_loss=0.2236, pruned_loss=0.02894, over 17697.00 frames. ], tot_loss[loss=0.1407, simple_loss=0.2236, pruned_loss=0.02894, over 17697.00 frames. 
], batch size: 39, lr: 4.36e-03, grad_scale: 8.0 +2023-03-09 19:32:40,273 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 19:32:48,830 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0696, 2.3536, 3.3532, 2.9732, 2.1694, 3.4709, 3.2661, 2.3032], + device='cuda:2'), covar=tensor([0.0627, 0.1584, 0.0500, 0.0582, 0.1849, 0.0375, 0.0771, 0.1085], + device='cuda:2'), in_proj_covar=tensor([0.0214, 0.0242, 0.0225, 0.0169, 0.0225, 0.0216, 0.0253, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 19:32:52,307 INFO [train.py:932] (2/4) Epoch 26, validation: loss=0.1501, simple_loss=0.2487, pruned_loss=0.02573, over 944034.00 frames. +2023-03-09 19:32:52,307 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 19:32:53,490 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90852.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:33:00,034 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8066, 3.0442, 2.7240, 3.0653, 3.8655, 3.7930, 3.4607, 3.0386], + device='cuda:2'), covar=tensor([0.0167, 0.0305, 0.0558, 0.0355, 0.0158, 0.0147, 0.0296, 0.0381], + device='cuda:2'), in_proj_covar=tensor([0.0142, 0.0141, 0.0163, 0.0161, 0.0137, 0.0121, 0.0158, 0.0160], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 19:33:35,255 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90888.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:33:49,735 INFO [train.py:898] (2/4) Epoch 26, batch 50, loss[loss=0.1605, simple_loss=0.2563, pruned_loss=0.03235, over 17110.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2431, pruned_loss=0.03225, over 820680.47 frames. ], batch size: 78, lr: 4.36e-03, grad_scale: 8.0 +2023-03-09 19:33:52,051 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.759e+02 2.622e+02 3.196e+02 4.102e+02 7.514e+02, threshold=6.391e+02, percent-clipped=4.0 +2023-03-09 19:34:01,618 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90910.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:34:31,654 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=90936.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:34:44,422 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5970, 2.4019, 2.4925, 2.5984, 2.9220, 4.1782, 4.1572, 3.0978], + device='cuda:2'), covar=tensor([0.2079, 0.2588, 0.3037, 0.2077, 0.2653, 0.0392, 0.0455, 0.1095], + device='cuda:2'), in_proj_covar=tensor([0.0318, 0.0356, 0.0400, 0.0286, 0.0392, 0.0256, 0.0298, 0.0266], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 19:34:48,576 INFO [train.py:898] (2/4) Epoch 26, batch 100, loss[loss=0.1384, simple_loss=0.2238, pruned_loss=0.0265, over 18489.00 frames. ], tot_loss[loss=0.1552, simple_loss=0.245, pruned_loss=0.03269, over 1433394.14 frames. ], batch size: 44, lr: 4.36e-03, grad_scale: 8.0 +2023-03-09 19:35:47,458 INFO [train.py:898] (2/4) Epoch 26, batch 150, loss[loss=0.1427, simple_loss=0.2277, pruned_loss=0.02889, over 18267.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.2478, pruned_loss=0.03399, over 1897425.55 frames. 
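
Note on the lr field: it decays smoothly with batch count within an epoch (4.47e-03 then 4.46e-03 late in Epoch 25 above) and steps down at each epoch boundary (4.36e-03 from Epoch 26, batch 0). The values are consistent with an Eden-style schedule; a sketch with assumed constants (base_lr, lr_batches and lr_epochs are assumptions chosen to reproduce the logged values):

    def eden_lr(base_lr: float, batch: int, epoch: int,
                lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
        # Inverse-quartic-root decay in both batch count and (0-based)
        # epoch index. With base_lr=0.05 this gives ~4.47e-03 around
        # batch_count=90336 in Epoch 25 and ~4.36e-03 at the start of
        # Epoch 26, matching the lr values in this log.
        batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
        epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
        return base_lr * batch_factor * epoch_factor
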
], batch size: 47, lr: 4.36e-03, grad_scale: 8.0 +2023-03-09 19:35:49,744 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.912e+02 2.712e+02 3.182e+02 3.692e+02 5.749e+02, threshold=6.364e+02, percent-clipped=0.0 +2023-03-09 19:35:54,636 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91007.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:36:20,219 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1799, 5.2071, 5.4311, 5.4771, 5.0460, 5.9431, 5.5796, 5.1795], + device='cuda:2'), covar=tensor([0.1032, 0.0647, 0.0823, 0.0738, 0.1365, 0.0710, 0.0682, 0.1801], + device='cuda:2'), in_proj_covar=tensor([0.0366, 0.0297, 0.0323, 0.0325, 0.0337, 0.0437, 0.0292, 0.0431], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 19:36:28,097 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-03-09 19:36:46,108 INFO [train.py:898] (2/4) Epoch 26, batch 200, loss[loss=0.1622, simple_loss=0.2595, pruned_loss=0.0324, over 18621.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.249, pruned_loss=0.03419, over 2282463.55 frames. ], batch size: 52, lr: 4.36e-03, grad_scale: 8.0 +2023-03-09 19:36:49,194 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-03-09 19:36:50,884 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91055.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:37:31,624 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8503, 4.5868, 4.6570, 3.4739, 3.7879, 3.5573, 2.9313, 2.8319], + device='cuda:2'), covar=tensor([0.0218, 0.0139, 0.0066, 0.0311, 0.0318, 0.0239, 0.0649, 0.0737], + device='cuda:2'), in_proj_covar=tensor([0.0073, 0.0062, 0.0067, 0.0070, 0.0091, 0.0069, 0.0078, 0.0084], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 19:37:43,764 INFO [train.py:898] (2/4) Epoch 26, batch 250, loss[loss=0.1783, simple_loss=0.2713, pruned_loss=0.04265, over 17126.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.25, pruned_loss=0.0346, over 2561986.12 frames. ], batch size: 78, lr: 4.36e-03, grad_scale: 8.0 +2023-03-09 19:37:45,995 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.600e+02 2.590e+02 3.099e+02 3.572e+02 5.022e+02, threshold=6.197e+02, percent-clipped=0.0 +2023-03-09 19:37:59,558 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91115.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:38:35,935 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-09 19:38:41,427 INFO [train.py:898] (2/4) Epoch 26, batch 300, loss[loss=0.1579, simple_loss=0.2521, pruned_loss=0.03179, over 18571.00 frames. ], tot_loss[loss=0.1585, simple_loss=0.2489, pruned_loss=0.03402, over 2804250.32 frames. 
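
Note on the optim.py:369 lines: the five numbers after "grad-norm quartiles" are a five-number summary (min, 25%, median, 75%, max) of recent gradient norms, and the logged threshold equals Clipping_scale times the median (e.g. 2.0 * 3.182e+02 = 6.364e+02 in the line above); percent-clipped reports how often recent gradient norms exceeded the threshold. A sketch of the bookkeeping (assumed helper, not the actual optim.py code):

    import torch

    def clipping_stats(recent_grad_norms, clipping_scale=2.0):
        # Five-number summary plus a median-based clipping threshold.
        norms = torch.tensor(recent_grad_norms)
        quartiles = torch.quantile(
            norms, torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
        threshold = clipping_scale * quartiles[2]  # 2.0 * median, as logged
        percent_clipped = 100.0 * (norms > threshold).float().mean()
        return quartiles, threshold, percent_clipped
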
], batch size: 54, lr: 4.36e-03, grad_scale: 8.0 +2023-03-09 19:38:42,819 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91152.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:38:55,163 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91163.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:39:06,740 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8130, 4.8845, 4.8991, 4.6409, 4.6482, 4.6109, 4.9813, 5.0254], + device='cuda:2'), covar=tensor([0.0079, 0.0067, 0.0063, 0.0119, 0.0072, 0.0175, 0.0076, 0.0090], + device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0074, 0.0079, 0.0098, 0.0078, 0.0108, 0.0090, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 19:39:39,111 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91200.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:39:40,111 INFO [train.py:898] (2/4) Epoch 26, batch 350, loss[loss=0.1396, simple_loss=0.2163, pruned_loss=0.03142, over 18444.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.249, pruned_loss=0.03405, over 2983266.60 frames. ], batch size: 43, lr: 4.36e-03, grad_scale: 8.0 +2023-03-09 19:39:42,442 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.808e+02 2.557e+02 3.001e+02 3.591e+02 6.784e+02, threshold=6.003e+02, percent-clipped=1.0 +2023-03-09 19:39:44,920 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91205.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:40:15,342 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5670, 3.3537, 2.2498, 4.2651, 2.9172, 4.0481, 2.4235, 3.7247], + device='cuda:2'), covar=tensor([0.0630, 0.0908, 0.1495, 0.0528, 0.0959, 0.0315, 0.1270, 0.0459], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0232, 0.0195, 0.0296, 0.0197, 0.0272, 0.0208, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 19:40:38,716 INFO [train.py:898] (2/4) Epoch 26, batch 400, loss[loss=0.1667, simple_loss=0.2535, pruned_loss=0.0399, over 18334.00 frames. ], tot_loss[loss=0.1591, simple_loss=0.2496, pruned_loss=0.03434, over 3124126.80 frames. ], batch size: 56, lr: 4.35e-03, grad_scale: 8.0 +2023-03-09 19:41:18,914 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91285.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:41:37,899 INFO [train.py:898] (2/4) Epoch 26, batch 450, loss[loss=0.1756, simple_loss=0.2629, pruned_loss=0.04417, over 17981.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2501, pruned_loss=0.03443, over 3220041.79 frames. 
], batch size: 65, lr: 4.35e-03, grad_scale: 8.0 +2023-03-09 19:41:40,037 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.596e+02 2.481e+02 2.873e+02 3.390e+02 6.237e+02, threshold=5.746e+02, percent-clipped=1.0 +2023-03-09 19:41:51,799 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7800, 3.7524, 5.1375, 4.5229, 3.4549, 3.1176, 4.5818, 5.3477], + device='cuda:2'), covar=tensor([0.0813, 0.1572, 0.0176, 0.0371, 0.0987, 0.1168, 0.0343, 0.0219], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0280, 0.0165, 0.0185, 0.0194, 0.0193, 0.0199, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 19:42:09,713 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-03-09 19:42:10,681 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7690, 4.5004, 4.5429, 3.4018, 3.6563, 3.3909, 2.5668, 2.6094], + device='cuda:2'), covar=tensor([0.0215, 0.0125, 0.0084, 0.0303, 0.0341, 0.0248, 0.0741, 0.0794], + device='cuda:2'), in_proj_covar=tensor([0.0074, 0.0063, 0.0067, 0.0070, 0.0092, 0.0070, 0.0079, 0.0085], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 19:42:29,821 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91346.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:42:35,722 INFO [train.py:898] (2/4) Epoch 26, batch 500, loss[loss=0.1474, simple_loss=0.2378, pruned_loss=0.02854, over 18269.00 frames. ], tot_loss[loss=0.1585, simple_loss=0.2494, pruned_loss=0.03383, over 3301417.77 frames. ], batch size: 47, lr: 4.35e-03, grad_scale: 8.0 +2023-03-09 19:43:32,662 INFO [train.py:898] (2/4) Epoch 26, batch 550, loss[loss=0.1766, simple_loss=0.2664, pruned_loss=0.04343, over 17861.00 frames. ], tot_loss[loss=0.1588, simple_loss=0.2497, pruned_loss=0.03399, over 3366889.19 frames. ], batch size: 70, lr: 4.35e-03, grad_scale: 8.0 +2023-03-09 19:43:35,352 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.676e+02 2.412e+02 2.843e+02 3.584e+02 8.267e+02, threshold=5.686e+02, percent-clipped=4.0 +2023-03-09 19:43:52,859 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-09 19:44:30,264 INFO [train.py:898] (2/4) Epoch 26, batch 600, loss[loss=0.1289, simple_loss=0.2133, pruned_loss=0.0222, over 18418.00 frames. ], tot_loss[loss=0.1594, simple_loss=0.2502, pruned_loss=0.03433, over 3404582.36 frames. ], batch size: 43, lr: 4.35e-03, grad_scale: 8.0 +2023-03-09 19:45:11,612 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0907, 5.5391, 3.0429, 5.4074, 5.3046, 5.5842, 5.4233, 3.1371], + device='cuda:2'), covar=tensor([0.0205, 0.0066, 0.0668, 0.0065, 0.0062, 0.0063, 0.0077, 0.0780], + device='cuda:2'), in_proj_covar=tensor([0.0091, 0.0083, 0.0097, 0.0098, 0.0089, 0.0079, 0.0086, 0.0098], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 19:45:29,308 INFO [train.py:898] (2/4) Epoch 26, batch 650, loss[loss=0.157, simple_loss=0.2533, pruned_loss=0.03032, over 18621.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2498, pruned_loss=0.03402, over 3446947.52 frames. 
], batch size: 52, lr: 4.35e-03, grad_scale: 8.0 +2023-03-09 19:45:32,527 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.747e+02 2.481e+02 2.888e+02 3.491e+02 6.758e+02, threshold=5.775e+02, percent-clipped=2.0 +2023-03-09 19:45:33,455 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-09 19:45:34,571 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91505.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:46:16,616 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91542.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 19:46:27,383 INFO [train.py:898] (2/4) Epoch 26, batch 700, loss[loss=0.1294, simple_loss=0.2151, pruned_loss=0.02179, over 18160.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2496, pruned_loss=0.03408, over 3468920.26 frames. ], batch size: 44, lr: 4.35e-03, grad_scale: 4.0 +2023-03-09 19:46:29,757 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91553.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:46:43,433 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.6121, 6.1073, 5.6593, 5.9357, 5.7946, 5.5566, 6.2394, 6.1790], + device='cuda:2'), covar=tensor([0.1124, 0.0730, 0.0450, 0.0693, 0.1304, 0.0705, 0.0573, 0.0628], + device='cuda:2'), in_proj_covar=tensor([0.0628, 0.0556, 0.0404, 0.0579, 0.0777, 0.0575, 0.0791, 0.0603], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 19:47:26,223 INFO [train.py:898] (2/4) Epoch 26, batch 750, loss[loss=0.1387, simple_loss=0.2203, pruned_loss=0.02859, over 18431.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2485, pruned_loss=0.03375, over 3503135.29 frames. ], batch size: 43, lr: 4.35e-03, grad_scale: 4.0 +2023-03-09 19:47:29,380 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91603.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 19:47:30,098 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.568e+02 2.592e+02 2.993e+02 3.569e+02 6.310e+02, threshold=5.987e+02, percent-clipped=2.0 +2023-03-09 19:48:12,551 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91641.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:48:23,823 INFO [train.py:898] (2/4) Epoch 26, batch 800, loss[loss=0.1856, simple_loss=0.2685, pruned_loss=0.05139, over 12288.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2481, pruned_loss=0.03357, over 3519111.56 frames. ], batch size: 130, lr: 4.35e-03, grad_scale: 8.0 +2023-03-09 19:48:34,572 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6800, 3.7304, 5.1048, 2.9883, 4.4750, 2.5429, 3.1850, 1.6704], + device='cuda:2'), covar=tensor([0.1432, 0.0985, 0.0169, 0.0971, 0.0511, 0.2739, 0.2804, 0.2461], + device='cuda:2'), in_proj_covar=tensor([0.0228, 0.0250, 0.0218, 0.0206, 0.0263, 0.0277, 0.0335, 0.0242], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 19:49:22,520 INFO [train.py:898] (2/4) Epoch 26, batch 850, loss[loss=0.1665, simple_loss=0.2545, pruned_loss=0.03924, over 18306.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2477, pruned_loss=0.03361, over 3533128.66 frames. 
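
Note on the zipformer.py:625 lines: each traces stochastic layer skipping for one encoder stack; warmup_begin/warmup_end give that stack's warmup window in batch counts, and num_to_drop / layers_to_drop record which layers are bypassed on this batch. At batch_count ≈ 9e4, far past every warmup_end, the drop probability is small, which is why most lines show num_to_drop=0. A sketch of the sampling step only (the real schedule ties the probability to batch_count and the warmup window; drop_prob here is an assumption):

    import random

    def sample_layers_to_drop(num_layers: int, drop_prob: float) -> set:
        # Independently bypass each layer with probability drop_prob;
        # the result is what the log prints as layers_to_drop={...}.
        return {i for i in range(num_layers) if random.random() < drop_prob}
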
], batch size: 54, lr: 4.34e-03, grad_scale: 8.0 +2023-03-09 19:49:25,715 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.771e+02 2.531e+02 3.031e+02 3.445e+02 6.326e+02, threshold=6.062e+02, percent-clipped=1.0 +2023-03-09 19:50:20,625 INFO [train.py:898] (2/4) Epoch 26, batch 900, loss[loss=0.1582, simple_loss=0.2564, pruned_loss=0.02999, over 18398.00 frames. ], tot_loss[loss=0.157, simple_loss=0.247, pruned_loss=0.03344, over 3543298.49 frames. ], batch size: 52, lr: 4.34e-03, grad_scale: 8.0 +2023-03-09 19:51:14,105 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-03-09 19:51:16,416 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-03-09 19:51:19,511 INFO [train.py:898] (2/4) Epoch 26, batch 950, loss[loss=0.1365, simple_loss=0.2232, pruned_loss=0.02489, over 18150.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2463, pruned_loss=0.03333, over 3558420.60 frames. ], batch size: 44, lr: 4.34e-03, grad_scale: 8.0 +2023-03-09 19:51:22,550 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.812e+02 2.439e+02 3.074e+02 3.438e+02 1.468e+03, threshold=6.149e+02, percent-clipped=4.0 +2023-03-09 19:52:17,347 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1547, 5.2163, 5.4557, 5.5216, 5.0338, 6.0096, 5.6080, 5.2348], + device='cuda:2'), covar=tensor([0.1181, 0.0671, 0.0726, 0.0791, 0.1465, 0.0681, 0.0718, 0.1662], + device='cuda:2'), in_proj_covar=tensor([0.0363, 0.0296, 0.0322, 0.0325, 0.0337, 0.0435, 0.0291, 0.0427], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 19:52:18,266 INFO [train.py:898] (2/4) Epoch 26, batch 1000, loss[loss=0.164, simple_loss=0.2552, pruned_loss=0.03636, over 18471.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2474, pruned_loss=0.03366, over 3550524.65 frames. ], batch size: 53, lr: 4.34e-03, grad_scale: 8.0 +2023-03-09 19:53:05,566 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91891.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:53:13,175 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91898.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 19:53:16,256 INFO [train.py:898] (2/4) Epoch 26, batch 1050, loss[loss=0.1767, simple_loss=0.2722, pruned_loss=0.04064, over 15987.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.2482, pruned_loss=0.03382, over 3557418.07 frames. ], batch size: 94, lr: 4.34e-03, grad_scale: 8.0 +2023-03-09 19:53:19,766 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.941e+02 2.632e+02 3.014e+02 3.529e+02 5.169e+02, threshold=6.027e+02, percent-clipped=0.0 +2023-03-09 19:53:38,734 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-09 19:54:03,928 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91941.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:54:14,920 INFO [train.py:898] (2/4) Epoch 26, batch 1100, loss[loss=0.14, simple_loss=0.2244, pruned_loss=0.0278, over 18470.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2488, pruned_loss=0.03402, over 3559865.28 frames. 
], batch size: 44, lr: 4.34e-03, grad_scale: 8.0 +2023-03-09 19:54:16,424 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91952.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:54:19,686 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91955.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:54:50,817 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91981.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 19:54:59,755 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=91989.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:55:17,951 INFO [train.py:898] (2/4) Epoch 26, batch 1150, loss[loss=0.1579, simple_loss=0.2392, pruned_loss=0.03831, over 17695.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2481, pruned_loss=0.03361, over 3573238.45 frames. ], batch size: 39, lr: 4.34e-03, grad_scale: 8.0 +2023-03-09 19:55:21,344 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.947e+02 2.558e+02 3.039e+02 3.805e+02 7.483e+02, threshold=6.077e+02, percent-clipped=1.0 +2023-03-09 19:55:34,966 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92016.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 19:56:06,545 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92042.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 19:56:16,320 INFO [train.py:898] (2/4) Epoch 26, batch 1200, loss[loss=0.1668, simple_loss=0.2583, pruned_loss=0.03767, over 17710.00 frames. ], tot_loss[loss=0.1586, simple_loss=0.249, pruned_loss=0.0341, over 3562926.82 frames. ], batch size: 70, lr: 4.34e-03, grad_scale: 8.0 +2023-03-09 19:56:44,661 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-09 19:57:14,322 INFO [train.py:898] (2/4) Epoch 26, batch 1250, loss[loss=0.1658, simple_loss=0.2568, pruned_loss=0.03745, over 18566.00 frames. ], tot_loss[loss=0.1584, simple_loss=0.2486, pruned_loss=0.03404, over 3579541.91 frames. ], batch size: 54, lr: 4.33e-03, grad_scale: 8.0 +2023-03-09 19:57:17,654 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.939e+02 2.685e+02 3.194e+02 3.689e+02 7.845e+02, threshold=6.387e+02, percent-clipped=2.0 +2023-03-09 19:58:12,743 INFO [train.py:898] (2/4) Epoch 26, batch 1300, loss[loss=0.1833, simple_loss=0.2798, pruned_loss=0.04339, over 18461.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.2487, pruned_loss=0.03401, over 3576590.71 frames. 
], batch size: 59, lr: 4.33e-03, grad_scale: 8.0 +2023-03-09 19:58:19,723 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0488, 5.5401, 2.8869, 5.3970, 5.2787, 5.5695, 5.4098, 2.9174], + device='cuda:2'), covar=tensor([0.0220, 0.0058, 0.0713, 0.0063, 0.0062, 0.0065, 0.0071, 0.0890], + device='cuda:2'), in_proj_covar=tensor([0.0092, 0.0084, 0.0098, 0.0099, 0.0089, 0.0080, 0.0087, 0.0099], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 19:58:24,315 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8090, 3.5761, 5.2174, 3.0327, 4.5461, 2.6230, 3.0220, 1.6472], + device='cuda:2'), covar=tensor([0.1266, 0.0946, 0.0145, 0.0920, 0.0496, 0.2497, 0.2789, 0.2303], + device='cuda:2'), in_proj_covar=tensor([0.0229, 0.0253, 0.0220, 0.0207, 0.0266, 0.0278, 0.0339, 0.0244], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 19:58:25,333 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6953, 3.4132, 4.7403, 4.1387, 3.1142, 2.8319, 4.0185, 4.9454], + device='cuda:2'), covar=tensor([0.0847, 0.1567, 0.0229, 0.0468, 0.1092, 0.1355, 0.0511, 0.0252], + device='cuda:2'), in_proj_covar=tensor([0.0154, 0.0285, 0.0169, 0.0188, 0.0198, 0.0197, 0.0202, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 19:58:59,550 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8810, 2.6937, 4.5360, 4.0223, 2.6146, 4.8066, 4.0294, 3.0407], + device='cuda:2'), covar=tensor([0.0451, 0.2038, 0.0311, 0.0408, 0.2065, 0.0241, 0.0629, 0.1264], + device='cuda:2'), in_proj_covar=tensor([0.0219, 0.0244, 0.0231, 0.0171, 0.0227, 0.0220, 0.0258, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 19:59:05,718 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8963, 5.2147, 2.4043, 5.1120, 4.9558, 5.2564, 5.0517, 2.8195], + device='cuda:2'), covar=tensor([0.0236, 0.0066, 0.0917, 0.0074, 0.0077, 0.0075, 0.0091, 0.0955], + device='cuda:2'), in_proj_covar=tensor([0.0092, 0.0083, 0.0098, 0.0099, 0.0089, 0.0080, 0.0087, 0.0098], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 19:59:07,897 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92198.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 19:59:11,321 INFO [train.py:898] (2/4) Epoch 26, batch 1350, loss[loss=0.1523, simple_loss=0.2462, pruned_loss=0.02922, over 18383.00 frames. ], tot_loss[loss=0.1581, simple_loss=0.2484, pruned_loss=0.03389, over 3577669.44 frames. ], batch size: 50, lr: 4.33e-03, grad_scale: 8.0 +2023-03-09 19:59:14,649 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.584e+02 2.419e+02 2.885e+02 3.522e+02 6.118e+02, threshold=5.770e+02, percent-clipped=0.0 +2023-03-09 19:59:20,759 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.81 vs. 
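
Note on the zipformer.py:1455 dumps: attn_weights_entropy is a periodic diagnostic of how peaked the attention distributions are (one summary value per head or module, with covariance statistics printed alongside); values roughly near log(sequence_length) suggest near-uniform attention. One way to compute such a diagnostic (a sketch, not necessarily the exact zipformer.py computation):

    import torch

    def attn_entropy(attn_weights: torch.Tensor,
                     eps: float = 1.0e-20) -> torch.Tensor:
        # attn_weights: (num_heads, num_queries, num_keys), rows sum to 1.
        # Returns mean entropy per head: near 0 for sharply peaked
        # attention, up to log(num_keys) for uniform attention.
        ent = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
        return ent.mean(dim=-1)
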
limit=5.0 +2023-03-09 19:59:26,014 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3152, 5.2651, 5.6200, 5.5540, 5.2928, 6.1390, 5.7261, 5.3682], + device='cuda:2'), covar=tensor([0.1104, 0.0625, 0.0660, 0.0773, 0.1399, 0.0659, 0.0685, 0.1655], + device='cuda:2'), in_proj_covar=tensor([0.0367, 0.0300, 0.0323, 0.0328, 0.0339, 0.0440, 0.0293, 0.0433], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 19:59:35,136 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4816, 2.7523, 2.4018, 2.7317, 3.5307, 3.4767, 3.0105, 2.8478], + device='cuda:2'), covar=tensor([0.0171, 0.0303, 0.0617, 0.0401, 0.0193, 0.0175, 0.0390, 0.0387], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0144, 0.0166, 0.0164, 0.0139, 0.0125, 0.0161, 0.0164], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 20:00:03,380 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92246.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 20:00:05,023 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92247.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:00:09,203 INFO [train.py:898] (2/4) Epoch 26, batch 1400, loss[loss=0.142, simple_loss=0.2235, pruned_loss=0.03025, over 18400.00 frames. ], tot_loss[loss=0.158, simple_loss=0.248, pruned_loss=0.03398, over 3574823.90 frames. ], batch size: 42, lr: 4.33e-03, grad_scale: 8.0 +2023-03-09 20:01:06,844 INFO [train.py:898] (2/4) Epoch 26, batch 1450, loss[loss=0.1594, simple_loss=0.2633, pruned_loss=0.02774, over 18499.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2476, pruned_loss=0.03363, over 3589069.04 frames. ], batch size: 51, lr: 4.33e-03, grad_scale: 8.0 +2023-03-09 20:01:10,175 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.801e+02 2.582e+02 3.137e+02 3.833e+02 9.171e+02, threshold=6.274e+02, percent-clipped=8.0 +2023-03-09 20:01:19,060 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92311.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:01:48,293 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92337.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 20:02:04,831 INFO [train.py:898] (2/4) Epoch 26, batch 1500, loss[loss=0.1679, simple_loss=0.2596, pruned_loss=0.03807, over 18127.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2478, pruned_loss=0.0338, over 3561613.64 frames. ], batch size: 62, lr: 4.33e-03, grad_scale: 8.0 +2023-03-09 20:03:03,825 INFO [train.py:898] (2/4) Epoch 26, batch 1550, loss[loss=0.178, simple_loss=0.2755, pruned_loss=0.04025, over 18226.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2475, pruned_loss=0.03357, over 3577186.98 frames. ], batch size: 60, lr: 4.33e-03, grad_scale: 8.0 +2023-03-09 20:03:07,221 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.867e+02 2.584e+02 2.933e+02 3.549e+02 6.992e+02, threshold=5.866e+02, percent-clipped=1.0 +2023-03-09 20:04:01,756 INFO [train.py:898] (2/4) Epoch 26, batch 1600, loss[loss=0.1366, simple_loss=0.2284, pruned_loss=0.02241, over 18255.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.2473, pruned_loss=0.03319, over 3581891.33 frames. ], batch size: 47, lr: 4.33e-03, grad_scale: 8.0 +2023-03-09 20:04:31,048 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.48 vs. 
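
Note on the scaling.py:679 lines: the whitening metric compares the channel covariance (evaluated within each of num_groups channel groups) to a multiple of the identity; 1.0 means perfectly white, and the module only intervenes when the metric exceeds the limit, hence reports like "metric=4.48 vs. limit=5.0". One plausible formulation of the metric (an assumption, not necessarily the exact scaling.py code):

    import torch

    def whitening_metric(x: torch.Tensor) -> torch.Tensor:
        # x: (num_frames, num_channels). Equals 1.0 when the covariance
        # is a multiple of the identity; grows as a few directions
        # dominate the variance.
        x = x - x.mean(dim=0)
        cov = (x.t() @ x) / x.shape[0]
        d = cov.shape[0]
        return d * (cov @ cov).diagonal().sum() / (cov.diagonal().sum() ** 2)
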
limit=5.0 +2023-03-09 20:04:58,558 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-03-09 20:04:59,855 INFO [train.py:898] (2/4) Epoch 26, batch 1650, loss[loss=0.1325, simple_loss=0.2268, pruned_loss=0.01907, over 18406.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2471, pruned_loss=0.03291, over 3584486.69 frames. ], batch size: 48, lr: 4.33e-03, grad_scale: 8.0 +2023-03-09 20:05:02,962 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.649e+02 2.477e+02 2.880e+02 3.587e+02 5.533e+02, threshold=5.760e+02, percent-clipped=0.0 +2023-03-09 20:05:53,465 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92547.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:05:54,642 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3848, 3.2190, 1.8143, 4.1183, 2.7507, 3.4988, 2.1154, 3.4724], + device='cuda:2'), covar=tensor([0.0618, 0.0980, 0.1857, 0.0528, 0.1015, 0.0346, 0.1504, 0.0581], + device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0234, 0.0197, 0.0298, 0.0199, 0.0274, 0.0209, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 20:05:58,141 INFO [train.py:898] (2/4) Epoch 26, batch 1700, loss[loss=0.1499, simple_loss=0.2394, pruned_loss=0.0302, over 18279.00 frames. ], tot_loss[loss=0.1563, simple_loss=0.2472, pruned_loss=0.03273, over 3598510.00 frames. ], batch size: 47, lr: 4.32e-03, grad_scale: 8.0 +2023-03-09 20:06:48,779 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92595.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:06:55,618 INFO [train.py:898] (2/4) Epoch 26, batch 1750, loss[loss=0.1502, simple_loss=0.2401, pruned_loss=0.03018, over 18277.00 frames. ], tot_loss[loss=0.156, simple_loss=0.2469, pruned_loss=0.03255, over 3603677.50 frames. ], batch size: 49, lr: 4.32e-03, grad_scale: 8.0 +2023-03-09 20:06:57,759 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6698, 3.0441, 4.3404, 3.5911, 2.7638, 4.6495, 3.9410, 2.9061], + device='cuda:2'), covar=tensor([0.0489, 0.1313, 0.0314, 0.0507, 0.1551, 0.0192, 0.0559, 0.0961], + device='cuda:2'), in_proj_covar=tensor([0.0220, 0.0245, 0.0230, 0.0172, 0.0227, 0.0220, 0.0257, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 20:06:59,645 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.605e+02 2.546e+02 2.994e+02 3.589e+02 6.308e+02, threshold=5.987e+02, percent-clipped=1.0 +2023-03-09 20:07:08,068 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92611.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:07:25,108 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92625.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:07:38,496 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92637.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 20:07:54,312 INFO [train.py:898] (2/4) Epoch 26, batch 1800, loss[loss=0.154, simple_loss=0.2506, pruned_loss=0.02872, over 18620.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2469, pruned_loss=0.03269, over 3597815.29 frames. 
], batch size: 52, lr: 4.32e-03, grad_scale: 8.0 +2023-03-09 20:08:04,018 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92659.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:08:35,068 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=92685.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 20:08:36,228 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92686.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:08:52,549 INFO [train.py:898] (2/4) Epoch 26, batch 1850, loss[loss=0.1609, simple_loss=0.2589, pruned_loss=0.03139, over 18339.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2476, pruned_loss=0.03283, over 3598494.83 frames. ], batch size: 55, lr: 4.32e-03, grad_scale: 8.0 +2023-03-09 20:08:55,756 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.027e+02 2.840e+02 3.330e+02 3.901e+02 1.111e+03, threshold=6.660e+02, percent-clipped=3.0 +2023-03-09 20:09:09,683 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-03-09 20:09:16,057 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92720.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:09:51,329 INFO [train.py:898] (2/4) Epoch 26, batch 1900, loss[loss=0.1579, simple_loss=0.2506, pruned_loss=0.03256, over 17111.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2471, pruned_loss=0.03282, over 3594135.04 frames. ], batch size: 78, lr: 4.32e-03, grad_scale: 8.0 +2023-03-09 20:10:27,061 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5049, 2.7672, 4.0096, 3.4947, 2.6305, 4.2059, 3.7029, 2.7653], + device='cuda:2'), covar=tensor([0.0498, 0.1371, 0.0367, 0.0462, 0.1472, 0.0261, 0.0659, 0.0954], + device='cuda:2'), in_proj_covar=tensor([0.0218, 0.0243, 0.0228, 0.0170, 0.0225, 0.0219, 0.0255, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 20:10:28,224 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92781.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:10:38,248 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5275, 6.0335, 5.5999, 5.8676, 5.6514, 5.5348, 6.1324, 6.0874], + device='cuda:2'), covar=tensor([0.1030, 0.0741, 0.0473, 0.0649, 0.1253, 0.0617, 0.0512, 0.0579], + device='cuda:2'), in_proj_covar=tensor([0.0622, 0.0553, 0.0401, 0.0571, 0.0768, 0.0567, 0.0779, 0.0593], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 20:10:50,691 INFO [train.py:898] (2/4) Epoch 26, batch 1950, loss[loss=0.1707, simple_loss=0.2604, pruned_loss=0.04051, over 17827.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2483, pruned_loss=0.03319, over 3587304.75 frames. ], batch size: 70, lr: 4.32e-03, grad_scale: 8.0 +2023-03-09 20:10:54,074 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.627e+02 2.403e+02 2.804e+02 3.512e+02 6.323e+02, threshold=5.608e+02, percent-clipped=0.0 +2023-03-09 20:11:26,562 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92831.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:11:49,127 INFO [train.py:898] (2/4) Epoch 26, batch 2000, loss[loss=0.1684, simple_loss=0.2658, pruned_loss=0.03549, over 18403.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2477, pruned_loss=0.03324, over 3582803.47 frames. 
], batch size: 52, lr: 4.32e-03, grad_scale: 8.0 +2023-03-09 20:11:52,308 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-09 20:12:08,334 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7905, 3.0799, 4.4681, 3.9414, 2.7070, 4.6691, 3.9914, 2.7861], + device='cuda:2'), covar=tensor([0.0485, 0.1335, 0.0271, 0.0368, 0.1680, 0.0215, 0.0580, 0.1160], + device='cuda:2'), in_proj_covar=tensor([0.0219, 0.0245, 0.0230, 0.0172, 0.0227, 0.0220, 0.0257, 0.0201], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 20:12:38,419 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92892.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:12:45,926 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92899.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:12:47,842 INFO [train.py:898] (2/4) Epoch 26, batch 2050, loss[loss=0.1357, simple_loss=0.2171, pruned_loss=0.02713, over 18500.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2476, pruned_loss=0.03341, over 3576551.05 frames. ], batch size: 44, lr: 4.32e-03, grad_scale: 8.0 +2023-03-09 20:12:51,213 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.874e+02 2.574e+02 3.005e+02 3.405e+02 1.130e+03, threshold=6.011e+02, percent-clipped=2.0 +2023-03-09 20:13:23,141 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92931.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:13:37,410 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9101, 4.9435, 4.4321, 4.8336, 4.8627, 4.3285, 4.7954, 4.5206], + device='cuda:2'), covar=tensor([0.0569, 0.0633, 0.1770, 0.0921, 0.0692, 0.0556, 0.0566, 0.1228], + device='cuda:2'), in_proj_covar=tensor([0.0510, 0.0579, 0.0721, 0.0450, 0.0472, 0.0521, 0.0557, 0.0692], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 20:13:45,946 INFO [train.py:898] (2/4) Epoch 26, batch 2100, loss[loss=0.1537, simple_loss=0.2396, pruned_loss=0.03392, over 18263.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.248, pruned_loss=0.03347, over 3587727.64 frames. 
], batch size: 47, lr: 4.31e-03, grad_scale: 8.0 +2023-03-09 20:13:56,516 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92960.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:13:58,994 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8499, 3.7944, 5.0792, 4.4863, 3.6025, 3.0934, 4.6148, 5.3531], + device='cuda:2'), covar=tensor([0.0805, 0.1476, 0.0200, 0.0401, 0.0898, 0.1248, 0.0375, 0.0262], + device='cuda:2'), in_proj_covar=tensor([0.0154, 0.0283, 0.0169, 0.0186, 0.0196, 0.0196, 0.0201, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 20:14:13,906 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92975.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:14:21,717 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92981.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:14:34,807 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92992.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:14:44,684 INFO [train.py:898] (2/4) Epoch 26, batch 2150, loss[loss=0.1703, simple_loss=0.2617, pruned_loss=0.03942, over 18287.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2471, pruned_loss=0.0331, over 3592244.55 frames. ], batch size: 57, lr: 4.31e-03, grad_scale: 8.0 +2023-03-09 20:14:48,054 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.894e+02 2.711e+02 3.209e+02 3.707e+02 5.473e+02, threshold=6.417e+02, percent-clipped=0.0 +2023-03-09 20:14:55,257 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93010.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:15:26,178 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93036.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:15:26,201 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93036.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:15:43,286 INFO [train.py:898] (2/4) Epoch 26, batch 2200, loss[loss=0.1464, simple_loss=0.2302, pruned_loss=0.03134, over 18246.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.2476, pruned_loss=0.03312, over 3599652.44 frames. ], batch size: 45, lr: 4.31e-03, grad_scale: 8.0 +2023-03-09 20:16:06,561 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93071.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:16:11,075 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-09 20:16:12,723 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93076.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:16:38,182 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93097.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 20:16:42,352 INFO [train.py:898] (2/4) Epoch 26, batch 2250, loss[loss=0.1388, simple_loss=0.2204, pruned_loss=0.02859, over 18563.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2478, pruned_loss=0.03321, over 3592709.92 frames. 
], batch size: 45, lr: 4.31e-03, grad_scale: 8.0 +2023-03-09 20:16:45,627 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.732e+02 2.458e+02 2.849e+02 3.441e+02 5.512e+02, threshold=5.698e+02, percent-clipped=0.0 +2023-03-09 20:16:58,245 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93115.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:16:59,328 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9145, 5.4212, 2.7378, 5.2342, 5.0714, 5.4036, 5.1849, 2.5041], + device='cuda:2'), covar=tensor([0.0253, 0.0062, 0.0848, 0.0080, 0.0085, 0.0084, 0.0097, 0.1145], + device='cuda:2'), in_proj_covar=tensor([0.0093, 0.0084, 0.0098, 0.0098, 0.0090, 0.0080, 0.0087, 0.0099], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 20:17:41,061 INFO [train.py:898] (2/4) Epoch 26, batch 2300, loss[loss=0.1267, simple_loss=0.2104, pruned_loss=0.02152, over 18414.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2474, pruned_loss=0.03338, over 3590206.46 frames. ], batch size: 43, lr: 4.31e-03, grad_scale: 8.0 +2023-03-09 20:18:05,304 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6268, 2.2581, 2.4888, 2.6595, 2.9984, 4.6715, 4.6887, 3.4602], + device='cuda:2'), covar=tensor([0.2083, 0.2748, 0.3193, 0.1992, 0.2786, 0.0283, 0.0372, 0.0947], + device='cuda:2'), in_proj_covar=tensor([0.0323, 0.0359, 0.0403, 0.0286, 0.0394, 0.0261, 0.0301, 0.0270], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 20:18:09,596 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93176.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:18:22,477 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93187.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:18:23,690 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7063, 4.4317, 4.4431, 3.2514, 3.6705, 3.4072, 2.7337, 2.3343], + device='cuda:2'), covar=tensor([0.0248, 0.0123, 0.0081, 0.0349, 0.0350, 0.0247, 0.0668, 0.0900], + device='cuda:2'), in_proj_covar=tensor([0.0075, 0.0064, 0.0069, 0.0072, 0.0093, 0.0071, 0.0080, 0.0087], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 20:18:40,066 INFO [train.py:898] (2/4) Epoch 26, batch 2350, loss[loss=0.1528, simple_loss=0.2463, pruned_loss=0.02968, over 18400.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2468, pruned_loss=0.03336, over 3593271.76 frames. ], batch size: 48, lr: 4.31e-03, grad_scale: 8.0 +2023-03-09 20:18:43,316 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.656e+02 2.468e+02 2.905e+02 3.361e+02 5.906e+02, threshold=5.811e+02, percent-clipped=1.0 +2023-03-09 20:18:50,790 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. 
limit=2.0 +2023-03-09 20:18:54,862 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5318, 3.0898, 3.8280, 3.5890, 3.1060, 2.9484, 3.5954, 3.9466], + device='cuda:2'), covar=tensor([0.0763, 0.1085, 0.0297, 0.0443, 0.0808, 0.1060, 0.0451, 0.0405], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0280, 0.0167, 0.0184, 0.0194, 0.0195, 0.0198, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 20:19:17,994 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7010, 2.4411, 2.6568, 2.7749, 3.2847, 4.7524, 4.7697, 3.0469], + device='cuda:2'), covar=tensor([0.2051, 0.2582, 0.3115, 0.1891, 0.2461, 0.0293, 0.0361, 0.1221], + device='cuda:2'), in_proj_covar=tensor([0.0323, 0.0360, 0.0403, 0.0286, 0.0394, 0.0262, 0.0301, 0.0270], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 20:19:38,035 INFO [train.py:898] (2/4) Epoch 26, batch 2400, loss[loss=0.1573, simple_loss=0.2579, pruned_loss=0.02839, over 18414.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2471, pruned_loss=0.0334, over 3578612.64 frames. ], batch size: 52, lr: 4.31e-03, grad_scale: 8.0 +2023-03-09 20:19:42,647 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93255.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:19:45,079 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0851, 5.0883, 5.1459, 4.8556, 4.9307, 4.9124, 5.2209, 5.2192], + device='cuda:2'), covar=tensor([0.0072, 0.0069, 0.0065, 0.0110, 0.0063, 0.0148, 0.0076, 0.0114], + device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0074, 0.0080, 0.0099, 0.0079, 0.0109, 0.0091, 0.0090], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 20:19:45,450 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-09 20:20:11,684 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93281.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:20:19,059 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.6059, 5.4325, 5.8186, 5.8304, 5.4817, 6.3486, 6.0279, 5.6709], + device='cuda:2'), covar=tensor([0.0974, 0.0608, 0.0687, 0.0687, 0.1279, 0.0592, 0.0636, 0.1480], + device='cuda:2'), in_proj_covar=tensor([0.0369, 0.0303, 0.0326, 0.0330, 0.0342, 0.0441, 0.0296, 0.0432], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 20:20:19,074 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93287.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:20:36,289 INFO [train.py:898] (2/4) Epoch 26, batch 2450, loss[loss=0.1528, simple_loss=0.2398, pruned_loss=0.0329, over 18525.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2475, pruned_loss=0.03346, over 3592575.44 frames. 
], batch size: 49, lr: 4.31e-03, grad_scale: 8.0 +2023-03-09 20:20:39,775 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.978e+02 2.528e+02 2.957e+02 3.512e+02 5.428e+02, threshold=5.913e+02, percent-clipped=0.0 +2023-03-09 20:20:53,691 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3970, 5.3210, 5.6021, 5.6222, 5.3304, 6.1627, 5.8243, 5.4026], + device='cuda:2'), covar=tensor([0.1057, 0.0658, 0.0710, 0.0885, 0.1370, 0.0666, 0.0705, 0.1776], + device='cuda:2'), in_proj_covar=tensor([0.0370, 0.0304, 0.0327, 0.0330, 0.0343, 0.0442, 0.0296, 0.0432], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 20:21:08,322 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93329.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:21:10,603 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93331.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:21:33,887 INFO [train.py:898] (2/4) Epoch 26, batch 2500, loss[loss=0.1372, simple_loss=0.2136, pruned_loss=0.03038, over 17669.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2479, pruned_loss=0.03383, over 3579118.50 frames. ], batch size: 39, lr: 4.31e-03, grad_scale: 8.0 +2023-03-09 20:21:48,050 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9611, 5.4960, 2.7266, 5.3283, 5.1795, 5.5466, 5.3138, 2.9042], + device='cuda:2'), covar=tensor([0.0219, 0.0055, 0.0777, 0.0068, 0.0072, 0.0053, 0.0076, 0.0909], + device='cuda:2'), in_proj_covar=tensor([0.0092, 0.0084, 0.0098, 0.0098, 0.0090, 0.0079, 0.0087, 0.0098], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 20:21:52,409 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93366.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:21:55,157 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.03 vs. limit=5.0 +2023-03-09 20:22:03,727 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93376.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:22:22,158 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93392.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 20:22:32,979 INFO [train.py:898] (2/4) Epoch 26, batch 2550, loss[loss=0.1929, simple_loss=0.2773, pruned_loss=0.05422, over 12888.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2481, pruned_loss=0.03372, over 3580814.74 frames. ], batch size: 130, lr: 4.30e-03, grad_scale: 8.0 +2023-03-09 20:22:36,875 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.774e+02 2.484e+02 3.027e+02 3.757e+02 6.388e+02, threshold=6.054e+02, percent-clipped=1.0 +2023-03-09 20:22:41,588 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.61 vs. limit=2.0 +2023-03-09 20:23:00,437 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93424.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:23:31,988 INFO [train.py:898] (2/4) Epoch 26, batch 2600, loss[loss=0.1616, simple_loss=0.2578, pruned_loss=0.03268, over 18073.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.248, pruned_loss=0.03376, over 3580781.27 frames. 
], batch size: 62, lr: 4.30e-03, grad_scale: 8.0 +2023-03-09 20:23:55,864 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93471.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:24:05,072 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8335, 3.3761, 4.5291, 3.9290, 2.9993, 4.7989, 3.9533, 3.1179], + device='cuda:2'), covar=tensor([0.0462, 0.1105, 0.0301, 0.0440, 0.1379, 0.0195, 0.0616, 0.0852], + device='cuda:2'), in_proj_covar=tensor([0.0219, 0.0243, 0.0229, 0.0171, 0.0226, 0.0219, 0.0258, 0.0200], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 20:24:14,473 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93487.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:24:30,093 INFO [train.py:898] (2/4) Epoch 26, batch 2650, loss[loss=0.1527, simple_loss=0.236, pruned_loss=0.03473, over 18506.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2484, pruned_loss=0.03377, over 3588255.06 frames. ], batch size: 47, lr: 4.30e-03, grad_scale: 8.0 +2023-03-09 20:24:34,072 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.541e+02 2.517e+02 2.981e+02 3.680e+02 1.070e+03, threshold=5.961e+02, percent-clipped=2.0 +2023-03-09 20:24:49,777 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6332, 4.1447, 2.5902, 3.9291, 3.9943, 4.1896, 4.0188, 2.5853], + device='cuda:2'), covar=tensor([0.0265, 0.0094, 0.0840, 0.0195, 0.0111, 0.0100, 0.0123, 0.1010], + device='cuda:2'), in_proj_covar=tensor([0.0093, 0.0085, 0.0099, 0.0099, 0.0090, 0.0080, 0.0087, 0.0099], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 20:25:09,830 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93535.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:25:27,947 INFO [train.py:898] (2/4) Epoch 26, batch 2700, loss[loss=0.1585, simple_loss=0.2603, pruned_loss=0.02832, over 18359.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2477, pruned_loss=0.03348, over 3591713.76 frames. ], batch size: 55, lr: 4.30e-03, grad_scale: 16.0 +2023-03-09 20:25:32,732 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93555.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:25:56,243 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-09 20:26:10,299 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93587.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:26:26,587 INFO [train.py:898] (2/4) Epoch 26, batch 2750, loss[loss=0.1633, simple_loss=0.2583, pruned_loss=0.03422, over 17720.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2483, pruned_loss=0.03362, over 3598146.47 frames. 
], batch size: 70, lr: 4.30e-03, grad_scale: 16.0 +2023-03-09 20:26:29,019 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93603.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:26:29,923 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.721e+02 2.428e+02 2.838e+02 3.417e+02 1.067e+03, threshold=5.676e+02, percent-clipped=2.0 +2023-03-09 20:26:34,885 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8657, 5.1266, 5.0909, 5.1311, 4.8685, 5.6407, 5.2627, 4.9983], + device='cuda:2'), covar=tensor([0.1222, 0.0735, 0.0849, 0.0906, 0.1387, 0.0759, 0.0749, 0.1710], + device='cuda:2'), in_proj_covar=tensor([0.0368, 0.0303, 0.0325, 0.0330, 0.0342, 0.0441, 0.0294, 0.0432], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 20:27:02,466 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93631.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:27:06,745 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93635.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:27:25,263 INFO [train.py:898] (2/4) Epoch 26, batch 2800, loss[loss=0.1498, simple_loss=0.2432, pruned_loss=0.02816, over 18274.00 frames. ], tot_loss[loss=0.1582, simple_loss=0.2489, pruned_loss=0.03373, over 3588760.14 frames. ], batch size: 49, lr: 4.30e-03, grad_scale: 16.0 +2023-03-09 20:27:43,737 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93666.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:27:58,611 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93679.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:28:13,418 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93692.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:28:23,640 INFO [train.py:898] (2/4) Epoch 26, batch 2850, loss[loss=0.1669, simple_loss=0.2647, pruned_loss=0.03452, over 18261.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2486, pruned_loss=0.03348, over 3599059.05 frames. ], batch size: 60, lr: 4.30e-03, grad_scale: 16.0 +2023-03-09 20:28:27,049 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.715e+02 2.471e+02 3.010e+02 3.579e+02 6.367e+02, threshold=6.020e+02, percent-clipped=3.0 +2023-03-09 20:28:39,128 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93714.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:29:07,750 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-09 20:29:09,328 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93740.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:29:20,406 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6624, 2.9034, 2.6384, 2.8104, 3.6973, 3.6247, 3.1740, 2.8557], + device='cuda:2'), covar=tensor([0.0171, 0.0313, 0.0569, 0.0415, 0.0189, 0.0170, 0.0420, 0.0426], + device='cuda:2'), in_proj_covar=tensor([0.0145, 0.0145, 0.0167, 0.0164, 0.0141, 0.0126, 0.0162, 0.0164], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 20:29:21,129 INFO [train.py:898] (2/4) Epoch 26, batch 2900, loss[loss=0.1505, simple_loss=0.245, pruned_loss=0.02802, over 18296.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2484, pruned_loss=0.03332, over 3600083.53 frames. 
], batch size: 49, lr: 4.30e-03, grad_scale: 16.0 +2023-03-09 20:29:44,639 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93770.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:29:45,659 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1802, 5.1993, 5.4635, 5.5292, 5.0871, 5.9623, 5.5995, 5.1875], + device='cuda:2'), covar=tensor([0.1016, 0.0726, 0.0755, 0.0880, 0.1403, 0.0711, 0.0688, 0.1724], + device='cuda:2'), in_proj_covar=tensor([0.0367, 0.0303, 0.0323, 0.0331, 0.0342, 0.0439, 0.0294, 0.0431], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 20:29:45,704 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93771.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:30:19,939 INFO [train.py:898] (2/4) Epoch 26, batch 2950, loss[loss=0.1262, simple_loss=0.2099, pruned_loss=0.02123, over 18493.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2475, pruned_loss=0.03302, over 3607407.05 frames. ], batch size: 44, lr: 4.30e-03, grad_scale: 16.0 +2023-03-09 20:30:23,986 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.766e+02 2.413e+02 2.765e+02 3.459e+02 6.770e+02, threshold=5.531e+02, percent-clipped=1.0 +2023-03-09 20:30:41,698 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=93819.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:30:42,955 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3532, 5.3144, 4.9575, 5.3055, 5.3001, 4.7025, 5.1795, 4.9247], + device='cuda:2'), covar=tensor([0.0443, 0.0510, 0.1314, 0.0702, 0.0640, 0.0413, 0.0431, 0.1022], + device='cuda:2'), in_proj_covar=tensor([0.0510, 0.0577, 0.0723, 0.0449, 0.0474, 0.0525, 0.0559, 0.0695], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 20:30:52,503 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-03-09 20:30:56,239 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93831.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:31:04,174 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6612, 2.4057, 2.5879, 2.7188, 3.1855, 4.8595, 4.7556, 3.3210], + device='cuda:2'), covar=tensor([0.2119, 0.2560, 0.3415, 0.1919, 0.2583, 0.0256, 0.0362, 0.1094], + device='cuda:2'), in_proj_covar=tensor([0.0324, 0.0360, 0.0404, 0.0287, 0.0394, 0.0263, 0.0301, 0.0270], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 20:31:18,477 INFO [train.py:898] (2/4) Epoch 26, batch 3000, loss[loss=0.1325, simple_loss=0.2192, pruned_loss=0.02294, over 18113.00 frames. ], tot_loss[loss=0.1563, simple_loss=0.2471, pruned_loss=0.03275, over 3612238.76 frames. 
], batch size: 40, lr: 4.29e-03, grad_scale: 16.0 +2023-03-09 20:31:18,477 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 20:31:29,430 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1553, 2.7428, 2.5352, 2.6264, 3.4472, 3.2707, 3.0492, 2.7327], + device='cuda:2'), covar=tensor([0.0183, 0.0300, 0.0535, 0.0405, 0.0193, 0.0184, 0.0355, 0.0402], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0146, 0.0169, 0.0167, 0.0142, 0.0128, 0.0163, 0.0167], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 20:31:30,352 INFO [train.py:932] (2/4) Epoch 26, validation: loss=0.15, simple_loss=0.2481, pruned_loss=0.02599, over 944034.00 frames. +2023-03-09 20:31:30,353 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 20:31:31,869 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2486, 5.3274, 5.3565, 5.0690, 5.1451, 5.1222, 5.4150, 5.4330], + device='cuda:2'), covar=tensor([0.0057, 0.0050, 0.0050, 0.0093, 0.0052, 0.0133, 0.0049, 0.0064], + device='cuda:2'), in_proj_covar=tensor([0.0099, 0.0074, 0.0079, 0.0098, 0.0078, 0.0107, 0.0090, 0.0090], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 20:32:28,919 INFO [train.py:898] (2/4) Epoch 26, batch 3050, loss[loss=0.1417, simple_loss=0.2284, pruned_loss=0.02748, over 18361.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2466, pruned_loss=0.03262, over 3613500.91 frames. ], batch size: 46, lr: 4.29e-03, grad_scale: 16.0 +2023-03-09 20:32:32,222 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.657e+02 2.555e+02 3.073e+02 3.527e+02 8.787e+02, threshold=6.146e+02, percent-clipped=2.0 +2023-03-09 20:33:00,890 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.85 vs. limit=5.0 +2023-03-09 20:33:27,988 INFO [train.py:898] (2/4) Epoch 26, batch 3100, loss[loss=0.134, simple_loss=0.2204, pruned_loss=0.02378, over 18505.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2474, pruned_loss=0.0329, over 3604112.86 frames. ], batch size: 47, lr: 4.29e-03, grad_scale: 16.0 +2023-03-09 20:34:03,782 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93981.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:34:31,203 INFO [train.py:898] (2/4) Epoch 26, batch 3150, loss[loss=0.1626, simple_loss=0.2529, pruned_loss=0.03611, over 18367.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.248, pruned_loss=0.03319, over 3591371.87 frames. ], batch size: 50, lr: 4.29e-03, grad_scale: 16.0 +2023-03-09 20:34:34,547 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.522e+02 2.445e+02 2.866e+02 3.470e+02 6.892e+02, threshold=5.732e+02, percent-clipped=1.0 +2023-03-09 20:35:19,170 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94042.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:35:29,160 INFO [train.py:898] (2/4) Epoch 26, batch 3200, loss[loss=0.1545, simple_loss=0.2421, pruned_loss=0.0334, over 18306.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2476, pruned_loss=0.03325, over 3572688.46 frames. ], batch size: 49, lr: 4.29e-03, grad_scale: 16.0 +2023-03-09 20:35:30,879 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.72 vs. 
limit=5.0 +2023-03-09 20:35:54,800 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8976, 2.9699, 2.0603, 3.3471, 2.4669, 2.9603, 2.3125, 2.7865], + device='cuda:2'), covar=tensor([0.0609, 0.0737, 0.1242, 0.0662, 0.0795, 0.0281, 0.1093, 0.0524], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0231, 0.0194, 0.0295, 0.0197, 0.0269, 0.0205, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 20:36:01,984 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.75 vs. limit=5.0 +2023-03-09 20:36:02,874 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7200, 2.9645, 2.7310, 2.9177, 3.7401, 3.6834, 3.3147, 3.0445], + device='cuda:2'), covar=tensor([0.0156, 0.0325, 0.0515, 0.0415, 0.0189, 0.0141, 0.0360, 0.0331], + device='cuda:2'), in_proj_covar=tensor([0.0144, 0.0144, 0.0167, 0.0163, 0.0141, 0.0126, 0.0160, 0.0164], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 20:36:27,604 INFO [train.py:898] (2/4) Epoch 26, batch 3250, loss[loss=0.1536, simple_loss=0.2343, pruned_loss=0.03646, over 18435.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2476, pruned_loss=0.03349, over 3555941.05 frames. ], batch size: 43, lr: 4.29e-03, grad_scale: 16.0 +2023-03-09 20:36:31,046 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.794e+02 2.553e+02 3.024e+02 3.724e+02 8.014e+02, threshold=6.047e+02, percent-clipped=2.0 +2023-03-09 20:36:57,630 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94126.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:37:00,473 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.07 vs. limit=5.0 +2023-03-09 20:37:26,333 INFO [train.py:898] (2/4) Epoch 26, batch 3300, loss[loss=0.1626, simple_loss=0.2539, pruned_loss=0.03566, over 18470.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2483, pruned_loss=0.03365, over 3548764.56 frames. ], batch size: 53, lr: 4.29e-03, grad_scale: 16.0 +2023-03-09 20:37:48,216 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-09 20:38:15,419 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1808, 5.2316, 5.4545, 5.4941, 5.1229, 6.0379, 5.6132, 5.2443], + device='cuda:2'), covar=tensor([0.1085, 0.0612, 0.0792, 0.0917, 0.1328, 0.0714, 0.0764, 0.1578], + device='cuda:2'), in_proj_covar=tensor([0.0365, 0.0301, 0.0323, 0.0328, 0.0337, 0.0438, 0.0293, 0.0427], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 20:38:24,386 INFO [train.py:898] (2/4) Epoch 26, batch 3350, loss[loss=0.1363, simple_loss=0.2244, pruned_loss=0.02408, over 18265.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2486, pruned_loss=0.03368, over 3547161.31 frames. 
], batch size: 45, lr: 4.29e-03, grad_scale: 8.0 +2023-03-09 20:38:28,979 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.721e+02 2.569e+02 3.071e+02 3.653e+02 6.732e+02, threshold=6.142e+02, percent-clipped=1.0 +2023-03-09 20:38:46,844 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0703, 3.7533, 5.2839, 3.0183, 4.5398, 2.6474, 3.1973, 1.9196], + device='cuda:2'), covar=tensor([0.1112, 0.1010, 0.0151, 0.0974, 0.0484, 0.2651, 0.2634, 0.2253], + device='cuda:2'), in_proj_covar=tensor([0.0228, 0.0250, 0.0222, 0.0207, 0.0265, 0.0277, 0.0335, 0.0244], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 20:38:50,862 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8881, 5.4088, 2.7813, 5.2355, 5.1243, 5.4283, 5.2215, 2.7692], + device='cuda:2'), covar=tensor([0.0236, 0.0058, 0.0779, 0.0071, 0.0069, 0.0069, 0.0087, 0.0968], + device='cuda:2'), in_proj_covar=tensor([0.0092, 0.0084, 0.0098, 0.0099, 0.0090, 0.0079, 0.0086, 0.0098], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 20:38:50,920 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=94223.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 20:39:23,070 INFO [train.py:898] (2/4) Epoch 26, batch 3400, loss[loss=0.158, simple_loss=0.2425, pruned_loss=0.0368, over 18271.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2475, pruned_loss=0.03334, over 3536976.93 frames. ], batch size: 47, lr: 4.29e-03, grad_scale: 8.0 +2023-03-09 20:39:29,334 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5993, 4.6258, 4.6954, 4.4315, 4.5007, 4.4805, 4.7436, 4.7322], + device='cuda:2'), covar=tensor([0.0076, 0.0073, 0.0065, 0.0120, 0.0065, 0.0156, 0.0082, 0.0107], + device='cuda:2'), in_proj_covar=tensor([0.0099, 0.0074, 0.0079, 0.0099, 0.0078, 0.0108, 0.0091, 0.0091], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 20:39:40,043 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0320, 3.8063, 5.2996, 3.0799, 4.6361, 2.6835, 3.1341, 1.9097], + device='cuda:2'), covar=tensor([0.1135, 0.0990, 0.0159, 0.0979, 0.0459, 0.2829, 0.2861, 0.2326], + device='cuda:2'), in_proj_covar=tensor([0.0227, 0.0249, 0.0221, 0.0205, 0.0263, 0.0275, 0.0333, 0.0242], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 20:40:02,618 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94284.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 20:40:21,777 INFO [train.py:898] (2/4) Epoch 26, batch 3450, loss[loss=0.1566, simple_loss=0.2531, pruned_loss=0.03009, over 18250.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.2472, pruned_loss=0.03334, over 3543385.11 frames. 
], batch size: 60, lr: 4.28e-03, grad_scale: 8.0 +2023-03-09 20:40:26,279 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.946e+02 2.477e+02 2.921e+02 3.603e+02 6.240e+02, threshold=5.842e+02, percent-clipped=1.0 +2023-03-09 20:40:28,804 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0093, 4.2343, 2.4865, 4.1767, 5.3190, 2.7122, 4.0451, 4.3323], + device='cuda:2'), covar=tensor([0.0180, 0.1106, 0.1635, 0.0622, 0.0090, 0.1178, 0.0602, 0.0552], + device='cuda:2'), in_proj_covar=tensor([0.0181, 0.0278, 0.0208, 0.0200, 0.0139, 0.0186, 0.0221, 0.0230], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 20:40:44,619 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-09 20:41:04,295 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94337.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:41:17,133 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=94348.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:41:20,153 INFO [train.py:898] (2/4) Epoch 26, batch 3500, loss[loss=0.1523, simple_loss=0.2457, pruned_loss=0.02939, over 18285.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2471, pruned_loss=0.03323, over 3557822.74 frames. ], batch size: 49, lr: 4.28e-03, grad_scale: 8.0 +2023-03-09 20:42:15,626 INFO [train.py:898] (2/4) Epoch 26, batch 3550, loss[loss=0.1538, simple_loss=0.2396, pruned_loss=0.03404, over 18171.00 frames. ], tot_loss[loss=0.1574, simple_loss=0.2478, pruned_loss=0.0335, over 3545983.83 frames. ], batch size: 44, lr: 4.28e-03, grad_scale: 8.0 +2023-03-09 20:42:20,523 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.890e+02 2.549e+02 2.986e+02 3.546e+02 5.816e+02, threshold=5.972e+02, percent-clipped=0.0 +2023-03-09 20:42:25,144 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94409.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 20:42:40,050 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7244, 3.7214, 3.5663, 3.1776, 3.4956, 2.9459, 2.8207, 3.7186], + device='cuda:2'), covar=tensor([0.0066, 0.0085, 0.0088, 0.0139, 0.0097, 0.0184, 0.0226, 0.0072], + device='cuda:2'), in_proj_covar=tensor([0.0152, 0.0172, 0.0144, 0.0195, 0.0153, 0.0186, 0.0190, 0.0130], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 20:42:43,076 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94426.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:43:09,323 INFO [train.py:898] (2/4) Epoch 26, batch 3600, loss[loss=0.1505, simple_loss=0.2433, pruned_loss=0.02888, over 18414.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2481, pruned_loss=0.03355, over 3558076.99 frames. 
], batch size: 52, lr: 4.28e-03, grad_scale: 8.0 +2023-03-09 20:43:24,819 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6379, 2.3060, 2.5025, 2.5911, 3.1206, 4.7929, 4.7527, 3.1266], + device='cuda:2'), covar=tensor([0.2103, 0.2734, 0.3237, 0.2115, 0.2698, 0.0298, 0.0358, 0.1204], + device='cuda:2'), in_proj_covar=tensor([0.0325, 0.0361, 0.0407, 0.0288, 0.0397, 0.0263, 0.0304, 0.0270], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 20:43:34,267 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94474.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:44:11,701 INFO [train.py:898] (2/4) Epoch 27, batch 0, loss[loss=0.1597, simple_loss=0.2577, pruned_loss=0.03086, over 18246.00 frames. ], tot_loss[loss=0.1597, simple_loss=0.2577, pruned_loss=0.03086, over 18246.00 frames. ], batch size: 60, lr: 4.20e-03, grad_scale: 8.0 +2023-03-09 20:44:11,701 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 20:44:23,641 INFO [train.py:932] (2/4) Epoch 27, validation: loss=0.1494, simple_loss=0.2481, pruned_loss=0.02532, over 944034.00 frames. +2023-03-09 20:44:23,641 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 20:44:49,665 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.900e+02 2.525e+02 3.009e+02 3.747e+02 9.938e+02, threshold=6.019e+02, percent-clipped=2.0 +2023-03-09 20:45:22,319 INFO [train.py:898] (2/4) Epoch 27, batch 50, loss[loss=0.1492, simple_loss=0.2447, pruned_loss=0.02684, over 18557.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2468, pruned_loss=0.03281, over 806415.58 frames. ], batch size: 54, lr: 4.20e-03, grad_scale: 8.0 +2023-03-09 20:46:05,105 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8984, 4.1794, 2.6312, 4.0796, 5.2134, 2.9627, 3.7937, 3.7289], + device='cuda:2'), covar=tensor([0.0191, 0.1178, 0.1398, 0.0552, 0.0086, 0.0933, 0.0651, 0.0844], + device='cuda:2'), in_proj_covar=tensor([0.0182, 0.0278, 0.0208, 0.0201, 0.0140, 0.0187, 0.0223, 0.0231], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 20:46:13,925 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94579.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 20:46:20,482 INFO [train.py:898] (2/4) Epoch 27, batch 100, loss[loss=0.1655, simple_loss=0.2584, pruned_loss=0.03631, over 18631.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2469, pruned_loss=0.03277, over 1424053.96 frames. ], batch size: 52, lr: 4.20e-03, grad_scale: 8.0 +2023-03-09 20:46:31,743 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. 
limit=2.0 +2023-03-09 20:46:46,529 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.709e+02 2.657e+02 3.166e+02 3.640e+02 6.599e+02, threshold=6.333e+02, percent-clipped=4.0 +2023-03-09 20:46:48,042 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0506, 5.4054, 2.7528, 5.2996, 5.1691, 5.4680, 5.2672, 2.8242], + device='cuda:2'), covar=tensor([0.0213, 0.0065, 0.0778, 0.0064, 0.0069, 0.0065, 0.0076, 0.0914], + device='cuda:2'), in_proj_covar=tensor([0.0092, 0.0084, 0.0098, 0.0099, 0.0089, 0.0079, 0.0086, 0.0098], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005], + device='cuda:2') +2023-03-09 20:47:02,383 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-09 20:47:19,767 INFO [train.py:898] (2/4) Epoch 27, batch 150, loss[loss=0.1479, simple_loss=0.2392, pruned_loss=0.0283, over 18410.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2473, pruned_loss=0.03297, over 1918384.55 frames. ], batch size: 48, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:47:22,314 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94637.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:48:17,964 INFO [train.py:898] (2/4) Epoch 27, batch 200, loss[loss=0.1604, simple_loss=0.2579, pruned_loss=0.03146, over 18504.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2487, pruned_loss=0.03344, over 2294019.22 frames. ], batch size: 53, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:48:18,087 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94685.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:48:39,865 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94704.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 20:48:41,887 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.913e+02 2.647e+02 3.071e+02 3.764e+02 1.112e+03, threshold=6.143e+02, percent-clipped=3.0 +2023-03-09 20:49:16,137 INFO [train.py:898] (2/4) Epoch 27, batch 250, loss[loss=0.1706, simple_loss=0.2626, pruned_loss=0.03936, over 18503.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2479, pruned_loss=0.03333, over 2589285.53 frames. ], batch size: 59, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:49:16,465 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2749, 5.2456, 4.8549, 5.1911, 5.1636, 4.5147, 5.0452, 4.8338], + device='cuda:2'), covar=tensor([0.0466, 0.0474, 0.1408, 0.0834, 0.0749, 0.0482, 0.0478, 0.1205], + device='cuda:2'), in_proj_covar=tensor([0.0509, 0.0574, 0.0717, 0.0446, 0.0472, 0.0523, 0.0558, 0.0689], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 20:50:14,324 INFO [train.py:898] (2/4) Epoch 27, batch 300, loss[loss=0.1657, simple_loss=0.2607, pruned_loss=0.03536, over 18426.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2468, pruned_loss=0.03279, over 2823638.79 frames. ], batch size: 52, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:50:38,014 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.940e+02 2.501e+02 2.879e+02 3.354e+02 8.790e+02, threshold=5.757e+02, percent-clipped=2.0 +2023-03-09 20:51:12,770 INFO [train.py:898] (2/4) Epoch 27, batch 350, loss[loss=0.1508, simple_loss=0.2354, pruned_loss=0.03308, over 17686.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2471, pruned_loss=0.0332, over 2995853.80 frames. 
], batch size: 39, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:51:17,727 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5678, 3.3070, 2.1893, 4.3063, 3.0073, 4.1220, 2.4093, 3.7634], + device='cuda:2'), covar=tensor([0.0598, 0.0908, 0.1549, 0.0442, 0.0919, 0.0330, 0.1291, 0.0466], + device='cuda:2'), in_proj_covar=tensor([0.0222, 0.0231, 0.0194, 0.0294, 0.0197, 0.0269, 0.0206, 0.0206], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 20:52:02,462 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 20:52:04,431 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94879.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 20:52:11,469 INFO [train.py:898] (2/4) Epoch 27, batch 400, loss[loss=0.1566, simple_loss=0.2526, pruned_loss=0.03029, over 18406.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2479, pruned_loss=0.03304, over 3136259.53 frames. ], batch size: 52, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:52:34,655 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0179, 5.4859, 5.4823, 5.4810, 4.9766, 5.4221, 4.8330, 5.3766], + device='cuda:2'), covar=tensor([0.0235, 0.0265, 0.0185, 0.0379, 0.0417, 0.0220, 0.1039, 0.0341], + device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0278, 0.0280, 0.0356, 0.0287, 0.0287, 0.0321, 0.0281], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 20:52:35,460 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.622e+02 2.561e+02 2.993e+02 3.675e+02 9.257e+02, threshold=5.986e+02, percent-clipped=2.0 +2023-03-09 20:53:00,400 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-03-09 20:53:00,992 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=94927.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 20:53:10,450 INFO [train.py:898] (2/4) Epoch 27, batch 450, loss[loss=0.1372, simple_loss=0.2231, pruned_loss=0.02562, over 17745.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2483, pruned_loss=0.03306, over 3233287.70 frames. ], batch size: 39, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:53:13,066 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4435, 2.1170, 1.9864, 2.0649, 2.4947, 2.5181, 2.3161, 2.1249], + device='cuda:2'), covar=tensor([0.0218, 0.0232, 0.0486, 0.0418, 0.0232, 0.0208, 0.0395, 0.0349], + device='cuda:2'), in_proj_covar=tensor([0.0148, 0.0146, 0.0171, 0.0167, 0.0144, 0.0129, 0.0164, 0.0168], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 20:53:33,788 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-09 20:54:08,448 INFO [train.py:898] (2/4) Epoch 27, batch 500, loss[loss=0.1419, simple_loss=0.234, pruned_loss=0.0249, over 18526.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2481, pruned_loss=0.03315, over 3318691.51 frames. 
], batch size: 49, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:54:31,093 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95004.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:54:33,119 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.646e+02 2.379e+02 2.893e+02 3.506e+02 5.482e+02, threshold=5.786e+02, percent-clipped=0.0 +2023-03-09 20:55:06,356 INFO [train.py:898] (2/4) Epoch 27, batch 550, loss[loss=0.1738, simple_loss=0.2649, pruned_loss=0.04134, over 18338.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2481, pruned_loss=0.0333, over 3391831.28 frames. ], batch size: 56, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:55:26,979 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95052.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:56:04,277 INFO [train.py:898] (2/4) Epoch 27, batch 600, loss[loss=0.1453, simple_loss=0.2274, pruned_loss=0.03162, over 18527.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2482, pruned_loss=0.03339, over 3429291.41 frames. ], batch size: 49, lr: 4.19e-03, grad_scale: 8.0 +2023-03-09 20:56:10,437 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-09 20:56:28,291 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-09 20:56:28,736 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95105.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 20:56:29,343 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.684e+02 2.616e+02 3.098e+02 3.741e+02 7.084e+02, threshold=6.196e+02, percent-clipped=4.0 +2023-03-09 20:56:50,742 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-03-09 20:57:02,881 INFO [train.py:898] (2/4) Epoch 27, batch 650, loss[loss=0.1774, simple_loss=0.2618, pruned_loss=0.04653, over 12448.00 frames. ], tot_loss[loss=0.1579, simple_loss=0.2482, pruned_loss=0.03375, over 3444436.83 frames. ], batch size: 129, lr: 4.18e-03, grad_scale: 8.0 +2023-03-09 20:57:40,203 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95166.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 20:58:02,041 INFO [train.py:898] (2/4) Epoch 27, batch 700, loss[loss=0.1811, simple_loss=0.2704, pruned_loss=0.04593, over 18465.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2483, pruned_loss=0.03367, over 3484644.03 frames. 
], batch size: 59, lr: 4.18e-03, grad_scale: 8.0 +2023-03-09 20:58:12,364 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6104, 2.4161, 2.5355, 2.6149, 3.0104, 4.2276, 4.1424, 3.1716], + device='cuda:2'), covar=tensor([0.2032, 0.2519, 0.3034, 0.2018, 0.2485, 0.0385, 0.0470, 0.1065], + device='cuda:2'), in_proj_covar=tensor([0.0327, 0.0362, 0.0408, 0.0290, 0.0398, 0.0265, 0.0305, 0.0271], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 20:58:28,140 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.730e+02 2.528e+02 2.858e+02 3.402e+02 6.027e+02, threshold=5.717e+02, percent-clipped=1.0 +2023-03-09 20:58:46,357 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8704, 4.1854, 2.4728, 4.0020, 5.2078, 2.7493, 3.6284, 3.8355], + device='cuda:2'), covar=tensor([0.0196, 0.1220, 0.1395, 0.0579, 0.0069, 0.1022, 0.0659, 0.0773], + device='cuda:2'), in_proj_covar=tensor([0.0180, 0.0275, 0.0205, 0.0198, 0.0138, 0.0185, 0.0219, 0.0228], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 20:58:55,445 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6867, 2.5158, 2.6880, 2.7364, 3.3449, 5.0234, 4.9616, 3.3276], + device='cuda:2'), covar=tensor([0.2047, 0.2446, 0.2904, 0.1939, 0.2288, 0.0231, 0.0325, 0.1100], + device='cuda:2'), in_proj_covar=tensor([0.0327, 0.0362, 0.0408, 0.0290, 0.0398, 0.0265, 0.0304, 0.0271], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 20:59:01,259 INFO [train.py:898] (2/4) Epoch 27, batch 750, loss[loss=0.1561, simple_loss=0.2488, pruned_loss=0.03164, over 16189.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2483, pruned_loss=0.03351, over 3511027.92 frames. ], batch size: 94, lr: 4.18e-03, grad_scale: 8.0 +2023-03-09 20:59:12,184 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1932, 5.2063, 5.5547, 5.5147, 5.1990, 6.0276, 5.6710, 5.2955], + device='cuda:2'), covar=tensor([0.1209, 0.0720, 0.0771, 0.0767, 0.1341, 0.0711, 0.0703, 0.1590], + device='cuda:2'), in_proj_covar=tensor([0.0375, 0.0306, 0.0330, 0.0335, 0.0343, 0.0447, 0.0298, 0.0436], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 20:59:12,745 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-09 20:59:31,753 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95260.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 20:59:37,275 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9581, 5.0091, 5.0514, 4.7870, 4.7794, 4.8060, 5.1203, 5.1183], + device='cuda:2'), covar=tensor([0.0076, 0.0069, 0.0068, 0.0116, 0.0062, 0.0179, 0.0076, 0.0100], + device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0074, 0.0080, 0.0100, 0.0079, 0.0108, 0.0091, 0.0091], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 20:59:40,864 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95268.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:00:00,185 INFO [train.py:898] (2/4) Epoch 27, batch 800, loss[loss=0.1682, simple_loss=0.2597, pruned_loss=0.03833, over 18241.00 frames. 
], tot_loss[loss=0.1573, simple_loss=0.2475, pruned_loss=0.03349, over 3516826.02 frames. ], batch size: 60, lr: 4.18e-03, grad_scale: 8.0 +2023-03-09 21:00:25,349 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.793e+02 2.511e+02 2.986e+02 3.454e+02 7.557e+02, threshold=5.973e+02, percent-clipped=4.0 +2023-03-09 21:00:43,488 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95321.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:00:52,368 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8389, 5.3342, 5.3122, 5.3276, 4.8238, 5.2702, 4.5999, 5.1491], + device='cuda:2'), covar=tensor([0.0264, 0.0291, 0.0219, 0.0420, 0.0409, 0.0236, 0.1181, 0.0424], + device='cuda:2'), in_proj_covar=tensor([0.0236, 0.0277, 0.0279, 0.0357, 0.0287, 0.0286, 0.0321, 0.0281], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 21:00:52,445 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95329.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:00:58,803 INFO [train.py:898] (2/4) Epoch 27, batch 850, loss[loss=0.1814, simple_loss=0.2734, pruned_loss=0.04474, over 18288.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2482, pruned_loss=0.03346, over 3536351.38 frames. ], batch size: 57, lr: 4.18e-03, grad_scale: 8.0 +2023-03-09 21:01:04,281 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95339.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:01:57,808 INFO [train.py:898] (2/4) Epoch 27, batch 900, loss[loss=0.1671, simple_loss=0.2579, pruned_loss=0.03817, over 18468.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2483, pruned_loss=0.03316, over 3546237.02 frames. ], batch size: 53, lr: 4.18e-03, grad_scale: 8.0 +2023-03-09 21:02:10,629 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5979, 2.3671, 2.5942, 2.6226, 3.1219, 4.7977, 4.7494, 3.2867], + device='cuda:2'), covar=tensor([0.2109, 0.2737, 0.3243, 0.2083, 0.2621, 0.0290, 0.0392, 0.1112], + device='cuda:2'), in_proj_covar=tensor([0.0327, 0.0362, 0.0408, 0.0290, 0.0397, 0.0265, 0.0305, 0.0270], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 21:02:16,684 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95400.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:02:24,174 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.646e+02 2.410e+02 2.846e+02 3.556e+02 5.953e+02, threshold=5.693e+02, percent-clipped=0.0 +2023-03-09 21:02:57,592 INFO [train.py:898] (2/4) Epoch 27, batch 950, loss[loss=0.1666, simple_loss=0.2579, pruned_loss=0.03764, over 18134.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2477, pruned_loss=0.03279, over 3568035.54 frames. ], batch size: 62, lr: 4.18e-03, grad_scale: 8.0 +2023-03-09 21:03:28,544 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95461.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 21:03:55,817 INFO [train.py:898] (2/4) Epoch 27, batch 1000, loss[loss=0.1497, simple_loss=0.2463, pruned_loss=0.02653, over 18567.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2484, pruned_loss=0.0331, over 3580024.49 frames. 
], batch size: 49, lr: 4.18e-03, grad_scale: 8.0 +2023-03-09 21:04:19,640 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.888e+02 2.625e+02 2.944e+02 3.804e+02 7.534e+02, threshold=5.888e+02, percent-clipped=3.0 +2023-03-09 21:04:40,537 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5123, 6.0444, 5.5893, 5.8065, 5.5961, 5.4314, 6.0626, 6.0547], + device='cuda:2'), covar=tensor([0.1169, 0.0731, 0.0518, 0.0718, 0.1532, 0.0741, 0.0614, 0.0683], + device='cuda:2'), in_proj_covar=tensor([0.0630, 0.0557, 0.0402, 0.0577, 0.0775, 0.0578, 0.0787, 0.0599], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 21:04:54,002 INFO [train.py:898] (2/4) Epoch 27, batch 1050, loss[loss=0.1509, simple_loss=0.2325, pruned_loss=0.03468, over 18271.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2481, pruned_loss=0.03311, over 3587037.63 frames. ], batch size: 45, lr: 4.18e-03, grad_scale: 8.0 +2023-03-09 21:05:19,307 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-03-09 21:05:53,061 INFO [train.py:898] (2/4) Epoch 27, batch 1100, loss[loss=0.1463, simple_loss=0.2253, pruned_loss=0.03368, over 18493.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2485, pruned_loss=0.03324, over 3595690.62 frames. ], batch size: 44, lr: 4.17e-03, grad_scale: 8.0 +2023-03-09 21:06:17,709 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 2.398e+02 2.884e+02 3.392e+02 9.491e+02, threshold=5.768e+02, percent-clipped=3.0 +2023-03-09 21:06:30,353 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95616.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:06:31,639 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95617.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:06:39,759 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95624.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:06:43,706 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5203, 5.4747, 5.1420, 5.3916, 5.4298, 4.8161, 5.3310, 5.0913], + device='cuda:2'), covar=tensor([0.0392, 0.0374, 0.1143, 0.0807, 0.0534, 0.0389, 0.0404, 0.1044], + device='cuda:2'), in_proj_covar=tensor([0.0515, 0.0584, 0.0725, 0.0450, 0.0473, 0.0530, 0.0564, 0.0701], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 21:06:52,367 INFO [train.py:898] (2/4) Epoch 27, batch 1150, loss[loss=0.1523, simple_loss=0.2395, pruned_loss=0.03256, over 18281.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2475, pruned_loss=0.03309, over 3598645.36 frames. ], batch size: 49, lr: 4.17e-03, grad_scale: 4.0 +2023-03-09 21:07:43,503 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95678.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:07:51,501 INFO [train.py:898] (2/4) Epoch 27, batch 1200, loss[loss=0.1643, simple_loss=0.2544, pruned_loss=0.0371, over 18003.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2473, pruned_loss=0.0331, over 3591256.19 frames. 
], batch size: 65, lr: 4.17e-03, grad_scale: 8.0 +2023-03-09 21:08:01,814 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3731, 5.9495, 5.5259, 5.7599, 5.6086, 5.3741, 6.0223, 6.0059], + device='cuda:2'), covar=tensor([0.1225, 0.0822, 0.0583, 0.0713, 0.1328, 0.0708, 0.0531, 0.0643], + device='cuda:2'), in_proj_covar=tensor([0.0634, 0.0559, 0.0403, 0.0580, 0.0780, 0.0582, 0.0790, 0.0604], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 21:08:02,903 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95695.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:08:06,058 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-03-09 21:08:16,309 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.614e+02 2.547e+02 2.881e+02 3.580e+02 8.977e+02, threshold=5.762e+02, percent-clipped=2.0 +2023-03-09 21:08:50,138 INFO [train.py:898] (2/4) Epoch 27, batch 1250, loss[loss=0.1723, simple_loss=0.2627, pruned_loss=0.04091, over 18161.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.2476, pruned_loss=0.03314, over 3592341.75 frames. ], batch size: 60, lr: 4.17e-03, grad_scale: 8.0 +2023-03-09 21:09:02,203 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4378, 2.0807, 2.1100, 2.0982, 2.4308, 2.4810, 2.3456, 2.0931], + device='cuda:2'), covar=tensor([0.0249, 0.0297, 0.0495, 0.0421, 0.0252, 0.0226, 0.0428, 0.0355], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0146, 0.0170, 0.0164, 0.0142, 0.0128, 0.0162, 0.0166], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 21:09:20,662 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95761.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 21:09:49,141 INFO [train.py:898] (2/4) Epoch 27, batch 1300, loss[loss=0.1415, simple_loss=0.2233, pruned_loss=0.02983, over 18447.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2475, pruned_loss=0.03336, over 3598741.41 frames. 
], batch size: 43, lr: 4.17e-03, grad_scale: 8.0 +2023-03-09 21:09:49,619 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0732, 3.8300, 5.1366, 4.6673, 3.5274, 3.2525, 4.6936, 5.4657], + device='cuda:2'), covar=tensor([0.0769, 0.1458, 0.0230, 0.0342, 0.0912, 0.1175, 0.0366, 0.0178], + device='cuda:2'), in_proj_covar=tensor([0.0155, 0.0282, 0.0170, 0.0186, 0.0196, 0.0198, 0.0200, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 21:09:52,382 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5794, 2.4192, 4.2995, 3.7877, 2.2527, 4.5643, 3.7552, 2.7121], + device='cuda:2'), covar=tensor([0.0501, 0.2053, 0.0314, 0.0385, 0.2215, 0.0243, 0.0636, 0.1235], + device='cuda:2'), in_proj_covar=tensor([0.0221, 0.0245, 0.0232, 0.0172, 0.0228, 0.0219, 0.0258, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 21:10:15,107 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.836e+02 2.534e+02 2.954e+02 3.886e+02 9.660e+02, threshold=5.908e+02, percent-clipped=7.0 +2023-03-09 21:10:17,504 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95809.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 21:10:21,565 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95812.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 21:10:48,201 INFO [train.py:898] (2/4) Epoch 27, batch 1350, loss[loss=0.1412, simple_loss=0.226, pruned_loss=0.02816, over 18366.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2474, pruned_loss=0.03346, over 3595819.72 frames. ], batch size: 46, lr: 4.17e-03, grad_scale: 8.0 +2023-03-09 21:11:02,596 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95847.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:11:10,528 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8604, 4.5322, 4.5699, 3.5206, 3.8409, 3.6204, 2.7502, 2.8721], + device='cuda:2'), covar=tensor([0.0234, 0.0149, 0.0090, 0.0323, 0.0352, 0.0216, 0.0734, 0.0742], + device='cuda:2'), in_proj_covar=tensor([0.0076, 0.0065, 0.0069, 0.0073, 0.0095, 0.0072, 0.0081, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 21:11:17,049 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9394, 3.6752, 5.0554, 3.0651, 4.4300, 2.6741, 3.1178, 1.8447], + device='cuda:2'), covar=tensor([0.1267, 0.0962, 0.0165, 0.0922, 0.0484, 0.2501, 0.2722, 0.2325], + device='cuda:2'), in_proj_covar=tensor([0.0231, 0.0254, 0.0226, 0.0209, 0.0268, 0.0280, 0.0337, 0.0246], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 21:11:25,382 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0591, 4.2454, 2.5596, 4.1254, 5.3038, 2.7406, 4.0108, 4.1726], + device='cuda:2'), covar=tensor([0.0190, 0.1233, 0.1698, 0.0704, 0.0090, 0.1340, 0.0650, 0.0647], + device='cuda:2'), in_proj_covar=tensor([0.0183, 0.0281, 0.0209, 0.0201, 0.0141, 0.0188, 0.0222, 0.0232], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 21:11:32,157 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95873.0, num_to_drop=1, layers_to_drop={3} 
+2023-03-09 21:11:45,714 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0905, 3.8131, 5.1703, 3.0830, 4.5446, 2.6753, 3.2167, 1.7562], + device='cuda:2'), covar=tensor([0.1144, 0.0961, 0.0181, 0.0958, 0.0489, 0.2612, 0.2731, 0.2390], + device='cuda:2'), in_proj_covar=tensor([0.0232, 0.0255, 0.0227, 0.0210, 0.0270, 0.0282, 0.0338, 0.0248], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 21:11:46,352 INFO [train.py:898] (2/4) Epoch 27, batch 1400, loss[loss=0.1458, simple_loss=0.2366, pruned_loss=0.02752, over 18292.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2484, pruned_loss=0.03387, over 3591931.59 frames. ], batch size: 49, lr: 4.17e-03, grad_scale: 8.0 +2023-03-09 21:12:11,666 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.788e+02 2.502e+02 2.884e+02 3.426e+02 1.018e+03, threshold=5.768e+02, percent-clipped=3.0 +2023-03-09 21:12:13,255 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95908.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 21:12:22,271 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95916.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:12:26,831 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.94 vs. limit=5.0 +2023-03-09 21:12:32,123 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95924.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:12:45,221 INFO [train.py:898] (2/4) Epoch 27, batch 1450, loss[loss=0.1551, simple_loss=0.2417, pruned_loss=0.03429, over 18515.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2481, pruned_loss=0.03356, over 3595365.33 frames. ], batch size: 47, lr: 4.17e-03, grad_scale: 8.0 +2023-03-09 21:13:19,366 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95964.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:13:28,476 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=95972.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:13:30,273 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95973.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:13:38,782 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95980.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:13:44,686 INFO [train.py:898] (2/4) Epoch 27, batch 1500, loss[loss=0.1371, simple_loss=0.2315, pruned_loss=0.02133, over 18257.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.248, pruned_loss=0.03333, over 3588209.58 frames. ], batch size: 47, lr: 4.17e-03, grad_scale: 8.0 +2023-03-09 21:13:45,714 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. 
limit=2.0 +2023-03-09 21:13:56,531 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95995.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:14:15,335 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.678e+02 2.536e+02 3.010e+02 3.569e+02 8.325e+02, threshold=6.021e+02, percent-clipped=2.0 +2023-03-09 21:14:23,843 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6687, 2.4571, 2.6018, 2.7013, 3.2033, 4.7441, 4.7363, 3.0307], + device='cuda:2'), covar=tensor([0.2174, 0.2679, 0.3162, 0.2098, 0.2614, 0.0321, 0.0378, 0.1316], + device='cuda:2'), in_proj_covar=tensor([0.0328, 0.0364, 0.0410, 0.0291, 0.0399, 0.0266, 0.0305, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001], + device='cuda:2') +2023-03-09 21:14:47,889 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5734, 5.0690, 5.0404, 5.1594, 4.4683, 5.0123, 4.0125, 4.9339], + device='cuda:2'), covar=tensor([0.0369, 0.0481, 0.0328, 0.0564, 0.0539, 0.0339, 0.1985, 0.0457], + device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0275, 0.0277, 0.0357, 0.0286, 0.0287, 0.0320, 0.0281], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 21:14:48,690 INFO [train.py:898] (2/4) Epoch 27, batch 1550, loss[loss=0.185, simple_loss=0.2792, pruned_loss=0.04534, over 18119.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2472, pruned_loss=0.03314, over 3585219.17 frames. ], batch size: 62, lr: 4.16e-03, grad_scale: 8.0 +2023-03-09 21:14:52,416 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.97 vs. limit=5.0 +2023-03-09 21:14:56,596 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96041.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:14:58,711 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96043.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:15:42,231 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96080.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:15:47,441 INFO [train.py:898] (2/4) Epoch 27, batch 1600, loss[loss=0.1473, simple_loss=0.2396, pruned_loss=0.02748, over 18285.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2477, pruned_loss=0.03317, over 3575162.06 frames. ], batch size: 49, lr: 4.16e-03, grad_scale: 8.0 +2023-03-09 21:16:13,689 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.626e+02 2.554e+02 2.952e+02 3.654e+02 6.643e+02, threshold=5.903e+02, percent-clipped=2.0 +2023-03-09 21:16:38,539 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3603, 5.3092, 5.6587, 5.7141, 5.3119, 6.2250, 5.8630, 5.5137], + device='cuda:2'), covar=tensor([0.1217, 0.0683, 0.0642, 0.0884, 0.1433, 0.0722, 0.0711, 0.1670], + device='cuda:2'), in_proj_covar=tensor([0.0371, 0.0303, 0.0328, 0.0330, 0.0338, 0.0442, 0.0295, 0.0433], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 21:16:43,994 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-03-09 21:16:45,554 INFO [train.py:898] (2/4) Epoch 27, batch 1650, loss[loss=0.1555, simple_loss=0.2522, pruned_loss=0.02937, over 18349.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.248, pruned_loss=0.03374, over 3560896.30 frames. 
], batch size: 55, lr: 4.16e-03, grad_scale: 8.0 +2023-03-09 21:16:53,578 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96141.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:17:21,266 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96165.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:17:21,687 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.68 vs. limit=2.0 +2023-03-09 21:17:24,607 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96168.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 21:17:44,174 INFO [train.py:898] (2/4) Epoch 27, batch 1700, loss[loss=0.168, simple_loss=0.258, pruned_loss=0.03902, over 18400.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2479, pruned_loss=0.03371, over 3563096.57 frames. ], batch size: 52, lr: 4.16e-03, grad_scale: 8.0 +2023-03-09 21:18:06,266 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96203.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 21:18:10,515 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.955e+02 2.460e+02 3.228e+02 3.841e+02 7.050e+02, threshold=6.456e+02, percent-clipped=7.0 +2023-03-09 21:18:32,822 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96226.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:18:42,662 INFO [train.py:898] (2/4) Epoch 27, batch 1750, loss[loss=0.1598, simple_loss=0.2558, pruned_loss=0.03184, over 18355.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2478, pruned_loss=0.03362, over 3569090.62 frames. ], batch size: 55, lr: 4.16e-03, grad_scale: 8.0 +2023-03-09 21:19:27,920 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96273.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:19:32,697 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1034, 5.1521, 5.2420, 4.9399, 5.0109, 4.9392, 5.3147, 5.2859], + device='cuda:2'), covar=tensor([0.0064, 0.0061, 0.0044, 0.0101, 0.0048, 0.0151, 0.0055, 0.0081], + device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0074, 0.0080, 0.0100, 0.0079, 0.0107, 0.0091, 0.0091], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 21:19:41,559 INFO [train.py:898] (2/4) Epoch 27, batch 1800, loss[loss=0.1413, simple_loss=0.2218, pruned_loss=0.03036, over 18364.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2465, pruned_loss=0.03326, over 3576982.27 frames. ], batch size: 42, lr: 4.16e-03, grad_scale: 8.0 +2023-03-09 21:20:07,993 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.809e+02 2.546e+02 2.957e+02 3.630e+02 5.615e+02, threshold=5.915e+02, percent-clipped=0.0 +2023-03-09 21:20:24,641 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96321.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:20:28,164 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96324.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:20:40,274 INFO [train.py:898] (2/4) Epoch 27, batch 1850, loss[loss=0.1772, simple_loss=0.2666, pruned_loss=0.04388, over 15887.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2468, pruned_loss=0.03316, over 3584902.31 frames. 
], batch size: 94, lr: 4.16e-03, grad_scale: 8.0 +2023-03-09 21:20:41,716 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96336.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:21:26,226 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4182, 2.7684, 2.4057, 2.6937, 3.4684, 3.3166, 3.0237, 2.7101], + device='cuda:2'), covar=tensor([0.0183, 0.0313, 0.0635, 0.0405, 0.0207, 0.0194, 0.0379, 0.0409], + device='cuda:2'), in_proj_covar=tensor([0.0145, 0.0146, 0.0168, 0.0166, 0.0141, 0.0128, 0.0161, 0.0164], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 21:21:37,318 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6921, 4.4011, 4.7626, 4.4172, 4.4203, 4.6820, 4.7878, 4.6456], + device='cuda:2'), covar=tensor([0.0101, 0.0146, 0.0109, 0.0166, 0.0107, 0.0167, 0.0127, 0.0183], + device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0075, 0.0080, 0.0100, 0.0079, 0.0107, 0.0091, 0.0091], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 21:21:37,991 INFO [train.py:898] (2/4) Epoch 27, batch 1900, loss[loss=0.1862, simple_loss=0.2697, pruned_loss=0.05138, over 17864.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2472, pruned_loss=0.03324, over 3588152.05 frames. ], batch size: 65, lr: 4.16e-03, grad_scale: 8.0 +2023-03-09 21:21:38,384 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96385.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:21:44,168 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96390.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:22:04,760 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.806e+02 2.747e+02 3.313e+02 4.135e+02 7.560e+02, threshold=6.625e+02, percent-clipped=3.0 +2023-03-09 21:22:36,435 INFO [train.py:898] (2/4) Epoch 27, batch 1950, loss[loss=0.1674, simple_loss=0.2658, pruned_loss=0.03447, over 18151.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2474, pruned_loss=0.03339, over 3583162.42 frames. ], batch size: 62, lr: 4.16e-03, grad_scale: 8.0 +2023-03-09 21:22:37,791 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96436.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:22:55,503 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96451.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:23:15,696 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96468.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 21:23:35,133 INFO [train.py:898] (2/4) Epoch 27, batch 2000, loss[loss=0.1534, simple_loss=0.244, pruned_loss=0.03137, over 18373.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.248, pruned_loss=0.03347, over 3577663.25 frames. 
], batch size: 50, lr: 4.15e-03, grad_scale: 8.0 +2023-03-09 21:23:56,279 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96503.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:24:00,974 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.734e+02 2.669e+02 3.146e+02 3.749e+02 6.001e+02, threshold=6.292e+02, percent-clipped=0.0 +2023-03-09 21:24:11,842 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96516.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 21:24:17,449 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96521.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:24:33,560 INFO [train.py:898] (2/4) Epoch 27, batch 2050, loss[loss=0.1542, simple_loss=0.243, pruned_loss=0.03272, over 18397.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2483, pruned_loss=0.03351, over 3581442.68 frames. ], batch size: 48, lr: 4.15e-03, grad_scale: 8.0 +2023-03-09 21:24:51,979 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96551.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:25:10,999 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1902, 5.2298, 5.4778, 5.4666, 5.1865, 6.0191, 5.6871, 5.1604], + device='cuda:2'), covar=tensor([0.1218, 0.0712, 0.0804, 0.0966, 0.1431, 0.0770, 0.0755, 0.1880], + device='cuda:2'), in_proj_covar=tensor([0.0375, 0.0304, 0.0330, 0.0333, 0.0342, 0.0445, 0.0298, 0.0436], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-09 21:25:32,787 INFO [train.py:898] (2/4) Epoch 27, batch 2100, loss[loss=0.1498, simple_loss=0.2437, pruned_loss=0.02796, over 18396.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2488, pruned_loss=0.03357, over 3594839.53 frames. ], batch size: 48, lr: 4.15e-03, grad_scale: 8.0 +2023-03-09 21:25:58,866 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.510e+02 2.412e+02 2.976e+02 3.502e+02 7.599e+02, threshold=5.952e+02, percent-clipped=1.0 +2023-03-09 21:26:32,721 INFO [train.py:898] (2/4) Epoch 27, batch 2150, loss[loss=0.1609, simple_loss=0.2539, pruned_loss=0.03398, over 18479.00 frames. ], tot_loss[loss=0.1578, simple_loss=0.2489, pruned_loss=0.0334, over 3600465.96 frames. ], batch size: 53, lr: 4.15e-03, grad_scale: 8.0 +2023-03-09 21:26:34,142 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96636.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:27:25,096 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6785, 3.6056, 2.5350, 4.4612, 3.3136, 4.3025, 2.9080, 4.0956], + device='cuda:2'), covar=tensor([0.0624, 0.0756, 0.1304, 0.0472, 0.0732, 0.0273, 0.0978, 0.0393], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0231, 0.0196, 0.0295, 0.0196, 0.0272, 0.0205, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 21:27:26,060 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96680.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:27:30,970 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96684.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:27:31,887 INFO [train.py:898] (2/4) Epoch 27, batch 2200, loss[loss=0.1784, simple_loss=0.2743, pruned_loss=0.04128, over 16390.00 frames. 
], tot_loss[loss=0.1573, simple_loss=0.248, pruned_loss=0.0333, over 3587771.91 frames. ], batch size: 95, lr: 4.15e-03, grad_scale: 8.0 +2023-03-09 21:27:56,877 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 2.105e+02 2.631e+02 3.073e+02 3.700e+02 7.959e+02, threshold=6.147e+02, percent-clipped=1.0 +2023-03-09 21:28:29,901 INFO [train.py:898] (2/4) Epoch 27, batch 2250, loss[loss=0.1685, simple_loss=0.2594, pruned_loss=0.03884, over 18458.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2477, pruned_loss=0.03327, over 3585465.22 frames. ], batch size: 59, lr: 4.15e-03, grad_scale: 8.0 +2023-03-09 21:28:31,925 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96736.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:28:43,248 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96746.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:29:18,848 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96776.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:29:27,623 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96784.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:29:28,589 INFO [train.py:898] (2/4) Epoch 27, batch 2300, loss[loss=0.18, simple_loss=0.2667, pruned_loss=0.04669, over 12431.00 frames. ], tot_loss[loss=0.1577, simple_loss=0.2484, pruned_loss=0.03346, over 3583314.66 frames. ], batch size: 131, lr: 4.15e-03, grad_scale: 8.0 +2023-03-09 21:29:53,977 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.778e+02 2.521e+02 2.973e+02 3.966e+02 7.107e+02, threshold=5.946e+02, percent-clipped=4.0 +2023-03-09 21:30:03,121 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-03-09 21:30:11,746 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96821.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:30:12,044 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-03-09 21:30:27,096 INFO [train.py:898] (2/4) Epoch 27, batch 2350, loss[loss=0.1584, simple_loss=0.2576, pruned_loss=0.0296, over 18355.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2488, pruned_loss=0.03364, over 3587613.56 frames. ], batch size: 55, lr: 4.15e-03, grad_scale: 8.0 +2023-03-09 21:30:29,658 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96837.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:31:07,528 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=96869.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:31:26,997 INFO [train.py:898] (2/4) Epoch 27, batch 2400, loss[loss=0.1332, simple_loss=0.2156, pruned_loss=0.02536, over 18572.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2472, pruned_loss=0.03308, over 3598356.14 frames. ], batch size: 45, lr: 4.15e-03, grad_scale: 8.0 +2023-03-09 21:31:46,412 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-03-09 21:31:52,345 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.581e+02 2.380e+02 2.700e+02 3.447e+02 6.459e+02, threshold=5.400e+02, percent-clipped=1.0 +2023-03-09 21:32:25,337 INFO [train.py:898] (2/4) Epoch 27, batch 2450, loss[loss=0.1401, simple_loss=0.2272, pruned_loss=0.02652, over 18298.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2475, pruned_loss=0.03293, over 3602588.72 frames. 
], batch size: 49, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:33:19,355 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96980.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:33:22,428 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-03-09 21:33:24,802 INFO [train.py:898] (2/4) Epoch 27, batch 2500, loss[loss=0.1639, simple_loss=0.2523, pruned_loss=0.03774, over 18269.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2467, pruned_loss=0.03242, over 3605583.07 frames. ], batch size: 47, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:33:41,776 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-09 21:33:42,726 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97000.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:33:50,768 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.816e+02 2.553e+02 2.843e+02 3.348e+02 7.332e+02, threshold=5.685e+02, percent-clipped=3.0 +2023-03-09 21:34:15,108 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97028.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:34:16,517 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6883, 2.5884, 2.7003, 2.7747, 3.2719, 4.9685, 4.8907, 3.1278], + device='cuda:2'), covar=tensor([0.2067, 0.2470, 0.3213, 0.1991, 0.2411, 0.0245, 0.0357, 0.1210], + device='cuda:2'), in_proj_covar=tensor([0.0330, 0.0365, 0.0412, 0.0293, 0.0400, 0.0267, 0.0305, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 21:34:23,790 INFO [train.py:898] (2/4) Epoch 27, batch 2550, loss[loss=0.1585, simple_loss=0.2512, pruned_loss=0.03288, over 16208.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2464, pruned_loss=0.03269, over 3588430.89 frames. ], batch size: 94, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:34:36,686 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97046.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:34:54,337 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97061.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:34:55,442 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1778, 5.1980, 5.2779, 4.9824, 5.0160, 5.0137, 5.3842, 5.3460], + device='cuda:2'), covar=tensor([0.0073, 0.0064, 0.0052, 0.0109, 0.0058, 0.0159, 0.0063, 0.0098], + device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0075, 0.0080, 0.0100, 0.0080, 0.0109, 0.0091, 0.0092], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 21:35:22,689 INFO [train.py:898] (2/4) Epoch 27, batch 2600, loss[loss=0.1695, simple_loss=0.2653, pruned_loss=0.03681, over 17969.00 frames. ], tot_loss[loss=0.1563, simple_loss=0.2467, pruned_loss=0.03294, over 3590414.25 frames. 
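Annotation on the slowly decaying `lr` values (4.16e-03 → 4.14e-03 over a few hundred batches, with a larger drop at each epoch boundary): this is consistent with an Eden-style schedule that decays as an inverse quarter power in both the batch and the epoch counters. A sketch under that assumption (the formula is the Eden rule as commonly published; the constants and the epoch-counter convention are assumptions that happen to reproduce the logged values):

```python
def eden_lr(batch: float, epoch: float, base_lr: float = 0.05,
            lr_batches: float = 5000.0, lr_epochs: float = 3.5) -> float:
    """Eden-style schedule: inverse-quarter-power decay in both counters."""
    batch_factor = ((batch ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
    epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
    return base_lr * batch_factor * epoch_factor

print(f"{eden_lr(batch=96500, epoch=26):.2e}")  # ~4.15e-03, as logged here
```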
], batch size: 65, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:35:32,735 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97094.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:35:47,783 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.742e+02 2.535e+02 2.936e+02 3.647e+02 8.103e+02, threshold=5.871e+02, percent-clipped=2.0 +2023-03-09 21:36:16,475 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97132.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:36:19,913 INFO [train.py:898] (2/4) Epoch 27, batch 2650, loss[loss=0.1618, simple_loss=0.2571, pruned_loss=0.03324, over 18059.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2465, pruned_loss=0.03287, over 3603816.09 frames. ], batch size: 62, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:37:18,528 INFO [train.py:898] (2/4) Epoch 27, batch 2700, loss[loss=0.1442, simple_loss=0.2251, pruned_loss=0.03168, over 18443.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2468, pruned_loss=0.03303, over 3603114.53 frames. ], batch size: 43, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:37:38,548 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9095, 4.0762, 2.5403, 4.1142, 5.2597, 2.9210, 3.8933, 3.9333], + device='cuda:2'), covar=tensor([0.0213, 0.1539, 0.1772, 0.0758, 0.0112, 0.1187, 0.0724, 0.0867], + device='cuda:2'), in_proj_covar=tensor([0.0184, 0.0283, 0.0211, 0.0201, 0.0142, 0.0188, 0.0224, 0.0233], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 21:37:38,978 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0 +2023-03-09 21:37:44,982 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.852e+02 2.663e+02 3.178e+02 4.002e+02 6.925e+02, threshold=6.355e+02, percent-clipped=3.0 +2023-03-09 21:38:17,456 INFO [train.py:898] (2/4) Epoch 27, batch 2750, loss[loss=0.1568, simple_loss=0.2545, pruned_loss=0.02958, over 18252.00 frames. ], tot_loss[loss=0.1557, simple_loss=0.246, pruned_loss=0.03275, over 3602232.31 frames. ], batch size: 60, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:38:47,042 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97260.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:39:13,946 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-09 21:39:16,420 INFO [train.py:898] (2/4) Epoch 27, batch 2800, loss[loss=0.1343, simple_loss=0.2253, pruned_loss=0.02164, over 18503.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2466, pruned_loss=0.03285, over 3600932.33 frames. ], batch size: 47, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:39:44,375 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.673e+02 2.484e+02 2.831e+02 3.589e+02 1.148e+03, threshold=5.662e+02, percent-clipped=3.0 +2023-03-09 21:40:00,193 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97321.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:40:01,785 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-03-09 21:40:15,678 INFO [train.py:898] (2/4) Epoch 27, batch 2850, loss[loss=0.1642, simple_loss=0.2626, pruned_loss=0.03291, over 18358.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2461, pruned_loss=0.03241, over 3610335.33 frames. 
], batch size: 55, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:40:24,491 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97342.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:40:30,832 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97347.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:40:41,766 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97356.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:41:03,118 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97374.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:41:15,321 INFO [train.py:898] (2/4) Epoch 27, batch 2900, loss[loss=0.1818, simple_loss=0.2728, pruned_loss=0.04543, over 17747.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2454, pruned_loss=0.03221, over 3600731.72 frames. ], batch size: 70, lr: 4.14e-03, grad_scale: 8.0 +2023-03-09 21:41:38,570 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97403.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 21:41:43,555 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.636e+02 2.502e+02 2.896e+02 3.589e+02 7.058e+02, threshold=5.793e+02, percent-clipped=2.0 +2023-03-09 21:41:43,989 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97408.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:42:11,800 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97432.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:42:14,996 INFO [train.py:898] (2/4) Epoch 27, batch 2950, loss[loss=0.1507, simple_loss=0.2378, pruned_loss=0.0318, over 18369.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2465, pruned_loss=0.03253, over 3602797.80 frames. ], batch size: 50, lr: 4.13e-03, grad_scale: 4.0 +2023-03-09 21:42:15,368 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97435.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:43:06,346 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7027, 2.9084, 2.6632, 2.8898, 3.7607, 3.6941, 3.2380, 2.9821], + device='cuda:2'), covar=tensor([0.0167, 0.0311, 0.0534, 0.0422, 0.0166, 0.0154, 0.0379, 0.0394], + device='cuda:2'), in_proj_covar=tensor([0.0146, 0.0146, 0.0168, 0.0166, 0.0143, 0.0128, 0.0162, 0.0165], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 21:43:08,292 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97480.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:43:13,709 INFO [train.py:898] (2/4) Epoch 27, batch 3000, loss[loss=0.1679, simple_loss=0.2615, pruned_loss=0.03715, over 18296.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2469, pruned_loss=0.0328, over 3593060.77 frames. ], batch size: 57, lr: 4.13e-03, grad_scale: 4.0 +2023-03-09 21:43:13,709 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 21:43:26,375 INFO [train.py:932] (2/4) Epoch 27, validation: loss=0.1498, simple_loss=0.2479, pruned_loss=0.02584, over 944034.00 frames. 
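Annotation on the validation records above: the pattern is announce ("Computing validation loss"), run the dev sets under `no_grad`, report a frame-weighted loss ("validation: loss=..., over 944034.00 frames"), then report peak GPU memory. A minimal sketch of that bookkeeping (the loop structure and the `compute_loss` interface are assumptions; the memory query is the standard PyTorch API):

```python
import torch

def validate(model, valid_loader, compute_loss) -> float:
    """Frame-weighted validation loss. compute_loss is assumed to return
    (loss_tensor, num_frames) for one batch."""
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_loader:
            loss, num_frames = compute_loss(model, batch)
            tot_loss += loss.item() * num_frames
            tot_frames += num_frames
    model.train()
    return tot_loss / tot_frames

# The "Maximum memory allocated so far is 19987MB" records correspond to
# torch.cuda.max_memory_allocated() // (1024 ** 2)
```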
+2023-03-09 21:43:26,376 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 21:43:30,485 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6960, 4.0035, 2.1860, 3.8032, 5.0424, 2.6595, 3.6209, 3.9454], + device='cuda:2'), covar=tensor([0.0248, 0.1204, 0.1878, 0.0712, 0.0110, 0.1251, 0.0757, 0.0740], + device='cuda:2'), in_proj_covar=tensor([0.0185, 0.0283, 0.0212, 0.0203, 0.0144, 0.0188, 0.0225, 0.0233], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 21:43:54,644 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.734e+02 2.624e+02 3.078e+02 3.801e+02 7.902e+02, threshold=6.156e+02, percent-clipped=2.0 +2023-03-09 21:44:09,211 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5943, 3.3496, 2.2848, 4.4181, 3.0475, 4.1938, 2.4492, 3.8057], + device='cuda:2'), covar=tensor([0.0677, 0.0923, 0.1505, 0.0504, 0.0893, 0.0311, 0.1284, 0.0471], + device='cuda:2'), in_proj_covar=tensor([0.0227, 0.0235, 0.0198, 0.0299, 0.0200, 0.0274, 0.0208, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 21:44:25,249 INFO [train.py:898] (2/4) Epoch 27, batch 3050, loss[loss=0.1593, simple_loss=0.2463, pruned_loss=0.03618, over 17767.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2473, pruned_loss=0.03283, over 3596717.32 frames. ], batch size: 70, lr: 4.13e-03, grad_scale: 4.0 +2023-03-09 21:44:49,638 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97555.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:45:24,075 INFO [train.py:898] (2/4) Epoch 27, batch 3100, loss[loss=0.1639, simple_loss=0.2613, pruned_loss=0.03326, over 18350.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2478, pruned_loss=0.0332, over 3584565.56 frames. ], batch size: 55, lr: 4.13e-03, grad_scale: 4.0 +2023-03-09 21:45:39,339 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. limit=2.0 +2023-03-09 21:45:52,562 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.712e+02 2.595e+02 3.045e+02 3.637e+02 1.953e+03, threshold=6.090e+02, percent-clipped=1.0 +2023-03-09 21:46:00,724 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97616.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:46:00,959 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97616.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:46:22,625 INFO [train.py:898] (2/4) Epoch 27, batch 3150, loss[loss=0.1337, simple_loss=0.219, pruned_loss=0.02421, over 18451.00 frames. ], tot_loss[loss=0.1576, simple_loss=0.2484, pruned_loss=0.03339, over 3575155.67 frames. 
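Annotation on the `attn_weights_entropy` tensors: these read as periodic diagnostics reporting per-head entropies of the attention distributions (low values mean sharply peaked attention, high values mean diffuse attention); the accompanying `covar`/`in_proj_covar`/`out_proj_covar` tensors look like running second-order statistics of the same modules and are not reconstructed here. A sketch of the entropy computation under that reading (the shape convention is an assumption):

```python
import torch

def attn_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
    """Per-head entropy of attention distributions.

    attn_weights: (num_heads, batch, query_len, key_len), each row a
    distribution over keys. Returns one value per head, averaged over
    batch and query positions -- comparable to the logged diagnostics."""
    ent = -(attn_weights * (attn_weights + 1.0e-20).log()).sum(dim=-1)
    return ent.mean(dim=(1, 2))
```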
], batch size: 43, lr: 4.13e-03, grad_scale: 4.0 +2023-03-09 21:46:47,677 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97656.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:46:50,070 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5602, 3.3654, 2.2335, 4.3054, 3.0022, 4.0328, 2.4007, 3.6731], + device='cuda:2'), covar=tensor([0.0607, 0.0856, 0.1541, 0.0526, 0.0874, 0.0380, 0.1282, 0.0526], + device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0234, 0.0197, 0.0299, 0.0199, 0.0273, 0.0208, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 21:47:09,209 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97674.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 21:47:20,593 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-03-09 21:47:21,034 INFO [train.py:898] (2/4) Epoch 27, batch 3200, loss[loss=0.1459, simple_loss=0.2324, pruned_loss=0.02966, over 18553.00 frames. ], tot_loss[loss=0.157, simple_loss=0.2476, pruned_loss=0.03315, over 3578347.47 frames. ], batch size: 49, lr: 4.13e-03, grad_scale: 8.0 +2023-03-09 21:47:36,645 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97698.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 21:47:43,647 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97703.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:47:44,757 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97704.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:47:50,081 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.671e+02 2.606e+02 3.062e+02 3.639e+02 1.226e+03, threshold=6.124e+02, percent-clipped=5.0 +2023-03-09 21:48:14,380 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0 +2023-03-09 21:48:14,879 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97730.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:48:20,213 INFO [train.py:898] (2/4) Epoch 27, batch 3250, loss[loss=0.1534, simple_loss=0.2461, pruned_loss=0.03037, over 18266.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2472, pruned_loss=0.03316, over 3577960.29 frames. ], batch size: 57, lr: 4.13e-03, grad_scale: 8.0 +2023-03-09 21:48:20,653 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97735.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 21:49:19,396 INFO [train.py:898] (2/4) Epoch 27, batch 3300, loss[loss=0.1415, simple_loss=0.2275, pruned_loss=0.02776, over 18493.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.2475, pruned_loss=0.03314, over 3577640.02 frames. 
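Annotation on the `[zipformer.py:625]` records: each encoder stack logs its warmup window (`warmup_begin`/`warmup_end`), the current `batch_count`, and which layers, if any, are stochastically skipped this batch; by this point in training `num_to_drop` is almost always 0, with occasional single-layer drops like `layers_to_drop={0}`. A sketch of the sampling step under that LayerDrop-style reading (the policy and the probability are assumptions, not the actual Zipformer code):

```python
import random

def sample_layers_to_drop(num_layers: int, drop_prob: float = 0.075) -> set:
    """Independently skip each encoder layer with a small probability;
    usually returns the empty set, occasionally one or two indices, as in
    the num_to_drop/layers_to_drop records."""
    return {i for i in range(num_layers) if random.random() < drop_prob}
```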
], batch size: 44, lr: 4.13e-03, grad_scale: 8.0 +2023-03-09 21:49:47,821 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7874, 5.2477, 5.2225, 5.2072, 4.7264, 5.1554, 4.6122, 5.1318], + device='cuda:2'), covar=tensor([0.0256, 0.0282, 0.0217, 0.0462, 0.0396, 0.0234, 0.1104, 0.0320], + device='cuda:2'), in_proj_covar=tensor([0.0236, 0.0279, 0.0280, 0.0358, 0.0288, 0.0290, 0.0322, 0.0282], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 21:49:48,583 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.710e+02 2.346e+02 2.851e+02 3.327e+02 7.225e+02, threshold=5.702e+02, percent-clipped=2.0 +2023-03-09 21:50:18,958 INFO [train.py:898] (2/4) Epoch 27, batch 3350, loss[loss=0.1546, simple_loss=0.2514, pruned_loss=0.02888, over 18503.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.2477, pruned_loss=0.03306, over 3569778.00 frames. ], batch size: 51, lr: 4.13e-03, grad_scale: 8.0 +2023-03-09 21:51:17,699 INFO [train.py:898] (2/4) Epoch 27, batch 3400, loss[loss=0.1523, simple_loss=0.2359, pruned_loss=0.03439, over 18356.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2479, pruned_loss=0.03316, over 3574362.68 frames. ], batch size: 46, lr: 4.12e-03, grad_scale: 8.0 +2023-03-09 21:51:21,322 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97888.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:51:40,589 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-03-09 21:51:45,800 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.740e+02 2.527e+02 3.006e+02 3.880e+02 7.423e+02, threshold=6.013e+02, percent-clipped=1.0 +2023-03-09 21:51:48,336 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97911.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:51:48,429 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6143, 4.6124, 4.6845, 4.4241, 4.4865, 4.5509, 4.7319, 4.7291], + device='cuda:2'), covar=tensor([0.0099, 0.0078, 0.0085, 0.0128, 0.0078, 0.0160, 0.0095, 0.0101], + device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0075, 0.0080, 0.0101, 0.0080, 0.0109, 0.0092, 0.0092], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 21:51:54,454 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97916.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:52:10,398 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-09 21:52:16,405 INFO [train.py:898] (2/4) Epoch 27, batch 3450, loss[loss=0.1666, simple_loss=0.2517, pruned_loss=0.04071, over 12484.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2473, pruned_loss=0.03304, over 3565222.94 frames. ], batch size: 129, lr: 4.12e-03, grad_scale: 8.0 +2023-03-09 21:52:33,438 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97949.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:52:51,317 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=97964.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:53:15,169 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. limit=2.0 +2023-03-09 21:53:15,532 INFO [train.py:898] (2/4) Epoch 27, batch 3500, loss[loss=0.1626, simple_loss=0.2553, pruned_loss=0.03492, over 18480.00 frames. 
], tot_loss[loss=0.1564, simple_loss=0.247, pruned_loss=0.03288, over 3572241.68 frames. ], batch size: 59, lr: 4.12e-03, grad_scale: 8.0 +2023-03-09 21:53:31,122 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97998.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:53:41,460 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98003.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:53:43,453 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8701, 3.8911, 3.7084, 3.3989, 3.6784, 3.0835, 3.1107, 3.9818], + device='cuda:2'), covar=tensor([0.0068, 0.0089, 0.0080, 0.0137, 0.0084, 0.0172, 0.0187, 0.0052], + device='cuda:2'), in_proj_covar=tensor([0.0156, 0.0175, 0.0145, 0.0195, 0.0154, 0.0187, 0.0190, 0.0133], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 21:53:48,591 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.729e+02 2.463e+02 3.009e+02 3.694e+02 8.089e+02, threshold=6.019e+02, percent-clipped=2.0 +2023-03-09 21:53:50,102 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6922, 3.5600, 2.3516, 4.5069, 3.1899, 4.3242, 2.6242, 4.0580], + device='cuda:2'), covar=tensor([0.0674, 0.0865, 0.1488, 0.0499, 0.0842, 0.0363, 0.1200, 0.0417], + device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0235, 0.0196, 0.0298, 0.0199, 0.0273, 0.0209, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 21:54:12,142 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98030.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 21:54:12,282 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98030.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:54:17,274 INFO [train.py:898] (2/4) Epoch 27, batch 3550, loss[loss=0.1322, simple_loss=0.2175, pruned_loss=0.02347, over 18356.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2463, pruned_loss=0.0326, over 3576867.06 frames. ], batch size: 46, lr: 4.12e-03, grad_scale: 8.0 +2023-03-09 21:54:29,875 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98046.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:54:35,245 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98051.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:54:59,066 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-03-09 21:55:04,935 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98078.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:55:12,352 INFO [train.py:898] (2/4) Epoch 27, batch 3600, loss[loss=0.147, simple_loss=0.2236, pruned_loss=0.03522, over 17599.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2457, pruned_loss=0.03259, over 3569419.76 frames. ], batch size: 39, lr: 4.12e-03, grad_scale: 8.0 +2023-03-09 21:55:35,498 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98106.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:55:38,137 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.847e+02 2.404e+02 2.873e+02 3.414e+02 5.036e+02, threshold=5.745e+02, percent-clipped=0.0 +2023-03-09 21:56:16,248 INFO [train.py:898] (2/4) Epoch 28, batch 0, loss[loss=0.1523, simple_loss=0.244, pruned_loss=0.03028, over 18531.00 frames. 
], tot_loss[loss=0.1523, simple_loss=0.244, pruned_loss=0.03028, over 18531.00 frames. ], batch size: 47, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 21:56:16,248 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 21:56:28,179 INFO [train.py:932] (2/4) Epoch 28, validation: loss=0.1499, simple_loss=0.2483, pruned_loss=0.02581, over 944034.00 frames. +2023-03-09 21:56:28,180 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 21:57:24,354 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98167.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:57:26,207 INFO [train.py:898] (2/4) Epoch 28, batch 50, loss[loss=0.1785, simple_loss=0.2693, pruned_loss=0.04384, over 18270.00 frames. ], tot_loss[loss=0.1573, simple_loss=0.2481, pruned_loss=0.03326, over 804147.58 frames. ], batch size: 57, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 21:58:15,011 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.839e+02 2.500e+02 2.819e+02 3.475e+02 6.212e+02, threshold=5.638e+02, percent-clipped=1.0 +2023-03-09 21:58:17,546 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98211.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:58:26,501 INFO [train.py:898] (2/4) Epoch 28, batch 100, loss[loss=0.1635, simple_loss=0.2535, pruned_loss=0.03678, over 18117.00 frames. ], tot_loss[loss=0.1575, simple_loss=0.2477, pruned_loss=0.03368, over 1419949.29 frames. ], batch size: 62, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 21:58:34,782 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0587, 5.5395, 5.2422, 5.3160, 5.1420, 5.0146, 5.6043, 5.5440], + device='cuda:2'), covar=tensor([0.1136, 0.0792, 0.0675, 0.0753, 0.1485, 0.0756, 0.0598, 0.0771], + device='cuda:2'), in_proj_covar=tensor([0.0635, 0.0559, 0.0399, 0.0581, 0.0778, 0.0575, 0.0788, 0.0607], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 21:58:41,225 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7410, 2.4349, 2.7265, 2.6848, 3.2888, 4.9940, 4.8187, 3.3340], + device='cuda:2'), covar=tensor([0.2041, 0.2599, 0.3150, 0.2093, 0.2516, 0.0246, 0.0377, 0.1148], + device='cuda:2'), in_proj_covar=tensor([0.0329, 0.0364, 0.0410, 0.0292, 0.0397, 0.0268, 0.0302, 0.0272], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 21:58:53,034 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5780, 3.9590, 2.2357, 3.8388, 5.0110, 2.5665, 3.5438, 3.7402], + device='cuda:2'), covar=tensor([0.0268, 0.1266, 0.1842, 0.0711, 0.0111, 0.1308, 0.0844, 0.0858], + device='cuda:2'), in_proj_covar=tensor([0.0185, 0.0282, 0.0210, 0.0202, 0.0143, 0.0187, 0.0223, 0.0232], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 21:58:56,014 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98244.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:59:14,187 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98259.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 21:59:25,572 INFO [train.py:898] (2/4) Epoch 28, batch 150, loss[loss=0.1495, simple_loss=0.2303, pruned_loss=0.03437, over 17634.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2474, pruned_loss=0.03347, over 1903417.07 frames. 
], batch size: 39, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 21:59:37,568 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5091, 3.2687, 4.3187, 3.8926, 3.1476, 2.9402, 3.9132, 4.4928], + device='cuda:2'), covar=tensor([0.0906, 0.1429, 0.0309, 0.0488, 0.0961, 0.1258, 0.0500, 0.0345], + device='cuda:2'), in_proj_covar=tensor([0.0154, 0.0284, 0.0172, 0.0188, 0.0198, 0.0198, 0.0201, 0.0211], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 22:00:12,040 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.6953, 5.6392, 5.2851, 5.5981, 5.5913, 4.9409, 5.4701, 5.2704], + device='cuda:2'), covar=tensor([0.0397, 0.0396, 0.1267, 0.0781, 0.0512, 0.0422, 0.0458, 0.0935], + device='cuda:2'), in_proj_covar=tensor([0.0516, 0.0589, 0.0728, 0.0455, 0.0474, 0.0538, 0.0572, 0.0701], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 22:00:13,480 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5012, 2.1844, 2.3840, 2.5575, 2.7335, 4.7074, 4.6992, 3.3171], + device='cuda:2'), covar=tensor([0.2531, 0.3384, 0.3740, 0.2333, 0.3797, 0.0341, 0.0415, 0.1145], + device='cuda:2'), in_proj_covar=tensor([0.0331, 0.0365, 0.0411, 0.0293, 0.0398, 0.0268, 0.0302, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 22:00:14,047 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.664e+02 2.460e+02 2.957e+02 3.466e+02 5.415e+02, threshold=5.915e+02, percent-clipped=0.0 +2023-03-09 22:00:25,420 INFO [train.py:898] (2/4) Epoch 28, batch 200, loss[loss=0.1343, simple_loss=0.2176, pruned_loss=0.02551, over 18170.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2467, pruned_loss=0.03276, over 2283218.28 frames. ], batch size: 44, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 22:00:38,096 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98330.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 22:00:52,426 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6950, 3.4475, 5.3902, 3.2117, 4.7516, 2.5872, 3.0937, 1.9427], + device='cuda:2'), covar=tensor([0.1376, 0.1133, 0.0172, 0.0803, 0.0476, 0.2851, 0.2814, 0.2322], + device='cuda:2'), in_proj_covar=tensor([0.0232, 0.0255, 0.0229, 0.0210, 0.0266, 0.0281, 0.0338, 0.0247], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 22:01:24,247 INFO [train.py:898] (2/4) Epoch 28, batch 250, loss[loss=0.1433, simple_loss=0.2295, pruned_loss=0.02857, over 18534.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2467, pruned_loss=0.03285, over 2572960.03 frames. 
], batch size: 49, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 22:01:30,419 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8290, 3.3919, 2.6459, 3.2936, 3.9586, 2.6725, 3.3779, 3.3982], + device='cuda:2'), covar=tensor([0.0345, 0.1118, 0.1385, 0.0737, 0.0232, 0.1094, 0.0713, 0.0726], + device='cuda:2'), in_proj_covar=tensor([0.0183, 0.0280, 0.0209, 0.0201, 0.0143, 0.0186, 0.0222, 0.0231], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 22:01:34,752 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98378.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 22:01:43,804 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7669, 3.1125, 4.4960, 3.7321, 2.7920, 4.6991, 3.9840, 3.0882], + device='cuda:2'), covar=tensor([0.0495, 0.1402, 0.0310, 0.0480, 0.1592, 0.0240, 0.0635, 0.0938], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0246, 0.0233, 0.0174, 0.0229, 0.0221, 0.0260, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 22:02:11,175 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.633e+02 2.515e+02 2.972e+02 3.446e+02 6.764e+02, threshold=5.944e+02, percent-clipped=3.0 +2023-03-09 22:02:12,599 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98410.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:02:23,126 INFO [train.py:898] (2/4) Epoch 28, batch 300, loss[loss=0.13, simple_loss=0.2109, pruned_loss=0.02451, over 18475.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2467, pruned_loss=0.03284, over 2798282.77 frames. ], batch size: 43, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 22:02:32,701 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0426, 5.0642, 5.1282, 4.8381, 4.9270, 4.8807, 5.1873, 5.2535], + device='cuda:2'), covar=tensor([0.0067, 0.0065, 0.0062, 0.0117, 0.0057, 0.0171, 0.0089, 0.0086], + device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0075, 0.0080, 0.0100, 0.0080, 0.0109, 0.0092, 0.0092], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 22:02:56,609 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5770, 2.3058, 2.4960, 2.5502, 3.0247, 4.3115, 4.2691, 3.2112], + device='cuda:2'), covar=tensor([0.2190, 0.2895, 0.3369, 0.2285, 0.2733, 0.0447, 0.0478, 0.1093], + device='cuda:2'), in_proj_covar=tensor([0.0331, 0.0365, 0.0412, 0.0293, 0.0398, 0.0268, 0.0302, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 22:03:14,424 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98462.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:03:22,740 INFO [train.py:898] (2/4) Epoch 28, batch 350, loss[loss=0.1753, simple_loss=0.2712, pruned_loss=0.03965, over 16007.00 frames. ], tot_loss[loss=0.156, simple_loss=0.2465, pruned_loss=0.03272, over 2974626.86 frames. 
], batch size: 94, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 22:03:25,410 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98471.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:04:09,218 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.910e+02 2.547e+02 2.961e+02 3.445e+02 6.973e+02, threshold=5.923e+02, percent-clipped=1.0 +2023-03-09 22:04:21,293 INFO [train.py:898] (2/4) Epoch 28, batch 400, loss[loss=0.1525, simple_loss=0.2512, pruned_loss=0.02696, over 18307.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2465, pruned_loss=0.03255, over 3114310.23 frames. ], batch size: 54, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 22:04:37,429 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-03-09 22:04:43,823 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7021, 3.0327, 2.7389, 2.9722, 3.7778, 3.6798, 3.2667, 3.0379], + device='cuda:2'), covar=tensor([0.0168, 0.0258, 0.0491, 0.0371, 0.0186, 0.0148, 0.0356, 0.0395], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0147, 0.0168, 0.0168, 0.0144, 0.0130, 0.0163, 0.0166], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 22:04:49,566 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98543.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:04:50,568 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98544.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:05:20,089 INFO [train.py:898] (2/4) Epoch 28, batch 450, loss[loss=0.1521, simple_loss=0.2431, pruned_loss=0.03061, over 18283.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.2456, pruned_loss=0.03228, over 3232885.42 frames. ], batch size: 49, lr: 4.04e-03, grad_scale: 8.0 +2023-03-09 22:05:46,787 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98592.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:06:01,952 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98604.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:06:07,231 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.606e+02 2.466e+02 2.902e+02 3.493e+02 6.837e+02, threshold=5.804e+02, percent-clipped=4.0 +2023-03-09 22:06:13,864 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3924, 2.3745, 3.9021, 3.5375, 2.1554, 4.0529, 3.5358, 2.5432], + device='cuda:2'), covar=tensor([0.0552, 0.2144, 0.0379, 0.0456, 0.2249, 0.0341, 0.0768, 0.1315], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0246, 0.0233, 0.0174, 0.0228, 0.0221, 0.0260, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 22:06:19,752 INFO [train.py:898] (2/4) Epoch 28, batch 500, loss[loss=0.1477, simple_loss=0.2415, pruned_loss=0.02692, over 18484.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2462, pruned_loss=0.03264, over 3292580.03 frames. 
], batch size: 51, lr: 4.03e-03, grad_scale: 8.0 +2023-03-09 22:06:24,722 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7496, 3.0089, 4.2588, 3.7179, 2.6275, 4.5409, 3.8362, 2.8256], + device='cuda:2'), covar=tensor([0.0483, 0.1536, 0.0352, 0.0472, 0.1688, 0.0223, 0.0666, 0.1088], + device='cuda:2'), in_proj_covar=tensor([0.0222, 0.0245, 0.0233, 0.0174, 0.0228, 0.0221, 0.0260, 0.0198], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 22:07:15,633 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.09 vs. limit=5.0 +2023-03-09 22:07:18,533 INFO [train.py:898] (2/4) Epoch 28, batch 550, loss[loss=0.1636, simple_loss=0.2551, pruned_loss=0.03611, over 16216.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2459, pruned_loss=0.03243, over 3363153.50 frames. ], batch size: 94, lr: 4.03e-03, grad_scale: 8.0 +2023-03-09 22:07:38,674 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.73 vs. limit=2.0 +2023-03-09 22:08:05,611 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.504e+02 2.472e+02 2.832e+02 3.276e+02 6.900e+02, threshold=5.664e+02, percent-clipped=1.0 +2023-03-09 22:08:17,676 INFO [train.py:898] (2/4) Epoch 28, batch 600, loss[loss=0.1492, simple_loss=0.2489, pruned_loss=0.02468, over 18395.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.2457, pruned_loss=0.03225, over 3403876.09 frames. ], batch size: 52, lr: 4.03e-03, grad_scale: 8.0 +2023-03-09 22:08:38,868 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5936, 3.6278, 2.3884, 4.5265, 3.2359, 4.3708, 2.7365, 4.1621], + device='cuda:2'), covar=tensor([0.0723, 0.0859, 0.1515, 0.0480, 0.0808, 0.0351, 0.1190, 0.0413], + device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0235, 0.0196, 0.0299, 0.0198, 0.0274, 0.0209, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 22:09:08,705 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98762.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:09:12,994 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98766.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:09:16,298 INFO [train.py:898] (2/4) Epoch 28, batch 650, loss[loss=0.1484, simple_loss=0.2407, pruned_loss=0.0281, over 18292.00 frames. ], tot_loss[loss=0.1552, simple_loss=0.2458, pruned_loss=0.03233, over 3440544.98 frames. ], batch size: 49, lr: 4.03e-03, grad_scale: 8.0 +2023-03-09 22:09:42,213 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-03-09 22:10:03,545 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.904e+02 2.586e+02 3.038e+02 3.660e+02 8.440e+02, threshold=6.077e+02, percent-clipped=4.0 +2023-03-09 22:10:04,947 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=98810.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:10:15,452 INFO [train.py:898] (2/4) Epoch 28, batch 700, loss[loss=0.1666, simple_loss=0.2562, pruned_loss=0.03851, over 17661.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2464, pruned_loss=0.03263, over 3475481.44 frames. 
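Annotation on the `[scaling.py:679]` Whitening records: these fire when a feature-whitening constraint is checked; the channels are split into `num_groups`, and a scalar metric measures how far each group's feature covariance is from a multiple of the identity (1.0 for perfectly white features), logged against its `limit`. One such measure is the eigenvalue ratio E[λ²]/E[λ]², sketched below purely for illustration; the module's exact metric is not reconstructed from this log:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    """x: (num_frames, num_channels). Splits channels into num_groups and
    returns mean(eig^2) / mean(eig)^2 of each group's feature covariance,
    averaged over groups: 1.0 iff the covariance is a multiple of the
    identity, larger the less white the features are."""
    n, c = x.shape
    x = x.reshape(n, num_groups, c // num_groups)
    cov = torch.einsum('ngi,ngj->gij', x, x) / n          # (groups, d, d)
    eigs = torch.linalg.eigvalsh(cov)                     # (groups, d)
    return ((eigs ** 2).mean(dim=-1)
            / eigs.mean(dim=-1).clamp(min=1e-20) ** 2).mean()
```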
], batch size: 70, lr: 4.03e-03, grad_scale: 8.0 +2023-03-09 22:10:44,942 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1656, 5.6968, 5.3332, 5.5167, 5.3290, 5.1584, 5.7669, 5.7510], + device='cuda:2'), covar=tensor([0.1162, 0.0809, 0.0605, 0.0635, 0.1372, 0.0708, 0.0603, 0.0694], + device='cuda:2'), in_proj_covar=tensor([0.0643, 0.0566, 0.0406, 0.0586, 0.0789, 0.0583, 0.0803, 0.0613], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 22:11:14,550 INFO [train.py:898] (2/4) Epoch 28, batch 750, loss[loss=0.1892, simple_loss=0.27, pruned_loss=0.05422, over 12632.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2467, pruned_loss=0.03259, over 3496673.22 frames. ], batch size: 130, lr: 4.03e-03, grad_scale: 8.0 +2023-03-09 22:11:39,948 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98890.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:11:46,324 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98895.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:11:50,706 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98899.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:12:02,002 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.555e+02 2.589e+02 3.008e+02 3.946e+02 6.579e+02, threshold=6.017e+02, percent-clipped=1.0 +2023-03-09 22:12:13,961 INFO [train.py:898] (2/4) Epoch 28, batch 800, loss[loss=0.1753, simple_loss=0.2682, pruned_loss=0.04118, over 18128.00 frames. ], tot_loss[loss=0.155, simple_loss=0.2458, pruned_loss=0.03208, over 3526335.95 frames. ], batch size: 62, lr: 4.03e-03, grad_scale: 8.0 +2023-03-09 22:12:52,314 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98951.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:12:58,204 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98956.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:12:59,634 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. limit=2.0 +2023-03-09 22:13:12,397 INFO [train.py:898] (2/4) Epoch 28, batch 850, loss[loss=0.1533, simple_loss=0.2494, pruned_loss=0.02857, over 18387.00 frames. ], tot_loss[loss=0.1552, simple_loss=0.2462, pruned_loss=0.03213, over 3542917.28 frames. ], batch size: 50, lr: 4.03e-03, grad_scale: 8.0 +2023-03-09 22:13:15,053 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98971.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:14:00,121 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.700e+02 2.503e+02 2.990e+02 3.802e+02 7.789e+02, threshold=5.979e+02, percent-clipped=2.0 +2023-03-09 22:14:11,480 INFO [train.py:898] (2/4) Epoch 28, batch 900, loss[loss=0.15, simple_loss=0.2391, pruned_loss=0.03047, over 18409.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2466, pruned_loss=0.03251, over 3551521.97 frames. ], batch size: 50, lr: 4.03e-03, grad_scale: 8.0 +2023-03-09 22:14:14,853 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. 
limit=2.0 +2023-03-09 22:14:27,829 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99032.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:14:49,956 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99051.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:15:07,197 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99066.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:15:10,394 INFO [train.py:898] (2/4) Epoch 28, batch 950, loss[loss=0.1596, simple_loss=0.2516, pruned_loss=0.03382, over 18501.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2468, pruned_loss=0.03255, over 3562805.32 frames. ], batch size: 47, lr: 4.02e-03, grad_scale: 8.0 +2023-03-09 22:15:39,242 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6793, 3.9405, 2.3430, 3.9857, 5.0177, 2.4935, 3.5612, 3.9467], + device='cuda:2'), covar=tensor([0.0243, 0.1411, 0.1895, 0.0729, 0.0126, 0.1485, 0.0866, 0.0688], + device='cuda:2'), in_proj_covar=tensor([0.0184, 0.0280, 0.0209, 0.0201, 0.0143, 0.0185, 0.0222, 0.0231], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 22:15:57,910 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.971e+02 2.497e+02 3.033e+02 3.631e+02 7.579e+02, threshold=6.066e+02, percent-clipped=1.0 +2023-03-09 22:16:01,766 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99112.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:16:03,882 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99114.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:16:09,567 INFO [train.py:898] (2/4) Epoch 28, batch 1000, loss[loss=0.1509, simple_loss=0.2425, pruned_loss=0.02962, over 18412.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2468, pruned_loss=0.03271, over 3554064.88 frames. ], batch size: 50, lr: 4.02e-03, grad_scale: 8.0 +2023-03-09 22:17:08,449 INFO [train.py:898] (2/4) Epoch 28, batch 1050, loss[loss=0.1552, simple_loss=0.2513, pruned_loss=0.0295, over 18486.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2472, pruned_loss=0.03279, over 3566008.15 frames. ], batch size: 51, lr: 4.02e-03, grad_scale: 8.0 +2023-03-09 22:17:43,541 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99199.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:17:55,236 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.556e+02 2.410e+02 2.895e+02 3.644e+02 7.761e+02, threshold=5.789e+02, percent-clipped=3.0 +2023-03-09 22:18:06,850 INFO [train.py:898] (2/4) Epoch 28, batch 1100, loss[loss=0.1433, simple_loss=0.2256, pruned_loss=0.03047, over 18039.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2474, pruned_loss=0.03285, over 3563398.01 frames. 
], batch size: 40, lr: 4.02e-03, grad_scale: 8.0 +2023-03-09 22:18:11,921 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6767, 2.3185, 2.5668, 2.6725, 3.0084, 4.4877, 4.5458, 3.1708], + device='cuda:2'), covar=tensor([0.2056, 0.2740, 0.3085, 0.1904, 0.2611, 0.0328, 0.0387, 0.1122], + device='cuda:2'), in_proj_covar=tensor([0.0332, 0.0365, 0.0413, 0.0292, 0.0398, 0.0269, 0.0304, 0.0273], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 22:18:17,479 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4564, 3.2554, 2.0268, 4.3700, 2.9680, 3.8980, 2.2382, 3.7926], + device='cuda:2'), covar=tensor([0.0649, 0.0987, 0.1732, 0.0500, 0.0899, 0.0252, 0.1467, 0.0519], + device='cuda:2'), in_proj_covar=tensor([0.0228, 0.0237, 0.0198, 0.0300, 0.0199, 0.0275, 0.0210, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 22:18:18,490 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99229.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:18:39,057 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99246.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:18:40,203 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99247.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:18:44,752 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99251.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:19:05,407 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99268.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:19:06,230 INFO [train.py:898] (2/4) Epoch 28, batch 1150, loss[loss=0.1898, simple_loss=0.273, pruned_loss=0.05331, over 13378.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2475, pruned_loss=0.03307, over 3559133.97 frames. ], batch size: 131, lr: 4.02e-03, grad_scale: 8.0 +2023-03-09 22:19:31,899 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99290.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:19:47,023 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9708, 3.7742, 5.0785, 4.4789, 3.5214, 3.1798, 4.5885, 5.3043], + device='cuda:2'), covar=tensor([0.0729, 0.1404, 0.0198, 0.0350, 0.0848, 0.1073, 0.0344, 0.0192], + device='cuda:2'), in_proj_covar=tensor([0.0154, 0.0283, 0.0173, 0.0188, 0.0198, 0.0198, 0.0201, 0.0212], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 22:19:53,827 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.536e+02 2.497e+02 2.883e+02 3.542e+02 9.328e+02, threshold=5.765e+02, percent-clipped=1.0 +2023-03-09 22:20:05,824 INFO [train.py:898] (2/4) Epoch 28, batch 1200, loss[loss=0.1626, simple_loss=0.2542, pruned_loss=0.03548, over 18528.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2469, pruned_loss=0.03279, over 3561700.27 frames. 
], batch size: 49, lr: 4.02e-03, grad_scale: 8.0 +2023-03-09 22:20:12,058 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99324.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:20:15,138 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99327.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:20:17,659 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99329.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:21:04,010 INFO [train.py:898] (2/4) Epoch 28, batch 1250, loss[loss=0.1516, simple_loss=0.246, pruned_loss=0.02858, over 18478.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2471, pruned_loss=0.03287, over 3572658.27 frames. ], batch size: 53, lr: 4.02e-03, grad_scale: 8.0 +2023-03-09 22:21:18,328 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-09 22:21:23,267 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99385.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:21:46,470 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-03-09 22:21:49,414 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99407.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:21:51,421 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.740e+02 2.716e+02 3.087e+02 3.833e+02 7.403e+02, threshold=6.174e+02, percent-clipped=2.0 +2023-03-09 22:22:03,400 INFO [train.py:898] (2/4) Epoch 28, batch 1300, loss[loss=0.1429, simple_loss=0.229, pruned_loss=0.02838, over 17211.00 frames. ], tot_loss[loss=0.156, simple_loss=0.2466, pruned_loss=0.03267, over 3578540.09 frames. ], batch size: 38, lr: 4.02e-03, grad_scale: 16.0 +2023-03-09 22:22:05,009 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99420.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:22:58,230 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.01 vs. limit=5.0 +2023-03-09 22:23:01,739 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99468.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 22:23:02,624 INFO [train.py:898] (2/4) Epoch 28, batch 1350, loss[loss=0.1388, simple_loss=0.2299, pruned_loss=0.02387, over 18553.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.247, pruned_loss=0.03284, over 3568679.91 frames. ], batch size: 49, lr: 4.02e-03, grad_scale: 16.0 +2023-03-09 22:23:17,501 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99481.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:23:49,955 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.936e+02 2.551e+02 3.034e+02 3.736e+02 7.669e+02, threshold=6.068e+02, percent-clipped=1.0 +2023-03-09 22:23:52,043 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-03-09 22:24:01,510 INFO [train.py:898] (2/4) Epoch 28, batch 1400, loss[loss=0.1589, simple_loss=0.2517, pruned_loss=0.03308, over 18325.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.2474, pruned_loss=0.03305, over 3577863.25 frames. 
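Annotation on `grad_scale`: this is the fp16 loss-scaling factor, which halves when gradient overflows are detected (8.0 → 4.0 earlier in this stretch) and doubles back after a run of stable steps (8.0 → 16.0 here). A minimal sketch of that dynamic pattern using PyTorch's stock GradScaler (the run may use a custom scaler; the init scale and growth interval below are illustrative assumptions, not values read from this log):

```python
import torch
from torch.cuda.amp import GradScaler, autocast

scaler = GradScaler(init_scale=2.0, growth_factor=2.0,
                    backoff_factor=0.5, growth_interval=2000)

def fp16_train_step(model, optimizer, batch):
    optimizer.zero_grad()
    with autocast():                   # half-precision forward pass
        loss = model(batch)
    scaler.scale(loss).backward()      # backward on the scaled loss
    scaler.step(optimizer)             # unscales; skips the step on inf/nan
    scaler.update()                    # halve on overflow, grow when stable
```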
], batch size: 54, lr: 4.02e-03, grad_scale: 16.0 +2023-03-09 22:24:13,861 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99529.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 22:24:33,038 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99546.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:24:39,349 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99551.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:24:44,510 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8678, 3.0397, 2.7645, 3.0268, 3.8090, 3.7847, 3.3316, 3.0730], + device='cuda:2'), covar=tensor([0.0163, 0.0296, 0.0559, 0.0426, 0.0206, 0.0158, 0.0375, 0.0431], + device='cuda:2'), in_proj_covar=tensor([0.0149, 0.0148, 0.0170, 0.0169, 0.0145, 0.0131, 0.0163, 0.0167], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 22:24:59,146 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99568.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:24:59,961 INFO [train.py:898] (2/4) Epoch 28, batch 1450, loss[loss=0.157, simple_loss=0.2542, pruned_loss=0.02993, over 18625.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.248, pruned_loss=0.03322, over 3572856.47 frames. ], batch size: 52, lr: 4.01e-03, grad_scale: 16.0 +2023-03-09 22:25:14,398 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5111, 6.0282, 5.5757, 5.8400, 5.6713, 5.4709, 6.1120, 6.0680], + device='cuda:2'), covar=tensor([0.1181, 0.0790, 0.0506, 0.0691, 0.1353, 0.0688, 0.0541, 0.0686], + device='cuda:2'), in_proj_covar=tensor([0.0642, 0.0561, 0.0404, 0.0585, 0.0785, 0.0583, 0.0797, 0.0613], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 22:25:16,226 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-09 22:25:19,492 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99585.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:25:24,272 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99589.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:25:29,600 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99594.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:25:35,197 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99599.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:25:48,002 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.800e+02 2.454e+02 2.982e+02 3.510e+02 7.061e+02, threshold=5.964e+02, percent-clipped=2.0 +2023-03-09 22:25:51,194 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-03-09 22:25:59,259 INFO [train.py:898] (2/4) Epoch 28, batch 1500, loss[loss=0.1605, simple_loss=0.2635, pruned_loss=0.02873, over 16158.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.2477, pruned_loss=0.03304, over 3563377.06 frames. 
], batch size: 94, lr: 4.01e-03, grad_scale: 16.0 +2023-03-09 22:26:05,330 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99624.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:26:09,216 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99627.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:26:11,674 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99629.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:26:11,947 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-03-09 22:26:35,817 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99650.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:26:38,318 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.01 vs. limit=5.0 +2023-03-09 22:26:43,330 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-03-09 22:26:58,084 INFO [train.py:898] (2/4) Epoch 28, batch 1550, loss[loss=0.179, simple_loss=0.2722, pruned_loss=0.04296, over 12263.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2471, pruned_loss=0.03296, over 3563279.58 frames. ], batch size: 129, lr: 4.01e-03, grad_scale: 16.0 +2023-03-09 22:27:05,107 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99675.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:27:11,339 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99680.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:27:11,472 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99680.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:27:13,639 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9056, 2.9788, 2.1746, 3.3246, 2.5086, 2.8614, 2.4144, 2.9331], + device='cuda:2'), covar=tensor([0.0646, 0.0741, 0.1272, 0.0593, 0.0824, 0.0307, 0.0984, 0.0458], + device='cuda:2'), in_proj_covar=tensor([0.0227, 0.0236, 0.0197, 0.0300, 0.0200, 0.0276, 0.0209, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 22:27:43,135 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99707.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:27:45,109 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.790e+02 2.548e+02 3.050e+02 3.629e+02 6.290e+02, threshold=6.100e+02, percent-clipped=1.0 +2023-03-09 22:27:57,323 INFO [train.py:898] (2/4) Epoch 28, batch 1600, loss[loss=0.1334, simple_loss=0.2236, pruned_loss=0.02162, over 18368.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2474, pruned_loss=0.03295, over 3566818.83 frames. ], batch size: 46, lr: 4.01e-03, grad_scale: 16.0 +2023-03-09 22:28:22,785 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99741.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:28:39,282 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99755.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:28:56,207 INFO [train.py:898] (2/4) Epoch 28, batch 1650, loss[loss=0.1644, simple_loss=0.2602, pruned_loss=0.03431, over 18241.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2469, pruned_loss=0.03277, over 3562546.78 frames. 
], batch size: 60, lr: 4.01e-03, grad_scale: 16.0 +2023-03-09 22:29:04,526 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99776.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:29:42,835 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.627e+02 2.331e+02 2.829e+02 3.453e+02 7.033e+02, threshold=5.658e+02, percent-clipped=1.0 +2023-03-09 22:29:55,651 INFO [train.py:898] (2/4) Epoch 28, batch 1700, loss[loss=0.1339, simple_loss=0.2248, pruned_loss=0.02149, over 18250.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.246, pruned_loss=0.03249, over 3563957.48 frames. ], batch size: 45, lr: 4.01e-03, grad_scale: 16.0 +2023-03-09 22:30:01,619 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99824.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 22:30:54,436 INFO [train.py:898] (2/4) Epoch 28, batch 1750, loss[loss=0.1513, simple_loss=0.2449, pruned_loss=0.02888, over 17043.00 frames. ], tot_loss[loss=0.156, simple_loss=0.2464, pruned_loss=0.03277, over 3567910.45 frames. ], batch size: 78, lr: 4.01e-03, grad_scale: 16.0 +2023-03-09 22:31:12,554 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99885.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:31:18,281 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-03-09 22:31:35,431 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-09 22:31:39,965 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.785e+02 2.621e+02 3.029e+02 3.741e+02 5.430e+02, threshold=6.058e+02, percent-clipped=0.0 +2023-03-09 22:31:46,611 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7533, 4.0617, 2.3883, 4.0249, 5.0966, 2.6459, 3.7255, 3.9694], + device='cuda:2'), covar=tensor([0.0234, 0.1265, 0.1748, 0.0664, 0.0115, 0.1231, 0.0715, 0.0755], + device='cuda:2'), in_proj_covar=tensor([0.0186, 0.0283, 0.0211, 0.0201, 0.0144, 0.0187, 0.0224, 0.0233], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 22:31:51,930 INFO [train.py:898] (2/4) Epoch 28, batch 1800, loss[loss=0.1472, simple_loss=0.2394, pruned_loss=0.02748, over 18489.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2466, pruned_loss=0.0325, over 3569664.11 frames. ], batch size: 51, lr: 4.01e-03, grad_scale: 16.0 +2023-03-09 22:31:58,567 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99924.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:31:58,707 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99924.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:32:08,639 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99933.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:32:22,795 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99945.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:32:51,448 INFO [train.py:898] (2/4) Epoch 28, batch 1850, loss[loss=0.1575, simple_loss=0.2568, pruned_loss=0.02912, over 18636.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.246, pruned_loss=0.03213, over 3571042.52 frames. 
], batch size: 52, lr: 4.01e-03, grad_scale: 16.0 +2023-03-09 22:32:56,004 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=99972.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:33:05,224 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99980.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:33:41,513 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100007.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 22:33:43,360 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.762e+02 2.401e+02 2.680e+02 3.285e+02 5.327e+02, threshold=5.360e+02, percent-clipped=0.0 +2023-03-09 22:33:55,962 INFO [train.py:898] (2/4) Epoch 28, batch 1900, loss[loss=0.1704, simple_loss=0.2695, pruned_loss=0.03567, over 18494.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2461, pruned_loss=0.03244, over 3575296.81 frames. ], batch size: 51, lr: 4.01e-03, grad_scale: 16.0 +2023-03-09 22:34:06,780 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100028.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:34:15,692 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100036.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:34:35,578 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5566, 3.5183, 2.2065, 4.4306, 3.1216, 3.9296, 2.2345, 3.7972], + device='cuda:2'), covar=tensor([0.0661, 0.0868, 0.1614, 0.0497, 0.0848, 0.0340, 0.1513, 0.0539], + device='cuda:2'), in_proj_covar=tensor([0.0226, 0.0234, 0.0196, 0.0299, 0.0199, 0.0274, 0.0208, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 22:34:53,655 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100068.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 22:34:54,378 INFO [train.py:898] (2/4) Epoch 28, batch 1950, loss[loss=0.1374, simple_loss=0.223, pruned_loss=0.02591, over 18522.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2473, pruned_loss=0.03277, over 3572469.09 frames. ], batch size: 44, lr: 4.00e-03, grad_scale: 16.0 +2023-03-09 22:35:03,933 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100076.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:35:41,385 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.807e+02 2.428e+02 2.748e+02 3.405e+02 9.642e+02, threshold=5.496e+02, percent-clipped=3.0 +2023-03-09 22:35:53,097 INFO [train.py:898] (2/4) Epoch 28, batch 2000, loss[loss=0.1681, simple_loss=0.2559, pruned_loss=0.0402, over 18258.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2474, pruned_loss=0.0327, over 3567289.04 frames. ], batch size: 47, lr: 4.00e-03, grad_scale: 16.0 +2023-03-09 22:35:59,786 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100124.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:35:59,859 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100124.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 22:36:48,564 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-09 22:36:52,993 INFO [train.py:898] (2/4) Epoch 28, batch 2050, loss[loss=0.1568, simple_loss=0.2518, pruned_loss=0.03085, over 18380.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2472, pruned_loss=0.03248, over 3580392.27 frames. 
], batch size: 50, lr: 4.00e-03, grad_scale: 16.0 +2023-03-09 22:36:56,633 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100172.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 22:37:39,779 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.832e+02 2.490e+02 2.932e+02 3.409e+02 5.416e+02, threshold=5.865e+02, percent-clipped=0.0 +2023-03-09 22:37:46,934 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5859, 4.6648, 4.6929, 4.4344, 4.4216, 4.4872, 4.7788, 4.7264], + device='cuda:2'), covar=tensor([0.0085, 0.0068, 0.0070, 0.0121, 0.0071, 0.0154, 0.0083, 0.0104], + device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0075, 0.0080, 0.0100, 0.0080, 0.0109, 0.0092, 0.0091], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 22:37:51,113 INFO [train.py:898] (2/4) Epoch 28, batch 2100, loss[loss=0.1475, simple_loss=0.2454, pruned_loss=0.02481, over 18391.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2467, pruned_loss=0.03241, over 3579178.02 frames. ], batch size: 52, lr: 4.00e-03, grad_scale: 16.0 +2023-03-09 22:37:57,803 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100224.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:38:14,572 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100238.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:38:22,697 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100245.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:38:48,622 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-03-09 22:38:50,269 INFO [train.py:898] (2/4) Epoch 28, batch 2150, loss[loss=0.1351, simple_loss=0.2211, pruned_loss=0.02458, over 18364.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2465, pruned_loss=0.03237, over 3576084.99 frames. ], batch size: 46, lr: 4.00e-03, grad_scale: 16.0 +2023-03-09 22:38:54,618 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100272.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:39:19,650 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-03-09 22:39:19,974 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100293.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:39:27,036 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100299.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:39:30,485 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4741, 2.7404, 4.1075, 3.5352, 2.6183, 4.3306, 3.8102, 2.8297], + device='cuda:2'), covar=tensor([0.0595, 0.1577, 0.0329, 0.0551, 0.1565, 0.0244, 0.0592, 0.0955], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0246, 0.0235, 0.0174, 0.0229, 0.0220, 0.0257, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 22:39:38,058 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.888e+02 2.494e+02 2.925e+02 3.420e+02 7.055e+02, threshold=5.850e+02, percent-clipped=2.0 +2023-03-09 22:39:50,200 INFO [train.py:898] (2/4) Epoch 28, batch 2200, loss[loss=0.1577, simple_loss=0.2505, pruned_loss=0.03243, over 18405.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2474, pruned_loss=0.03252, over 3583319.94 frames. 
], batch size: 52, lr: 4.00e-03, grad_scale: 16.0 +2023-03-09 22:40:08,706 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.51 vs. limit=5.0 +2023-03-09 22:40:09,375 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3634, 5.3474, 4.9970, 5.2933, 5.2807, 4.7693, 5.1877, 4.9336], + device='cuda:2'), covar=tensor([0.0406, 0.0459, 0.1187, 0.0729, 0.0575, 0.0399, 0.0419, 0.1091], + device='cuda:2'), in_proj_covar=tensor([0.0517, 0.0587, 0.0727, 0.0455, 0.0476, 0.0538, 0.0572, 0.0704], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 22:40:11,169 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100336.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:40:26,484 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5026, 2.2410, 2.5207, 2.6524, 3.0184, 4.5788, 4.5750, 2.8906], + device='cuda:2'), covar=tensor([0.2244, 0.2786, 0.3185, 0.2086, 0.2672, 0.0312, 0.0397, 0.1295], + device='cuda:2'), in_proj_covar=tensor([0.0332, 0.0365, 0.0413, 0.0292, 0.0397, 0.0267, 0.0303, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 22:40:28,735 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6792, 3.6227, 2.3185, 4.4740, 3.3182, 4.2063, 2.7717, 3.9948], + device='cuda:2'), covar=tensor([0.0629, 0.0812, 0.1510, 0.0544, 0.0778, 0.0381, 0.1080, 0.0455], + device='cuda:2'), in_proj_covar=tensor([0.0227, 0.0235, 0.0197, 0.0300, 0.0200, 0.0276, 0.0210, 0.0209], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 22:40:43,184 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100363.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 22:40:49,849 INFO [train.py:898] (2/4) Epoch 28, batch 2250, loss[loss=0.1529, simple_loss=0.2495, pruned_loss=0.02819, over 18575.00 frames. ], tot_loss[loss=0.1563, simple_loss=0.2476, pruned_loss=0.03244, over 3587191.53 frames. ], batch size: 54, lr: 4.00e-03, grad_scale: 16.0 +2023-03-09 22:41:07,433 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100384.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:41:29,672 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100402.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:41:37,421 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.960e+02 2.460e+02 2.795e+02 3.389e+02 7.041e+02, threshold=5.590e+02, percent-clipped=1.0 +2023-03-09 22:41:49,306 INFO [train.py:898] (2/4) Epoch 28, batch 2300, loss[loss=0.1581, simple_loss=0.2478, pruned_loss=0.0342, over 18378.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2474, pruned_loss=0.03249, over 3590943.52 frames. ], batch size: 52, lr: 4.00e-03, grad_scale: 16.0 +2023-03-09 22:42:41,813 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100463.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 22:42:48,934 INFO [train.py:898] (2/4) Epoch 28, batch 2350, loss[loss=0.13, simple_loss=0.2119, pruned_loss=0.02405, over 18438.00 frames. ], tot_loss[loss=0.156, simple_loss=0.2468, pruned_loss=0.03254, over 3588001.05 frames. 
], batch size: 43, lr: 4.00e-03, grad_scale: 16.0 +2023-03-09 22:43:35,006 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6775, 3.7058, 3.5106, 3.1217, 3.4431, 2.8557, 2.8579, 3.6486], + device='cuda:2'), covar=tensor([0.0076, 0.0088, 0.0092, 0.0169, 0.0094, 0.0211, 0.0228, 0.0081], + device='cuda:2'), in_proj_covar=tensor([0.0159, 0.0178, 0.0148, 0.0198, 0.0157, 0.0190, 0.0193, 0.0136], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 22:43:36,867 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.732e+02 2.362e+02 2.730e+02 3.461e+02 8.407e+02, threshold=5.459e+02, percent-clipped=2.0 +2023-03-09 22:43:48,265 INFO [train.py:898] (2/4) Epoch 28, batch 2400, loss[loss=0.1521, simple_loss=0.2431, pruned_loss=0.03048, over 18395.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2472, pruned_loss=0.03253, over 3595718.85 frames. ], batch size: 50, lr: 4.00e-03, grad_scale: 16.0 +2023-03-09 22:44:21,857 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3469, 2.7501, 2.3855, 2.7262, 3.4412, 3.3973, 3.0075, 2.6692], + device='cuda:2'), covar=tensor([0.0206, 0.0314, 0.0583, 0.0447, 0.0215, 0.0184, 0.0379, 0.0446], + device='cuda:2'), in_proj_covar=tensor([0.0147, 0.0147, 0.0168, 0.0167, 0.0143, 0.0129, 0.0161, 0.0164], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 22:44:46,629 INFO [train.py:898] (2/4) Epoch 28, batch 2450, loss[loss=0.1868, simple_loss=0.28, pruned_loss=0.04679, over 15971.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2465, pruned_loss=0.03228, over 3587628.99 frames. ], batch size: 94, lr: 3.99e-03, grad_scale: 16.0 +2023-03-09 22:45:16,719 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100594.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:45:34,575 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.748e+02 2.510e+02 2.947e+02 3.432e+02 5.400e+02, threshold=5.894e+02, percent-clipped=0.0 +2023-03-09 22:45:45,723 INFO [train.py:898] (2/4) Epoch 28, batch 2500, loss[loss=0.1561, simple_loss=0.2395, pruned_loss=0.03637, over 18252.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.246, pruned_loss=0.03207, over 3588349.01 frames. ], batch size: 47, lr: 3.99e-03, grad_scale: 16.0 +2023-03-09 22:45:46,115 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4398, 2.7693, 3.9489, 3.4408, 2.4670, 4.1750, 3.6613, 2.7163], + device='cuda:2'), covar=tensor([0.0548, 0.1403, 0.0384, 0.0489, 0.1594, 0.0260, 0.0672, 0.0969], + device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0247, 0.0236, 0.0175, 0.0231, 0.0221, 0.0258, 0.0201], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 22:46:39,048 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100663.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 22:46:45,730 INFO [train.py:898] (2/4) Epoch 28, batch 2550, loss[loss=0.1491, simple_loss=0.2498, pruned_loss=0.02415, over 18502.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2462, pruned_loss=0.03237, over 3581907.34 frames. 
], batch size: 51, lr: 3.99e-03, grad_scale: 16.0 +2023-03-09 22:46:47,198 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8887, 5.2069, 2.7239, 5.0324, 4.9049, 5.2005, 4.9843, 2.6585], + device='cuda:2'), covar=tensor([0.0237, 0.0059, 0.0758, 0.0074, 0.0081, 0.0067, 0.0093, 0.1005], + device='cuda:2'), in_proj_covar=tensor([0.0094, 0.0083, 0.0098, 0.0099, 0.0091, 0.0080, 0.0087, 0.0099], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-09 22:46:57,067 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8752, 5.3476, 5.3087, 5.3353, 4.7855, 5.2204, 4.7176, 5.2135], + device='cuda:2'), covar=tensor([0.0239, 0.0289, 0.0201, 0.0390, 0.0378, 0.0247, 0.1126, 0.0291], + device='cuda:2'), in_proj_covar=tensor([0.0237, 0.0279, 0.0282, 0.0358, 0.0291, 0.0288, 0.0322, 0.0281], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-09 22:47:33,005 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.831e+02 2.422e+02 2.744e+02 3.332e+02 5.019e+02, threshold=5.488e+02, percent-clipped=0.0 +2023-03-09 22:47:36,137 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100711.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 22:47:44,777 INFO [train.py:898] (2/4) Epoch 28, batch 2600, loss[loss=0.1534, simple_loss=0.2493, pruned_loss=0.02876, over 18359.00 frames. ], tot_loss[loss=0.1552, simple_loss=0.2458, pruned_loss=0.03226, over 3586197.61 frames. ], batch size: 55, lr: 3.99e-03, grad_scale: 16.0 +2023-03-09 22:47:58,491 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100730.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 22:48:31,119 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100758.0, num_to_drop=1, layers_to_drop={2} +2023-03-09 22:48:43,893 INFO [train.py:898] (2/4) Epoch 28, batch 2650, loss[loss=0.1826, simple_loss=0.2645, pruned_loss=0.05035, over 12403.00 frames. ], tot_loss[loss=0.155, simple_loss=0.2458, pruned_loss=0.03212, over 3582078.75 frames. ], batch size: 129, lr: 3.99e-03, grad_scale: 16.0 +2023-03-09 22:48:59,716 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.98 vs. limit=5.0 +2023-03-09 22:49:09,555 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100791.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 22:49:31,290 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.698e+02 2.383e+02 2.847e+02 3.406e+02 8.018e+02, threshold=5.694e+02, percent-clipped=5.0 +2023-03-09 22:49:43,090 INFO [train.py:898] (2/4) Epoch 28, batch 2700, loss[loss=0.1514, simple_loss=0.2481, pruned_loss=0.02733, over 18306.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2462, pruned_loss=0.03218, over 3579552.13 frames. ], batch size: 54, lr: 3.99e-03, grad_scale: 16.0 +2023-03-09 22:50:41,484 INFO [train.py:898] (2/4) Epoch 28, batch 2750, loss[loss=0.1667, simple_loss=0.2589, pruned_loss=0.03729, over 18224.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2473, pruned_loss=0.03287, over 3572963.65 frames. ], batch size: 60, lr: 3.99e-03, grad_scale: 16.0 +2023-03-09 22:50:58,256 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.76 vs. 
limit=2.0 +2023-03-09 22:51:10,659 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100894.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:51:27,171 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.680e+02 2.345e+02 2.707e+02 3.433e+02 5.623e+02, threshold=5.414e+02, percent-clipped=0.0 +2023-03-09 22:51:40,786 INFO [train.py:898] (2/4) Epoch 28, batch 2800, loss[loss=0.1759, simple_loss=0.2662, pruned_loss=0.04275, over 18502.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.2476, pruned_loss=0.03312, over 3578104.79 frames. ], batch size: 59, lr: 3.99e-03, grad_scale: 16.0 +2023-03-09 22:51:44,750 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3159, 2.7349, 2.3380, 2.6360, 3.4101, 3.2657, 2.9796, 2.6636], + device='cuda:2'), covar=tensor([0.0212, 0.0284, 0.0610, 0.0418, 0.0199, 0.0211, 0.0389, 0.0423], + device='cuda:2'), in_proj_covar=tensor([0.0148, 0.0148, 0.0169, 0.0168, 0.0145, 0.0131, 0.0162, 0.0166], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 22:52:07,526 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=100942.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:52:38,807 INFO [train.py:898] (2/4) Epoch 28, batch 2850, loss[loss=0.1486, simple_loss=0.2472, pruned_loss=0.02497, over 15831.00 frames. ], tot_loss[loss=0.1571, simple_loss=0.2481, pruned_loss=0.03302, over 3583170.65 frames. ], batch size: 94, lr: 3.99e-03, grad_scale: 16.0 +2023-03-09 22:53:08,923 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7000, 3.2177, 4.0094, 2.8773, 3.6448, 2.6293, 2.7464, 2.2094], + device='cuda:2'), covar=tensor([0.1198, 0.0947, 0.0319, 0.0863, 0.0658, 0.2376, 0.2476, 0.1930], + device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0256, 0.0232, 0.0211, 0.0269, 0.0283, 0.0341, 0.0250], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 22:53:14,096 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5479, 2.3843, 2.5848, 2.6281, 3.1362, 4.6692, 4.5209, 3.1994], + device='cuda:2'), covar=tensor([0.2218, 0.2622, 0.3211, 0.2054, 0.2601, 0.0373, 0.0437, 0.1145], + device='cuda:2'), in_proj_covar=tensor([0.0333, 0.0366, 0.0417, 0.0292, 0.0398, 0.0269, 0.0304, 0.0275], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 22:53:26,398 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.467e+02 2.435e+02 2.818e+02 3.350e+02 5.539e+02, threshold=5.635e+02, percent-clipped=3.0 +2023-03-09 22:53:37,851 INFO [train.py:898] (2/4) Epoch 28, batch 2900, loss[loss=0.1795, simple_loss=0.263, pruned_loss=0.04804, over 12122.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.248, pruned_loss=0.0329, over 3571733.52 frames. ], batch size: 129, lr: 3.99e-03, grad_scale: 16.0 +2023-03-09 22:54:25,159 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=101058.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:54:37,504 INFO [train.py:898] (2/4) Epoch 28, batch 2950, loss[loss=0.1406, simple_loss=0.2327, pruned_loss=0.02426, over 18395.00 frames. ], tot_loss[loss=0.1559, simple_loss=0.2468, pruned_loss=0.03251, over 3582646.95 frames. 
], batch size: 48, lr: 3.98e-03, grad_scale: 16.0 +2023-03-09 22:54:58,166 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101086.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 22:55:21,311 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=101106.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:55:22,654 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101107.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:55:24,472 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.540e+02 2.503e+02 3.089e+02 3.620e+02 7.481e+02, threshold=6.178e+02, percent-clipped=1.0 +2023-03-09 22:55:35,814 INFO [train.py:898] (2/4) Epoch 28, batch 3000, loss[loss=0.1581, simple_loss=0.2507, pruned_loss=0.03277, over 18081.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2476, pruned_loss=0.03282, over 3569237.17 frames. ], batch size: 62, lr: 3.98e-03, grad_scale: 16.0 +2023-03-09 22:55:35,814 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 22:55:47,891 INFO [train.py:932] (2/4) Epoch 28, validation: loss=0.1496, simple_loss=0.2475, pruned_loss=0.02587, over 944034.00 frames. +2023-03-09 22:55:47,892 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 22:56:45,979 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101168.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 22:56:46,688 INFO [train.py:898] (2/4) Epoch 28, batch 3050, loss[loss=0.1586, simple_loss=0.2592, pruned_loss=0.02899, over 17001.00 frames. ], tot_loss[loss=0.1567, simple_loss=0.2479, pruned_loss=0.03273, over 3571132.06 frames. ], batch size: 78, lr: 3.98e-03, grad_scale: 16.0 +2023-03-09 22:57:32,633 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.576e+02 2.344e+02 2.759e+02 3.609e+02 9.420e+02, threshold=5.517e+02, percent-clipped=1.0 +2023-03-09 22:57:45,987 INFO [train.py:898] (2/4) Epoch 28, batch 3100, loss[loss=0.1487, simple_loss=0.2489, pruned_loss=0.02424, over 18484.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.2481, pruned_loss=0.03281, over 3576933.23 frames. ], batch size: 51, lr: 3.98e-03, grad_scale: 16.0 +2023-03-09 22:58:32,514 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7175, 3.6438, 5.2512, 3.1662, 4.4889, 2.5263, 3.2895, 1.7969], + device='cuda:2'), covar=tensor([0.1392, 0.1000, 0.0122, 0.0910, 0.0524, 0.2700, 0.2493, 0.2293], + device='cuda:2'), in_proj_covar=tensor([0.0233, 0.0255, 0.0231, 0.0210, 0.0268, 0.0283, 0.0340, 0.0249], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 22:58:45,527 INFO [train.py:898] (2/4) Epoch 28, batch 3150, loss[loss=0.1568, simple_loss=0.2531, pruned_loss=0.0303, over 16112.00 frames. ], tot_loss[loss=0.1572, simple_loss=0.2484, pruned_loss=0.03304, over 3575053.69 frames. 
], batch size: 94, lr: 3.98e-03, grad_scale: 16.0 +2023-03-09 22:58:51,685 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4165, 5.9785, 5.5604, 5.7376, 5.5681, 5.3205, 6.0318, 5.9995], + device='cuda:2'), covar=tensor([0.1153, 0.0807, 0.0515, 0.0775, 0.1485, 0.0807, 0.0597, 0.0712], + device='cuda:2'), in_proj_covar=tensor([0.0644, 0.0569, 0.0408, 0.0590, 0.0788, 0.0589, 0.0801, 0.0618], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 22:59:31,368 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.645e+02 2.594e+02 3.004e+02 3.371e+02 7.669e+02, threshold=6.008e+02, percent-clipped=3.0 +2023-03-09 22:59:43,375 INFO [train.py:898] (2/4) Epoch 28, batch 3200, loss[loss=0.1325, simple_loss=0.2183, pruned_loss=0.02338, over 18405.00 frames. ], tot_loss[loss=0.1569, simple_loss=0.2477, pruned_loss=0.03302, over 3578218.33 frames. ], batch size: 48, lr: 3.98e-03, grad_scale: 16.0 +2023-03-09 23:00:41,857 INFO [train.py:898] (2/4) Epoch 28, batch 3250, loss[loss=0.1656, simple_loss=0.2637, pruned_loss=0.03376, over 18399.00 frames. ], tot_loss[loss=0.1565, simple_loss=0.2474, pruned_loss=0.03275, over 3580338.13 frames. ], batch size: 52, lr: 3.98e-03, grad_scale: 16.0 +2023-03-09 23:00:59,556 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8633, 3.7224, 5.2619, 2.9770, 4.6318, 2.5966, 3.1271, 1.9209], + device='cuda:2'), covar=tensor([0.1269, 0.0983, 0.0149, 0.0974, 0.0458, 0.2875, 0.2827, 0.2241], + device='cuda:2'), in_proj_covar=tensor([0.0232, 0.0254, 0.0230, 0.0209, 0.0266, 0.0282, 0.0338, 0.0248], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-09 23:01:02,723 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=101386.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 23:01:29,435 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.724e+02 2.697e+02 3.326e+02 3.955e+02 8.507e+02, threshold=6.653e+02, percent-clipped=3.0 +2023-03-09 23:01:40,864 INFO [train.py:898] (2/4) Epoch 28, batch 3300, loss[loss=0.1465, simple_loss=0.2373, pruned_loss=0.02785, over 18295.00 frames. ], tot_loss[loss=0.1566, simple_loss=0.2475, pruned_loss=0.0329, over 3556439.08 frames. 
], batch size: 49, lr: 3.98e-03, grad_scale: 32.0 +2023-03-09 23:02:00,151 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=101434.0, num_to_drop=1, layers_to_drop={0} +2023-03-09 23:02:02,544 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1857, 3.1657, 1.8970, 4.0170, 2.6575, 3.3150, 2.0854, 3.2967], + device='cuda:2'), covar=tensor([0.0722, 0.0902, 0.1668, 0.0523, 0.0960, 0.0401, 0.1480, 0.0597], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0232, 0.0193, 0.0295, 0.0197, 0.0269, 0.0206, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 23:02:17,674 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0201, 3.8593, 5.1178, 4.5626, 3.4359, 3.0754, 4.4461, 5.3684], + device='cuda:2'), covar=tensor([0.0782, 0.1477, 0.0208, 0.0389, 0.1019, 0.1231, 0.0447, 0.0238], + device='cuda:2'), in_proj_covar=tensor([0.0154, 0.0285, 0.0174, 0.0188, 0.0198, 0.0198, 0.0201, 0.0214], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 23:02:33,272 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101463.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:02:39,852 INFO [train.py:898] (2/4) Epoch 28, batch 3350, loss[loss=0.1418, simple_loss=0.225, pruned_loss=0.02926, over 18476.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2466, pruned_loss=0.03256, over 3569399.51 frames. ], batch size: 44, lr: 3.98e-03, grad_scale: 16.0 +2023-03-09 23:02:57,785 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5171, 3.3581, 2.3015, 4.2799, 2.9912, 3.9750, 2.4636, 3.7770], + device='cuda:2'), covar=tensor([0.0617, 0.0851, 0.1384, 0.0522, 0.0846, 0.0379, 0.1144, 0.0429], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0232, 0.0194, 0.0296, 0.0197, 0.0270, 0.0207, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 23:03:25,421 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9096, 3.7519, 5.0094, 4.2864, 3.3966, 2.9050, 4.4220, 5.2111], + device='cuda:2'), covar=tensor([0.0757, 0.1310, 0.0192, 0.0444, 0.0957, 0.1246, 0.0378, 0.0229], + device='cuda:2'), in_proj_covar=tensor([0.0153, 0.0283, 0.0174, 0.0187, 0.0197, 0.0197, 0.0200, 0.0213], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 23:03:28,360 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.802e+02 2.584e+02 3.127e+02 3.784e+02 7.730e+02, threshold=6.254e+02, percent-clipped=1.0 +2023-03-09 23:03:31,232 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-03-09 23:03:38,771 INFO [train.py:898] (2/4) Epoch 28, batch 3400, loss[loss=0.1581, simple_loss=0.2558, pruned_loss=0.03019, over 17746.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2463, pruned_loss=0.03246, over 3572250.02 frames. ], batch size: 70, lr: 3.98e-03, grad_scale: 16.0 +2023-03-09 23:04:38,399 INFO [train.py:898] (2/4) Epoch 28, batch 3450, loss[loss=0.1518, simple_loss=0.2483, pruned_loss=0.02763, over 18382.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2456, pruned_loss=0.03209, over 3578669.41 frames. 
], batch size: 52, lr: 3.98e-03, grad_scale: 16.0 +2023-03-09 23:04:42,218 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6013, 2.4805, 2.6422, 2.6760, 3.2075, 4.8204, 4.7171, 3.2959], + device='cuda:2'), covar=tensor([0.2197, 0.2627, 0.3129, 0.2071, 0.2594, 0.0283, 0.0378, 0.1166], + device='cuda:2'), in_proj_covar=tensor([0.0332, 0.0365, 0.0417, 0.0292, 0.0398, 0.0269, 0.0303, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 23:05:28,094 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.546e+02 2.589e+02 3.041e+02 3.646e+02 6.515e+02, threshold=6.081e+02, percent-clipped=1.0 +2023-03-09 23:05:37,347 INFO [train.py:898] (2/4) Epoch 28, batch 3500, loss[loss=0.1413, simple_loss=0.2257, pruned_loss=0.02846, over 18416.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.245, pruned_loss=0.03178, over 3588357.67 frames. ], batch size: 42, lr: 3.97e-03, grad_scale: 8.0 +2023-03-09 23:06:34,324 INFO [train.py:898] (2/4) Epoch 28, batch 3550, loss[loss=0.1652, simple_loss=0.2526, pruned_loss=0.03891, over 17983.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2446, pruned_loss=0.0315, over 3600292.39 frames. ], batch size: 65, lr: 3.97e-03, grad_scale: 8.0 +2023-03-09 23:06:50,654 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101684.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:07:08,828 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5879, 6.1373, 5.7149, 5.8701, 5.7204, 5.5423, 6.1978, 6.1382], + device='cuda:2'), covar=tensor([0.1170, 0.0777, 0.0406, 0.0688, 0.1450, 0.0775, 0.0619, 0.0719], + device='cuda:2'), in_proj_covar=tensor([0.0645, 0.0569, 0.0408, 0.0594, 0.0788, 0.0590, 0.0805, 0.0618], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 23:07:19,608 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.673e+02 2.328e+02 2.616e+02 3.053e+02 4.840e+02, threshold=5.233e+02, percent-clipped=0.0 +2023-03-09 23:07:28,400 INFO [train.py:898] (2/4) Epoch 28, batch 3600, loss[loss=0.1467, simple_loss=0.2318, pruned_loss=0.03077, over 18356.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2445, pruned_loss=0.03153, over 3601570.67 frames. 
], batch size: 46, lr: 3.97e-03, grad_scale: 8.0 +2023-03-09 23:07:51,948 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3686, 2.9049, 4.1047, 3.4752, 2.4778, 4.3506, 3.7956, 2.7317], + device='cuda:2'), covar=tensor([0.0636, 0.1366, 0.0337, 0.0513, 0.1640, 0.0220, 0.0602, 0.1013], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0246, 0.0235, 0.0174, 0.0228, 0.0219, 0.0258, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-09 23:07:57,181 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101745.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:08:02,782 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.1101, 4.1097, 3.8054, 4.0479, 4.0868, 3.6173, 4.0552, 3.7644], + device='cuda:2'), covar=tensor([0.0706, 0.0907, 0.1595, 0.0940, 0.0838, 0.0709, 0.0668, 0.1400], + device='cuda:2'), in_proj_covar=tensor([0.0522, 0.0590, 0.0730, 0.0458, 0.0483, 0.0540, 0.0573, 0.0711], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-09 23:08:29,836 INFO [train.py:898] (2/4) Epoch 29, batch 0, loss[loss=0.1709, simple_loss=0.2625, pruned_loss=0.0396, over 17964.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2625, pruned_loss=0.0396, over 17964.00 frames. ], batch size: 65, lr: 3.90e-03, grad_scale: 8.0 +2023-03-09 23:08:29,836 INFO [train.py:923] (2/4) Computing validation loss +2023-03-09 23:08:41,776 INFO [train.py:932] (2/4) Epoch 29, validation: loss=0.1494, simple_loss=0.2476, pruned_loss=0.02556, over 944034.00 frames. +2023-03-09 23:08:41,776 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB +2023-03-09 23:08:54,555 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=101763.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:09:40,310 INFO [train.py:898] (2/4) Epoch 29, batch 50, loss[loss=0.1591, simple_loss=0.2602, pruned_loss=0.02897, over 18365.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2462, pruned_loss=0.03178, over 822619.37 frames. ], batch size: 55, lr: 3.90e-03, grad_scale: 4.0 +2023-03-09 23:09:50,574 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=101811.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:09:51,452 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.892e+02 2.454e+02 2.836e+02 3.650e+02 1.455e+03, threshold=5.673e+02, percent-clipped=9.0 +2023-03-09 23:10:10,900 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6396, 2.9805, 2.5252, 2.9357, 3.7189, 3.6296, 3.2242, 2.9090], + device='cuda:2'), covar=tensor([0.0175, 0.0276, 0.0609, 0.0455, 0.0183, 0.0167, 0.0374, 0.0431], + device='cuda:2'), in_proj_covar=tensor([0.0148, 0.0146, 0.0168, 0.0167, 0.0143, 0.0131, 0.0162, 0.0164], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 23:10:38,658 INFO [train.py:898] (2/4) Epoch 29, batch 100, loss[loss=0.1547, simple_loss=0.247, pruned_loss=0.03114, over 17772.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2472, pruned_loss=0.03255, over 1433890.86 frames. ], batch size: 70, lr: 3.90e-03, grad_scale: 4.0 +2023-03-09 23:11:26,440 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.40 vs. 
limit=2.0 +2023-03-09 23:11:38,480 INFO [train.py:898] (2/4) Epoch 29, batch 150, loss[loss=0.142, simple_loss=0.2225, pruned_loss=0.03075, over 18480.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2463, pruned_loss=0.03219, over 1917735.08 frames. ], batch size: 44, lr: 3.90e-03, grad_scale: 4.0 +2023-03-09 23:11:49,330 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.704e+02 2.518e+02 2.968e+02 3.653e+02 8.556e+02, threshold=5.936e+02, percent-clipped=5.0 +2023-03-09 23:11:56,330 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9202, 3.8478, 5.1769, 4.5373, 3.5348, 3.1602, 4.6386, 5.3873], + device='cuda:2'), covar=tensor([0.0785, 0.1494, 0.0198, 0.0398, 0.0880, 0.1180, 0.0336, 0.0242], + device='cuda:2'), in_proj_covar=tensor([0.0156, 0.0287, 0.0176, 0.0189, 0.0199, 0.0200, 0.0203, 0.0216], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 23:12:00,402 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101920.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 23:12:38,454 INFO [train.py:898] (2/4) Epoch 29, batch 200, loss[loss=0.1567, simple_loss=0.251, pruned_loss=0.03114, over 16218.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.246, pruned_loss=0.03211, over 2284300.82 frames. ], batch size: 94, lr: 3.90e-03, grad_scale: 4.0 +2023-03-09 23:12:55,851 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7315, 3.5758, 2.4223, 4.5317, 3.2381, 4.3299, 2.7112, 3.9974], + device='cuda:2'), covar=tensor([0.0608, 0.0800, 0.1329, 0.0510, 0.0816, 0.0344, 0.1071, 0.0439], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0232, 0.0193, 0.0296, 0.0197, 0.0270, 0.0206, 0.0208], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 23:13:12,269 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101981.0, num_to_drop=1, layers_to_drop={3} +2023-03-09 23:13:17,872 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8447, 4.0463, 2.5778, 3.9420, 5.1487, 2.6522, 3.7848, 3.9873], + device='cuda:2'), covar=tensor([0.0202, 0.1148, 0.1554, 0.0660, 0.0108, 0.1223, 0.0686, 0.0730], + device='cuda:2'), in_proj_covar=tensor([0.0188, 0.0286, 0.0213, 0.0203, 0.0147, 0.0188, 0.0226, 0.0234], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 23:13:18,843 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101987.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:13:31,518 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9855, 5.0951, 5.1166, 4.8052, 4.8074, 4.8526, 5.1434, 5.2373], + device='cuda:2'), covar=tensor([0.0075, 0.0059, 0.0060, 0.0117, 0.0063, 0.0150, 0.0068, 0.0091], + device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0075, 0.0081, 0.0100, 0.0079, 0.0109, 0.0092, 0.0092], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-09 23:13:42,372 INFO [train.py:898] (2/4) Epoch 29, batch 250, loss[loss=0.1313, simple_loss=0.2113, pruned_loss=0.02567, over 18390.00 frames. ], tot_loss[loss=0.1539, simple_loss=0.2445, pruned_loss=0.03159, over 2584286.56 frames. 
], batch size: 42, lr: 3.90e-03, grad_scale: 4.0 +2023-03-09 23:13:48,638 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6135, 3.5870, 2.3037, 4.4686, 3.1615, 4.1707, 2.6226, 3.8919], + device='cuda:2'), covar=tensor([0.0655, 0.0727, 0.1427, 0.0484, 0.0823, 0.0287, 0.1155, 0.0444], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0231, 0.0193, 0.0295, 0.0196, 0.0269, 0.0205, 0.0207], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-09 23:13:52,740 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.574e+02 2.573e+02 2.918e+02 3.581e+02 7.098e+02, threshold=5.836e+02, percent-clipped=3.0 +2023-03-09 23:14:26,062 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102040.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:14:35,221 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102048.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:14:41,102 INFO [train.py:898] (2/4) Epoch 29, batch 300, loss[loss=0.183, simple_loss=0.2731, pruned_loss=0.04644, over 17956.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2447, pruned_loss=0.03176, over 2799555.42 frames. ], batch size: 65, lr: 3.90e-03, grad_scale: 4.0 +2023-03-09 23:15:40,929 INFO [train.py:898] (2/4) Epoch 29, batch 350, loss[loss=0.1554, simple_loss=0.2435, pruned_loss=0.03365, over 18256.00 frames. ], tot_loss[loss=0.154, simple_loss=0.2448, pruned_loss=0.0316, over 2983126.62 frames. ], batch size: 47, lr: 3.89e-03, grad_scale: 4.0 +2023-03-09 23:15:48,446 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3533, 5.8856, 5.4697, 5.6374, 5.4961, 5.2652, 5.9338, 5.9052], + device='cuda:2'), covar=tensor([0.1239, 0.0730, 0.0479, 0.0742, 0.1368, 0.0745, 0.0567, 0.0642], + device='cuda:2'), in_proj_covar=tensor([0.0640, 0.0563, 0.0405, 0.0590, 0.0781, 0.0584, 0.0800, 0.0613], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-09 23:15:51,456 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.890e+02 2.462e+02 2.956e+02 3.341e+02 6.335e+02, threshold=5.913e+02, percent-clipped=2.0 +2023-03-09 23:16:00,449 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-03-09 23:16:19,805 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9025, 2.6301, 2.8835, 3.1009, 3.3722, 4.8701, 5.0135, 3.3955], + device='cuda:2'), covar=tensor([0.2012, 0.2517, 0.3259, 0.1795, 0.2495, 0.0347, 0.0298, 0.1133], + device='cuda:2'), in_proj_covar=tensor([0.0333, 0.0366, 0.0418, 0.0294, 0.0400, 0.0270, 0.0304, 0.0276], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-09 23:16:40,559 INFO [train.py:898] (2/4) Epoch 29, batch 400, loss[loss=0.1885, simple_loss=0.2724, pruned_loss=0.05229, over 12295.00 frames. ], tot_loss[loss=0.1534, simple_loss=0.2441, pruned_loss=0.03134, over 3122574.32 frames. ], batch size: 129, lr: 3.89e-03, grad_scale: 8.0 +2023-03-09 23:17:26,714 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.24 vs. limit=5.0 +2023-03-09 23:17:40,600 INFO [train.py:898] (2/4) Epoch 29, batch 450, loss[loss=0.1862, simple_loss=0.2739, pruned_loss=0.0492, over 18204.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2446, pruned_loss=0.03139, over 3233641.13 frames. 
], batch size: 60, lr: 3.89e-03, grad_scale: 8.0 +2023-03-09 23:17:50,999 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.718e+02 2.619e+02 3.042e+02 3.635e+02 5.576e+02, threshold=6.084e+02, percent-clipped=0.0 +2023-03-09 23:18:40,668 INFO [train.py:898] (2/4) Epoch 29, batch 500, loss[loss=0.1467, simple_loss=0.2295, pruned_loss=0.032, over 18484.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2446, pruned_loss=0.03135, over 3301713.14 frames. ], batch size: 44, lr: 3.89e-03, grad_scale: 8.0 +2023-03-09 23:19:07,688 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102276.0, num_to_drop=1, layers_to_drop={1} +2023-03-09 23:19:40,227 INFO [train.py:898] (2/4) Epoch 29, batch 550, loss[loss=0.1762, simple_loss=0.269, pruned_loss=0.04175, over 18116.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.2453, pruned_loss=0.03183, over 3358578.30 frames. ], batch size: 62, lr: 3.89e-03, grad_scale: 8.0 +2023-03-09 23:19:50,542 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.948e+02 2.553e+02 3.006e+02 3.452e+02 5.617e+02, threshold=6.011e+02, percent-clipped=0.0 +2023-03-09 23:19:58,785 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-03-09 23:20:24,920 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102340.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:20:28,153 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102343.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:20:37,286 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. limit=2.0 +2023-03-09 23:20:39,932 INFO [train.py:898] (2/4) Epoch 29, batch 600, loss[loss=0.1484, simple_loss=0.2388, pruned_loss=0.02898, over 18376.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.2451, pruned_loss=0.03171, over 3417225.55 frames. ], batch size: 50, lr: 3.89e-03, grad_scale: 8.0 +2023-03-09 23:21:17,858 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6016, 2.3845, 2.6103, 2.5912, 3.1185, 4.8030, 4.7709, 3.0454], + device='cuda:2'), covar=tensor([0.2130, 0.2562, 0.2969, 0.2025, 0.2568, 0.0269, 0.0348, 0.1219], + device='cuda:2'), in_proj_covar=tensor([0.0332, 0.0365, 0.0416, 0.0292, 0.0398, 0.0270, 0.0303, 0.0274], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0001], + device='cuda:2') +2023-03-09 23:21:20,766 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=102388.0, num_to_drop=0, layers_to_drop=set() +2023-03-09 23:21:39,515 INFO [train.py:898] (2/4) Epoch 29, batch 650, loss[loss=0.1574, simple_loss=0.254, pruned_loss=0.03038, over 18369.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2453, pruned_loss=0.03172, over 3456263.16 frames. 
], batch size: 50, lr: 3.89e-03, grad_scale: 8.0
+2023-03-09 23:21:49,328 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.777e+02 2.518e+02 2.994e+02 3.652e+02 5.297e+02, threshold=5.988e+02, percent-clipped=0.0
+2023-03-09 23:22:06,434 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5196, 4.8964, 4.7718, 4.8817, 4.5613, 5.3645, 4.9782, 4.6535],
+ device='cuda:2'), covar=tensor([0.1227, 0.0943, 0.0880, 0.0841, 0.1298, 0.0794, 0.0748, 0.1817],
+ device='cuda:2'), in_proj_covar=tensor([0.0381, 0.0311, 0.0338, 0.0339, 0.0347, 0.0452, 0.0306, 0.0448],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-09 23:22:38,229 INFO [train.py:898] (2/4) Epoch 29, batch 700, loss[loss=0.1692, simple_loss=0.2723, pruned_loss=0.03308, over 15997.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2457, pruned_loss=0.03185, over 3479893.10 frames. ], batch size: 94, lr: 3.89e-03, grad_scale: 8.0
+2023-03-09 23:23:37,714 INFO [train.py:898] (2/4) Epoch 29, batch 750, loss[loss=0.1572, simple_loss=0.2559, pruned_loss=0.0292, over 18402.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2449, pruned_loss=0.03173, over 3505055.08 frames. ], batch size: 52, lr: 3.89e-03, grad_scale: 8.0
+2023-03-09 23:23:48,497 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.674e+02 2.550e+02 3.053e+02 3.427e+02 7.248e+02, threshold=6.105e+02, percent-clipped=3.0
+2023-03-09 23:23:59,002 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2067, 5.2881, 5.2809, 4.9703, 5.0551, 5.0751, 5.3610, 5.3667],
+ device='cuda:2'), covar=tensor([0.0061, 0.0055, 0.0055, 0.0108, 0.0047, 0.0143, 0.0057, 0.0067],
+ device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0074, 0.0080, 0.0100, 0.0079, 0.0108, 0.0091, 0.0091],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:2')
+2023-03-09 23:24:18,315 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8983, 4.2014, 4.1001, 4.2118, 3.8105, 4.1189, 3.8188, 4.1243],
+ device='cuda:2'), covar=tensor([0.0295, 0.0322, 0.0275, 0.0535, 0.0368, 0.0257, 0.0808, 0.0380],
+ device='cuda:2'), in_proj_covar=tensor([0.0237, 0.0281, 0.0280, 0.0360, 0.0290, 0.0289, 0.0321, 0.0281],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:2')
+2023-03-09 23:24:35,932 INFO [train.py:898] (2/4) Epoch 29, batch 800, loss[loss=0.1575, simple_loss=0.2491, pruned_loss=0.03291, over 18631.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2461, pruned_loss=0.03227, over 3523143.14 frames. ], batch size: 52, lr: 3.89e-03, grad_scale: 8.0
+2023-03-09 23:24:57,303 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0
+2023-03-09 23:25:03,674 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102576.0, num_to_drop=1, layers_to_drop={2}
+2023-03-09 23:25:35,087 INFO [train.py:898] (2/4) Epoch 29, batch 850, loss[loss=0.1742, simple_loss=0.2626, pruned_loss=0.04284, over 18350.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.2461, pruned_loss=0.03206, over 3553106.82 frames. ], batch size: 55, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:25:44,383 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.65 vs. limit=5.0
+2023-03-09 23:25:45,074 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0797, 4.2452, 2.6947, 4.1384, 5.3908, 2.8807, 4.1502, 4.2033],
+ device='cuda:2'), covar=tensor([0.0224, 0.1186, 0.1487, 0.0620, 0.0085, 0.1084, 0.0572, 0.0647],
+ device='cuda:2'), in_proj_covar=tensor([0.0189, 0.0288, 0.0213, 0.0204, 0.0147, 0.0188, 0.0226, 0.0236],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-09 23:25:45,744 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.923e+02 2.608e+02 3.070e+02 3.838e+02 1.078e+03, threshold=6.141e+02, percent-clipped=3.0
+2023-03-09 23:25:47,228 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8108, 3.4021, 4.6209, 3.9986, 3.1070, 4.8675, 4.0904, 3.2831],
+ device='cuda:2'), covar=tensor([0.0527, 0.1240, 0.0278, 0.0435, 0.1432, 0.0230, 0.0560, 0.0861],
+ device='cuda:2'), in_proj_covar=tensor([0.0227, 0.0249, 0.0237, 0.0175, 0.0231, 0.0222, 0.0262, 0.0202],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-09 23:26:00,175 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=102624.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 23:26:10,199 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9283, 5.4292, 5.3518, 5.3745, 4.8600, 5.3139, 4.7806, 5.2909],
+ device='cuda:2'), covar=tensor([0.0248, 0.0249, 0.0192, 0.0450, 0.0425, 0.0214, 0.1024, 0.0300],
+ device='cuda:2'), in_proj_covar=tensor([0.0237, 0.0282, 0.0282, 0.0362, 0.0291, 0.0291, 0.0322, 0.0282],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:2')
+2023-03-09 23:26:22,278 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102643.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:26:34,016 INFO [train.py:898] (2/4) Epoch 29, batch 900, loss[loss=0.1362, simple_loss=0.2218, pruned_loss=0.02524, over 18165.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.2462, pruned_loss=0.03196, over 3566566.58 frames. ], batch size: 44, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:26:39,925 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3006, 5.7835, 5.4589, 5.5836, 5.3884, 5.2207, 5.8734, 5.7902],
+ device='cuda:2'), covar=tensor([0.1183, 0.0909, 0.0543, 0.0734, 0.1448, 0.0737, 0.0611, 0.0817],
+ device='cuda:2'), in_proj_covar=tensor([0.0650, 0.0572, 0.0409, 0.0597, 0.0793, 0.0594, 0.0814, 0.0624],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004],
+ device='cuda:2')
+2023-03-09 23:27:18,912 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=102691.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:27:21,373 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0457, 5.4918, 2.5795, 5.4037, 5.1184, 5.4709, 5.2827, 2.7532],
+ device='cuda:2'), covar=tensor([0.0234, 0.0085, 0.0971, 0.0074, 0.0103, 0.0115, 0.0115, 0.1146],
+ device='cuda:2'), in_proj_covar=tensor([0.0094, 0.0085, 0.0099, 0.0101, 0.0092, 0.0081, 0.0088, 0.0100],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005],
+ device='cuda:2')
+2023-03-09 23:27:32,896 INFO [train.py:898] (2/4) Epoch 29, batch 950, loss[loss=0.1546, simple_loss=0.2406, pruned_loss=0.03435, over 18268.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.2458, pruned_loss=0.03166, over 3578382.83 frames. ], batch size: 47, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:27:43,051 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.728e+02 2.495e+02 2.903e+02 3.483e+02 7.298e+02, threshold=5.805e+02, percent-clipped=1.0
+2023-03-09 23:28:24,448 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.90 vs. limit=5.0
+2023-03-09 23:28:31,532 INFO [train.py:898] (2/4) Epoch 29, batch 1000, loss[loss=0.1347, simple_loss=0.2178, pruned_loss=0.02585, over 18457.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2459, pruned_loss=0.03166, over 3577974.48 frames. ], batch size: 43, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:28:48,412 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8784, 4.7237, 4.9390, 4.5805, 4.5760, 4.8231, 5.0647, 4.9309],
+ device='cuda:2'), covar=tensor([0.0111, 0.0114, 0.0120, 0.0184, 0.0096, 0.0179, 0.0105, 0.0152],
+ device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0075, 0.0081, 0.0100, 0.0080, 0.0109, 0.0092, 0.0091],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:2')
+2023-03-09 23:28:56,619 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0
+2023-03-09 23:29:00,187 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5741, 2.3366, 2.4998, 2.7671, 2.8754, 4.7489, 4.8442, 3.3679],
+ device='cuda:2'), covar=tensor([0.2545, 0.3379, 0.3789, 0.2235, 0.3717, 0.0369, 0.0408, 0.1179],
+ device='cuda:2'), in_proj_covar=tensor([0.0333, 0.0366, 0.0417, 0.0294, 0.0398, 0.0271, 0.0305, 0.0275],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002],
+ device='cuda:2')
+2023-03-09 23:29:04,608 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8481, 4.2008, 2.5102, 3.9493, 5.2142, 2.5522, 3.7643, 4.0017],
+ device='cuda:2'), covar=tensor([0.0208, 0.1100, 0.1711, 0.0775, 0.0100, 0.1442, 0.0766, 0.0745],
+ device='cuda:2'), in_proj_covar=tensor([0.0188, 0.0285, 0.0211, 0.0202, 0.0146, 0.0187, 0.0225, 0.0233],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-09 23:29:11,638 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0
+2023-03-09 23:29:31,337 INFO [train.py:898] (2/4) Epoch 29, batch 1050, loss[loss=0.141, simple_loss=0.2272, pruned_loss=0.02744, over 18506.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.245, pruned_loss=0.03129, over 3588766.18 frames. ], batch size: 47, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:29:34,195 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9137, 3.6932, 5.1320, 3.2137, 4.4616, 2.6988, 3.1739, 1.7742],
+ device='cuda:2'), covar=tensor([0.1209, 0.1049, 0.0130, 0.0888, 0.0483, 0.2563, 0.2612, 0.2384],
+ device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0256, 0.0233, 0.0211, 0.0267, 0.0283, 0.0341, 0.0250],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-09 23:29:42,517 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.563e+02 2.351e+02 2.635e+02 3.103e+02 8.908e+02, threshold=5.271e+02, percent-clipped=2.0
+2023-03-09 23:30:27,413 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=102851.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:30:29,891 INFO [train.py:898] (2/4) Epoch 29, batch 1100, loss[loss=0.1639, simple_loss=0.2568, pruned_loss=0.0355, over 17992.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2449, pruned_loss=0.03139, over 3593372.30 frames. ], batch size: 65, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:31:10,256 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=102887.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:31:28,098 INFO [train.py:898] (2/4) Epoch 29, batch 1150, loss[loss=0.1833, simple_loss=0.2723, pruned_loss=0.04714, over 18100.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2448, pruned_loss=0.0314, over 3589745.33 frames. ], batch size: 62, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:31:32,450 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0292, 5.0209, 5.0573, 4.8222, 4.8055, 4.9026, 5.1943, 5.1784],
+ device='cuda:2'), covar=tensor([0.0087, 0.0075, 0.0074, 0.0136, 0.0069, 0.0178, 0.0090, 0.0090],
+ device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0074, 0.0080, 0.0099, 0.0079, 0.0108, 0.0091, 0.0091],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:2')
+2023-03-09 23:31:38,787 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.637e+02 2.444e+02 3.032e+02 3.732e+02 6.439e+02, threshold=6.064e+02, percent-clipped=4.0
+2023-03-09 23:31:39,258 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102912.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:32:22,227 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102948.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:32:27,595 INFO [train.py:898] (2/4) Epoch 29, batch 1200, loss[loss=0.1478, simple_loss=0.2432, pruned_loss=0.02621, over 18490.00 frames. ], tot_loss[loss=0.1534, simple_loss=0.2443, pruned_loss=0.03125, over 3593842.41 frames. ], batch size: 51, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:32:57,417 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6983, 3.5545, 4.9175, 4.2762, 3.2500, 2.9181, 4.4036, 5.1279],
+ device='cuda:2'), covar=tensor([0.0823, 0.1519, 0.0221, 0.0424, 0.1024, 0.1306, 0.0420, 0.0220],
+ device='cuda:2'), in_proj_covar=tensor([0.0153, 0.0282, 0.0175, 0.0186, 0.0197, 0.0196, 0.0201, 0.0214],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-09 23:33:17,622 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8002, 4.8850, 4.8823, 4.6161, 4.6607, 4.6378, 4.9743, 5.0021],
+ device='cuda:2'), covar=tensor([0.0079, 0.0065, 0.0060, 0.0127, 0.0063, 0.0161, 0.0067, 0.0077],
+ device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0074, 0.0080, 0.0100, 0.0079, 0.0108, 0.0091, 0.0091],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:2')
+2023-03-09 23:33:26,586 INFO [train.py:898] (2/4) Epoch 29, batch 1250, loss[loss=0.1524, simple_loss=0.2491, pruned_loss=0.02783, over 18476.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.245, pruned_loss=0.03158, over 3590457.76 frames. ], batch size: 51, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:33:36,999 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.897e+02 2.667e+02 2.982e+02 3.378e+02 7.201e+02, threshold=5.965e+02, percent-clipped=1.0
+2023-03-09 23:33:43,303 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1904, 5.6101, 5.6059, 5.5926, 5.0908, 5.5182, 5.0420, 5.5496],
+ device='cuda:2'), covar=tensor([0.0215, 0.0254, 0.0157, 0.0353, 0.0373, 0.0215, 0.0919, 0.0264],
+ device='cuda:2'), in_proj_covar=tensor([0.0236, 0.0281, 0.0280, 0.0360, 0.0290, 0.0291, 0.0319, 0.0281],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:2')
+2023-03-09 23:34:25,451 INFO [train.py:898] (2/4) Epoch 29, batch 1300, loss[loss=0.1589, simple_loss=0.2523, pruned_loss=0.03274, over 17734.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2451, pruned_loss=0.03154, over 3600717.66 frames. ], batch size: 70, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:34:30,501 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1158, 3.4520, 3.3996, 2.8734, 3.0827, 2.8461, 2.4679, 2.3235],
+ device='cuda:2'), covar=tensor([0.0268, 0.0163, 0.0135, 0.0320, 0.0356, 0.0257, 0.0617, 0.0727],
+ device='cuda:2'), in_proj_covar=tensor([0.0076, 0.0064, 0.0069, 0.0072, 0.0093, 0.0071, 0.0079, 0.0087],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006],
+ device='cuda:2')
+2023-03-09 23:35:19,280 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103098.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:35:24,708 INFO [train.py:898] (2/4) Epoch 29, batch 1350, loss[loss=0.1571, simple_loss=0.2542, pruned_loss=0.02999, over 18294.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2452, pruned_loss=0.03184, over 3578935.50 frames. ], batch size: 54, lr: 3.88e-03, grad_scale: 8.0
+2023-03-09 23:35:30,975 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8649, 5.3547, 2.6621, 5.2058, 5.1198, 5.3812, 5.1822, 2.6121],
+ device='cuda:2'), covar=tensor([0.0254, 0.0084, 0.0876, 0.0093, 0.0076, 0.0072, 0.0091, 0.1097],
+ device='cuda:2'), in_proj_covar=tensor([0.0095, 0.0085, 0.0099, 0.0101, 0.0091, 0.0081, 0.0088, 0.0099],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005],
+ device='cuda:2')
+2023-03-09 23:35:35,188 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.567e+02 2.524e+02 2.935e+02 3.557e+02 7.291e+02, threshold=5.869e+02, percent-clipped=2.0
+2023-03-09 23:35:41,665 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9945, 5.4688, 5.4202, 5.4629, 4.9051, 5.3560, 4.8099, 5.3362],
+ device='cuda:2'), covar=tensor([0.0241, 0.0256, 0.0183, 0.0394, 0.0386, 0.0223, 0.0971, 0.0326],
+ device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0278, 0.0278, 0.0357, 0.0288, 0.0288, 0.0317, 0.0279],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:2')
+2023-03-09 23:35:59,067 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103132.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:36:23,953 INFO [train.py:898] (2/4) Epoch 29, batch 1400, loss[loss=0.1743, simple_loss=0.2629, pruned_loss=0.04291, over 18299.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2449, pruned_loss=0.0317, over 3593317.87 frames. ], batch size: 57, lr: 3.87e-03, grad_scale: 8.0
+2023-03-09 23:36:31,060 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103159.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:36:46,315 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1865, 5.2576, 4.4762, 5.1344, 5.2180, 4.6591, 5.0288, 4.7344],
+ device='cuda:2'), covar=tensor([0.0896, 0.0776, 0.2653, 0.1240, 0.0759, 0.0599, 0.0841, 0.1453],
+ device='cuda:2'), in_proj_covar=tensor([0.0528, 0.0597, 0.0740, 0.0461, 0.0485, 0.0543, 0.0575, 0.0717],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0005, 0.0005, 0.0006],
+ device='cuda:2')
+2023-03-09 23:37:00,390 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8947, 5.3933, 5.3530, 5.4069, 4.8525, 5.2985, 4.7162, 5.3100],
+ device='cuda:2'), covar=tensor([0.0252, 0.0298, 0.0200, 0.0400, 0.0366, 0.0225, 0.1028, 0.0291],
+ device='cuda:2'), in_proj_covar=tensor([0.0237, 0.0281, 0.0280, 0.0360, 0.0290, 0.0290, 0.0320, 0.0281],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:2')
+2023-03-09 23:37:03,794 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7422, 2.9759, 4.5222, 3.7857, 2.9244, 4.7611, 4.0681, 3.0102],
+ device='cuda:2'), covar=tensor([0.0504, 0.1575, 0.0266, 0.0457, 0.1479, 0.0202, 0.0578, 0.0935],
+ device='cuda:2'), in_proj_covar=tensor([0.0222, 0.0243, 0.0234, 0.0173, 0.0226, 0.0217, 0.0256, 0.0197],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-09 23:37:11,266 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103193.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:37:22,802 INFO [train.py:898] (2/4) Epoch 29, batch 1450, loss[loss=0.1604, simple_loss=0.2562, pruned_loss=0.03227, over 18468.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2457, pruned_loss=0.0318, over 3603892.56 frames. ], batch size: 59, lr: 3.87e-03, grad_scale: 8.0
+2023-03-09 23:37:27,603 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103207.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:37:30,345 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0
+2023-03-09 23:37:33,042 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.757e+02 2.357e+02 2.757e+02 3.131e+02 6.312e+02, threshold=5.514e+02, percent-clipped=1.0
+2023-03-09 23:37:58,446 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8757, 2.9948, 2.7499, 2.9780, 3.8223, 3.8367, 3.3647, 3.0441],
+ device='cuda:2'), covar=tensor([0.0145, 0.0298, 0.0494, 0.0389, 0.0171, 0.0148, 0.0336, 0.0356],
+ device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0150, 0.0169, 0.0168, 0.0145, 0.0133, 0.0164, 0.0167],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-09 23:38:09,532 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103243.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:38:21,767 INFO [train.py:898] (2/4) Epoch 29, batch 1500, loss[loss=0.1552, simple_loss=0.2499, pruned_loss=0.03028, over 18573.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2453, pruned_loss=0.03176, over 3592997.41 frames. ], batch size: 54, lr: 3.87e-03, grad_scale: 8.0
+2023-03-09 23:38:33,722 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.13 vs. limit=5.0
+2023-03-09 23:39:20,020 INFO [train.py:898] (2/4) Epoch 29, batch 1550, loss[loss=0.1566, simple_loss=0.2505, pruned_loss=0.03136, over 18301.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2452, pruned_loss=0.0318, over 3603602.70 frames. ], batch size: 57, lr: 3.87e-03, grad_scale: 8.0
+2023-03-09 23:39:29,923 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.405e+02 2.624e+02 2.952e+02 3.606e+02 8.116e+02, threshold=5.905e+02, percent-clipped=6.0
+2023-03-09 23:40:18,840 INFO [train.py:898] (2/4) Epoch 29, batch 1600, loss[loss=0.1446, simple_loss=0.232, pruned_loss=0.02861, over 18371.00 frames. ], tot_loss[loss=0.1547, simple_loss=0.2457, pruned_loss=0.03185, over 3605388.34 frames. ], batch size: 46, lr: 3.87e-03, grad_scale: 8.0
+2023-03-09 23:40:34,720 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103366.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 23:41:00,029 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4423, 2.7368, 2.5010, 2.7650, 3.4656, 3.3892, 3.0515, 2.7624],
+ device='cuda:2'), covar=tensor([0.0177, 0.0323, 0.0609, 0.0408, 0.0223, 0.0200, 0.0390, 0.0393],
+ device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0150, 0.0170, 0.0169, 0.0146, 0.0133, 0.0165, 0.0168],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-09 23:41:18,051 INFO [train.py:898] (2/4) Epoch 29, batch 1650, loss[loss=0.1561, simple_loss=0.2524, pruned_loss=0.02987, over 18367.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2459, pruned_loss=0.03196, over 3603801.97 frames. ], batch size: 55, lr: 3.87e-03, grad_scale: 8.0
+2023-03-09 23:41:29,738 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.750e+02 2.322e+02 2.729e+02 3.382e+02 5.618e+02, threshold=5.459e+02, percent-clipped=0.0
+2023-03-09 23:41:47,088 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103427.0, num_to_drop=1, layers_to_drop={2}
+2023-03-09 23:42:17,236 INFO [train.py:898] (2/4) Epoch 29, batch 1700, loss[loss=0.1612, simple_loss=0.2544, pruned_loss=0.03397, over 18499.00 frames. ], tot_loss[loss=0.1548, simple_loss=0.2461, pruned_loss=0.03171, over 3609175.86 frames. ], batch size: 53, lr: 3.87e-03, grad_scale: 8.0
+2023-03-09 23:42:18,663 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103454.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:42:41,821 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6608, 2.8936, 2.5810, 2.9541, 3.6820, 3.6483, 3.1995, 2.9005],
+ device='cuda:2'), covar=tensor([0.0173, 0.0302, 0.0590, 0.0374, 0.0222, 0.0181, 0.0379, 0.0439],
+ device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0150, 0.0170, 0.0168, 0.0146, 0.0133, 0.0165, 0.0167],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-09 23:42:58,879 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103488.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:43:01,934 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7262, 3.6898, 3.5272, 3.1230, 3.4430, 2.8384, 2.8282, 3.6722],
+ device='cuda:2'), covar=tensor([0.0063, 0.0101, 0.0093, 0.0141, 0.0105, 0.0198, 0.0204, 0.0080],
+ device='cuda:2'), in_proj_covar=tensor([0.0158, 0.0178, 0.0148, 0.0197, 0.0157, 0.0189, 0.0193, 0.0138],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-09 23:43:16,090 INFO [train.py:898] (2/4) Epoch 29, batch 1750, loss[loss=0.1633, simple_loss=0.2558, pruned_loss=0.03539, over 18348.00 frames. ], tot_loss[loss=0.1536, simple_loss=0.2445, pruned_loss=0.03135, over 3614691.20 frames. ], batch size: 56, lr: 3.87e-03, grad_scale: 8.0
+2023-03-09 23:43:21,082 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103507.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:43:27,008 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.672e+02 2.484e+02 2.930e+02 3.685e+02 5.975e+02, threshold=5.860e+02, percent-clipped=1.0
+2023-03-09 23:44:04,277 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103543.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:44:16,117 INFO [train.py:898] (2/4) Epoch 29, batch 1800, loss[loss=0.1631, simple_loss=0.2521, pruned_loss=0.03711, over 18283.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2443, pruned_loss=0.03112, over 3606199.09 frames. ], batch size: 49, lr: 3.87e-03, grad_scale: 8.0
+2023-03-09 23:44:18,471 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103555.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:44:19,142 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0
+2023-03-09 23:44:58,456 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5001, 5.5080, 5.0328, 5.4038, 5.4484, 4.8556, 5.3470, 5.0026],
+ device='cuda:2'), covar=tensor([0.0616, 0.0581, 0.1681, 0.1041, 0.0765, 0.0543, 0.0597, 0.1181],
+ device='cuda:2'), in_proj_covar=tensor([0.0528, 0.0599, 0.0741, 0.0463, 0.0488, 0.0545, 0.0577, 0.0720],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0005, 0.0005, 0.0006],
+ device='cuda:2')
+2023-03-09 23:45:00,579 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103591.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:45:15,441 INFO [train.py:898] (2/4) Epoch 29, batch 1850, loss[loss=0.1883, simple_loss=0.2735, pruned_loss=0.05155, over 12928.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.2451, pruned_loss=0.03173, over 3586857.63 frames. ], batch size: 129, lr: 3.87e-03, grad_scale: 8.0
+2023-03-09 23:45:25,542 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.856e+02 2.569e+02 2.972e+02 3.705e+02 6.711e+02, threshold=5.945e+02, percent-clipped=3.0
+2023-03-09 23:45:32,648 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0
+2023-03-09 23:45:59,023 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6781, 3.6130, 3.4500, 3.1779, 3.3974, 2.8236, 2.7781, 3.6537],
+ device='cuda:2'), covar=tensor([0.0076, 0.0112, 0.0098, 0.0134, 0.0104, 0.0220, 0.0223, 0.0077],
+ device='cuda:2'), in_proj_covar=tensor([0.0157, 0.0177, 0.0148, 0.0197, 0.0157, 0.0188, 0.0192, 0.0138],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-09 23:46:10,096 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.0533, 5.5468, 3.0598, 5.3432, 5.2428, 5.5555, 5.4283, 2.8936],
+ device='cuda:2'), covar=tensor([0.0200, 0.0060, 0.0628, 0.0062, 0.0064, 0.0059, 0.0063, 0.0923],
+ device='cuda:2'), in_proj_covar=tensor([0.0094, 0.0085, 0.0098, 0.0100, 0.0091, 0.0081, 0.0087, 0.0099],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005],
+ device='cuda:2')
+2023-03-09 23:46:14,392 INFO [train.py:898] (2/4) Epoch 29, batch 1900, loss[loss=0.1333, simple_loss=0.2168, pruned_loss=0.02492, over 17684.00 frames. ], tot_loss[loss=0.155, simple_loss=0.246, pruned_loss=0.032, over 3572618.34 frames. ], batch size: 39, lr: 3.87e-03, grad_scale: 8.0
+2023-03-09 23:46:31,433 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9188, 3.6069, 5.0714, 3.0547, 4.3259, 2.5424, 3.3065, 1.6784],
+ device='cuda:2'), covar=tensor([0.1257, 0.1023, 0.0167, 0.0980, 0.0595, 0.2705, 0.2472, 0.2507],
+ device='cuda:2'), in_proj_covar=tensor([0.0234, 0.0256, 0.0234, 0.0210, 0.0268, 0.0284, 0.0340, 0.0249],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-09 23:47:14,030 INFO [train.py:898] (2/4) Epoch 29, batch 1950, loss[loss=0.1799, simple_loss=0.2618, pruned_loss=0.04894, over 18361.00 frames. ], tot_loss[loss=0.1552, simple_loss=0.2462, pruned_loss=0.0321, over 3573161.49 frames. ], batch size: 46, lr: 3.86e-03, grad_scale: 8.0
+2023-03-09 23:47:15,478 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9537, 4.9809, 5.0743, 4.7610, 4.8473, 4.7994, 5.0910, 5.1286],
+ device='cuda:2'), covar=tensor([0.0069, 0.0069, 0.0056, 0.0123, 0.0056, 0.0149, 0.0096, 0.0102],
+ device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0075, 0.0081, 0.0101, 0.0080, 0.0109, 0.0092, 0.0092],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:2')
+2023-03-09 23:47:24,468 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.577e+02 2.406e+02 2.960e+02 3.499e+02 1.191e+03, threshold=5.920e+02, percent-clipped=3.0
+2023-03-09 23:47:36,649 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103722.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 23:47:57,861 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103740.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:48:13,370 INFO [train.py:898] (2/4) Epoch 29, batch 2000, loss[loss=0.176, simple_loss=0.27, pruned_loss=0.04098, over 18348.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2468, pruned_loss=0.03239, over 3585952.34 frames. ], batch size: 56, lr: 3.86e-03, grad_scale: 8.0
+2023-03-09 23:48:14,852 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103754.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:48:18,475 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0
+2023-03-09 23:48:27,217 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103765.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:48:54,538 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103788.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:48:59,158 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6732, 3.4988, 2.3882, 4.4621, 3.1254, 4.2224, 2.6915, 4.0181],
+ device='cuda:2'), covar=tensor([0.0611, 0.0919, 0.1452, 0.0497, 0.0838, 0.0339, 0.1173, 0.0429],
+ device='cuda:2'), in_proj_covar=tensor([0.0224, 0.0233, 0.0196, 0.0298, 0.0199, 0.0272, 0.0208, 0.0209],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-09 23:49:09,597 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103801.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:49:11,718 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103802.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:49:12,642 INFO [train.py:898] (2/4) Epoch 29, batch 2050, loss[loss=0.1613, simple_loss=0.2577, pruned_loss=0.0325, over 18313.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2465, pruned_loss=0.03232, over 3580957.89 frames. ], batch size: 54, lr: 3.86e-03, grad_scale: 4.0
+2023-03-09 23:49:21,986 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.3232, 5.2843, 5.6162, 5.7348, 5.2277, 6.2003, 5.8475, 5.4960],
+ device='cuda:2'), covar=tensor([0.1064, 0.0623, 0.0780, 0.0616, 0.1411, 0.0672, 0.0659, 0.1554],
+ device='cuda:2'), in_proj_covar=tensor([0.0379, 0.0310, 0.0337, 0.0338, 0.0348, 0.0452, 0.0307, 0.0447],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-09 23:49:23,960 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.828e+02 2.583e+02 3.001e+02 3.513e+02 6.272e+02, threshold=6.003e+02, percent-clipped=1.0
+2023-03-09 23:49:35,327 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0
+2023-03-09 23:49:39,197 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103826.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:49:50,969 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=103836.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:50:10,346 INFO [train.py:898] (2/4) Epoch 29, batch 2100, loss[loss=0.1509, simple_loss=0.244, pruned_loss=0.02887, over 18532.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2464, pruned_loss=0.03229, over 3588179.47 frames. ], batch size: 49, lr: 3.86e-03, grad_scale: 4.0
+2023-03-09 23:50:38,419 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103876.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:50:44,113 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0
+2023-03-09 23:51:09,180 INFO [train.py:898] (2/4) Epoch 29, batch 2150, loss[loss=0.1462, simple_loss=0.2445, pruned_loss=0.02392, over 18629.00 frames. ], tot_loss[loss=0.1553, simple_loss=0.2464, pruned_loss=0.03213, over 3596703.65 frames. ], batch size: 52, lr: 3.86e-03, grad_scale: 4.0
+2023-03-09 23:51:21,588 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.841e+02 2.540e+02 2.937e+02 3.473e+02 7.203e+02, threshold=5.874e+02, percent-clipped=1.0
+2023-03-09 23:51:33,227 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2756, 5.2563, 4.9338, 5.2093, 5.1912, 4.6683, 5.1125, 4.8513],
+ device='cuda:2'), covar=tensor([0.0425, 0.0460, 0.1291, 0.0731, 0.0563, 0.0401, 0.0452, 0.1158],
+ device='cuda:2'), in_proj_covar=tensor([0.0523, 0.0594, 0.0734, 0.0457, 0.0485, 0.0541, 0.0571, 0.0714],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006],
+ device='cuda:2')
+2023-03-09 23:51:43,757 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9321, 5.1781, 2.6208, 5.0694, 4.8553, 5.0994, 4.8928, 2.2988],
+ device='cuda:2'), covar=tensor([0.0269, 0.0138, 0.1014, 0.0112, 0.0129, 0.0199, 0.0192, 0.1653],
+ device='cuda:2'), in_proj_covar=tensor([0.0094, 0.0085, 0.0099, 0.0101, 0.0091, 0.0081, 0.0088, 0.0099],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005],
+ device='cuda:2')
+2023-03-09 23:51:50,070 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103937.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 23:52:07,797 INFO [train.py:898] (2/4) Epoch 29, batch 2200, loss[loss=0.1559, simple_loss=0.2458, pruned_loss=0.03304, over 18263.00 frames. ], tot_loss[loss=0.1556, simple_loss=0.2467, pruned_loss=0.03222, over 3588648.53 frames. ], batch size: 47, lr: 3.86e-03, grad_scale: 4.0
+2023-03-09 23:52:52,490 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103990.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:53:00,533 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103997.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:53:11,842 INFO [train.py:898] (2/4) Epoch 29, batch 2250, loss[loss=0.1443, simple_loss=0.2342, pruned_loss=0.02724, over 18534.00 frames. ], tot_loss[loss=0.1555, simple_loss=0.2466, pruned_loss=0.03224, over 3591800.41 frames. ], batch size: 49, lr: 3.86e-03, grad_scale: 4.0
+2023-03-09 23:53:23,834 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.696e+02 2.605e+02 3.021e+02 3.898e+02 6.777e+02, threshold=6.041e+02, percent-clipped=7.0
+2023-03-09 23:53:35,073 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104022.0, num_to_drop=1, layers_to_drop={2}
+2023-03-09 23:53:55,228 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.00 vs. limit=5.0
+2023-03-09 23:53:59,215 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0
+2023-03-09 23:54:06,854 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104049.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:54:09,214 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104051.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:54:11,012 INFO [train.py:898] (2/4) Epoch 29, batch 2300, loss[loss=0.1696, simple_loss=0.268, pruned_loss=0.03556, over 18498.00 frames. ], tot_loss[loss=0.1561, simple_loss=0.2473, pruned_loss=0.03248, over 3590632.00 frames. ], batch size: 53, lr: 3.86e-03, grad_scale: 4.0
+2023-03-09 23:54:17,128 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104058.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:54:31,635 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104070.0, num_to_drop=1, layers_to_drop={1}
+2023-03-09 23:54:38,474 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7769, 4.3964, 4.3780, 3.2845, 3.6348, 3.4034, 2.6298, 2.4481],
+ device='cuda:2'), covar=tensor([0.0241, 0.0141, 0.0100, 0.0377, 0.0384, 0.0248, 0.0722, 0.0910],
+ device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0066, 0.0071, 0.0074, 0.0095, 0.0073, 0.0080, 0.0089],
+ device='cuda:2'), out_proj_covar=tensor([0.0006, 0.0005, 0.0005, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006],
+ device='cuda:2')
+2023-03-09 23:55:02,119 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104096.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:55:09,793 INFO [train.py:898] (2/4) Epoch 29, batch 2350, loss[loss=0.1574, simple_loss=0.252, pruned_loss=0.03144, over 18360.00 frames. ], tot_loss[loss=0.1568, simple_loss=0.248, pruned_loss=0.03278, over 3592904.73 frames. ], batch size: 55, lr: 3.86e-03, grad_scale: 4.0
+2023-03-09 23:55:17,766 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104110.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:55:20,808 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.751e+02 2.516e+02 3.034e+02 3.575e+02 8.102e+02, threshold=6.068e+02, percent-clipped=2.0
+2023-03-09 23:55:30,847 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104121.0, num_to_drop=0, layers_to_drop=set()
+2023-03-09 23:55:43,911 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6160, 4.0639, 5.2586, 4.5961, 3.1659, 2.9376, 4.6600, 5.5210],
+ device='cuda:2'), covar=tensor([0.0843, 0.1325, 0.0179, 0.0362, 0.1043, 0.1212, 0.0367, 0.0137],
+ device='cuda:2'), in_proj_covar=tensor([0.0156, 0.0286, 0.0178, 0.0189, 0.0199, 0.0199, 0.0203, 0.0217],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-09 23:56:08,417 INFO [train.py:898] (2/4) Epoch 29, batch 2400, loss[loss=0.1572, simple_loss=0.2524, pruned_loss=0.03102, over 18116.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2471, pruned_loss=0.03259, over 3585066.75 frames. ], batch size: 62, lr: 3.86e-03, grad_scale: 8.0
+2023-03-09 23:57:07,524 INFO [train.py:898] (2/4) Epoch 29, batch 2450, loss[loss=0.1781, simple_loss=0.2628, pruned_loss=0.04669, over 15953.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2472, pruned_loss=0.03258, over 3580883.01 frames. ], batch size: 94, lr: 3.86e-03, grad_scale: 8.0
+2023-03-09 23:57:18,500 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.747e+02 2.425e+02 2.919e+02 3.520e+02 8.607e+02, threshold=5.838e+02, percent-clipped=3.0
+2023-03-09 23:57:21,234 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8863, 5.3587, 5.3457, 5.3630, 4.7633, 5.2738, 4.7640, 5.2518],
+ device='cuda:2'), covar=tensor([0.0227, 0.0256, 0.0188, 0.0384, 0.0428, 0.0222, 0.0983, 0.0281],
+ device='cuda:2'), in_proj_covar=tensor([0.0233, 0.0278, 0.0279, 0.0358, 0.0287, 0.0288, 0.0318, 0.0278],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:2')
+2023-03-09 23:57:41,213 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104232.0, num_to_drop=1, layers_to_drop={0}
+2023-03-09 23:58:05,928 INFO [train.py:898] (2/4) Epoch 29, batch 2500, loss[loss=0.1514, simple_loss=0.2411, pruned_loss=0.03085, over 18550.00 frames. ], tot_loss[loss=0.1562, simple_loss=0.2472, pruned_loss=0.03265, over 3578449.83 frames. ], batch size: 54, lr: 3.85e-03, grad_scale: 8.0
+2023-03-09 23:58:16,093 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.3752, 2.7393, 2.5949, 2.8072, 3.5125, 3.4736, 3.0385, 2.7125],
+ device='cuda:2'), covar=tensor([0.0245, 0.0323, 0.0558, 0.0439, 0.0209, 0.0180, 0.0440, 0.0418],
+ device='cuda:2'), in_proj_covar=tensor([0.0149, 0.0149, 0.0168, 0.0168, 0.0145, 0.0132, 0.0164, 0.0167],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-09 23:58:57,891 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7196, 3.5500, 2.5147, 4.5300, 3.3283, 4.3866, 2.8044, 4.1007],
+ device='cuda:2'), covar=tensor([0.0674, 0.0867, 0.1381, 0.0498, 0.0804, 0.0324, 0.1056, 0.0409],
+ device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0235, 0.0197, 0.0301, 0.0200, 0.0274, 0.0209, 0.0210],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-09 23:59:04,436 INFO [train.py:898] (2/4) Epoch 29, batch 2550, loss[loss=0.1803, simple_loss=0.2753, pruned_loss=0.04262, over 18391.00 frames. ], tot_loss[loss=0.1564, simple_loss=0.2472, pruned_loss=0.03283, over 3586941.72 frames. ], batch size: 52, lr: 3.85e-03, grad_scale: 8.0
+2023-03-09 23:59:16,568 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.724e+02 2.479e+02 2.941e+02 3.574e+02 6.228e+02, threshold=5.882e+02, percent-clipped=1.0
+2023-03-09 23:59:29,734 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0
+2023-03-09 23:59:55,465 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104346.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:00:03,140 INFO [train.py:898] (2/4) Epoch 29, batch 2600, loss[loss=0.1718, simple_loss=0.2669, pruned_loss=0.03838, over 18424.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2468, pruned_loss=0.03242, over 3589079.46 frames. ], batch size: 52, lr: 3.85e-03, grad_scale: 8.0
+2023-03-10 00:00:03,394 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104353.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:00:49,844 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7235, 5.1932, 5.2147, 5.2049, 4.6845, 5.1238, 4.6297, 5.0854],
+ device='cuda:2'), covar=tensor([0.0266, 0.0299, 0.0192, 0.0464, 0.0362, 0.0255, 0.1018, 0.0338],
+ device='cuda:2'), in_proj_covar=tensor([0.0236, 0.0280, 0.0281, 0.0360, 0.0288, 0.0290, 0.0320, 0.0280],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:2')
+2023-03-10 00:00:54,765 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104396.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:01:03,255 INFO [train.py:898] (2/4) Epoch 29, batch 2650, loss[loss=0.1529, simple_loss=0.2437, pruned_loss=0.03103, over 18254.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2455, pruned_loss=0.03216, over 3589666.18 frames. ], batch size: 57, lr: 3.85e-03, grad_scale: 8.0
+2023-03-10 00:01:05,809 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104405.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:01:15,456 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 2.433e+02 2.852e+02 3.546e+02 8.268e+02, threshold=5.705e+02, percent-clipped=2.0
+2023-03-10 00:01:24,960 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104421.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:01:28,755 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0416, 4.2381, 2.5674, 4.1867, 5.2914, 2.8625, 3.9628, 4.1036],
+ device='cuda:2'), covar=tensor([0.0186, 0.1368, 0.1588, 0.0630, 0.0104, 0.1133, 0.0676, 0.0723],
+ device='cuda:2'), in_proj_covar=tensor([0.0187, 0.0285, 0.0212, 0.0203, 0.0147, 0.0188, 0.0224, 0.0234],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-10 00:01:52,711 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104444.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:02:03,368 INFO [train.py:898] (2/4) Epoch 29, batch 2700, loss[loss=0.1341, simple_loss=0.2267, pruned_loss=0.02078, over 18482.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2454, pruned_loss=0.03186, over 3586136.06 frames. ], batch size: 47, lr: 3.85e-03, grad_scale: 8.0
+2023-03-10 00:02:22,292 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104469.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:03:01,350 INFO [train.py:898] (2/4) Epoch 29, batch 2750, loss[loss=0.1732, simple_loss=0.2639, pruned_loss=0.04128, over 17870.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2457, pruned_loss=0.032, over 3593975.23 frames. ], batch size: 70, lr: 3.85e-03, grad_scale: 8.0
+2023-03-10 00:03:12,786 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.672e+02 2.565e+02 3.025e+02 3.690e+02 7.436e+02, threshold=6.050e+02, percent-clipped=2.0
+2023-03-10 00:03:35,771 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104532.0, num_to_drop=1, layers_to_drop={2}
+2023-03-10 00:03:59,995 INFO [train.py:898] (2/4) Epoch 29, batch 2800, loss[loss=0.166, simple_loss=0.2667, pruned_loss=0.03264, over 17916.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.2454, pruned_loss=0.03183, over 3604007.84 frames. ], batch size: 65, lr: 3.85e-03, grad_scale: 8.0
+2023-03-10 00:04:28,348 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4633, 5.3800, 5.7563, 5.8241, 5.4154, 6.3146, 5.9339, 5.5176],
+ device='cuda:2'), covar=tensor([0.1059, 0.0604, 0.0727, 0.0688, 0.1261, 0.0571, 0.0583, 0.1488],
+ device='cuda:2'), in_proj_covar=tensor([0.0378, 0.0307, 0.0338, 0.0339, 0.0345, 0.0451, 0.0306, 0.0444],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-10 00:04:31,629 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104580.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:04:45,763 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0
+2023-03-10 00:04:58,843 INFO [train.py:898] (2/4) Epoch 29, batch 2850, loss[loss=0.1485, simple_loss=0.2379, pruned_loss=0.02958, over 18555.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.245, pruned_loss=0.03172, over 3606492.19 frames. ], batch size: 49, lr: 3.85e-03, grad_scale: 8.0
+2023-03-10 00:05:10,927 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.658e+02 2.500e+02 2.877e+02 3.433e+02 5.313e+02, threshold=5.755e+02, percent-clipped=0.0
+2023-03-10 00:05:21,187 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6507, 3.7197, 2.5368, 4.5297, 3.2441, 4.4047, 2.8687, 4.1970],
+ device='cuda:2'), covar=tensor([0.0824, 0.0812, 0.1369, 0.0574, 0.0843, 0.0422, 0.1072, 0.0400],
+ device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0235, 0.0197, 0.0301, 0.0199, 0.0273, 0.0209, 0.0211],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-10 00:05:50,626 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104646.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:05:58,418 INFO [train.py:898] (2/4) Epoch 29, batch 2900, loss[loss=0.1372, simple_loss=0.2199, pruned_loss=0.02719, over 18379.00 frames. ], tot_loss[loss=0.1548, simple_loss=0.2457, pruned_loss=0.03189, over 3597231.81 frames. ], batch size: 42, lr: 3.85e-03, grad_scale: 8.0
+2023-03-10 00:05:58,755 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104653.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:06:47,693 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104694.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:06:56,052 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104701.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:06:58,035 INFO [train.py:898] (2/4) Epoch 29, batch 2950, loss[loss=0.1647, simple_loss=0.2567, pruned_loss=0.03636, over 18503.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2457, pruned_loss=0.03182, over 3590582.98 frames. ], batch size: 53, lr: 3.85e-03, grad_scale: 8.0
+2023-03-10 00:07:00,631 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104705.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:07:09,439 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.771e+02 2.535e+02 2.972e+02 3.582e+02 1.059e+03, threshold=5.945e+02, percent-clipped=4.0
+2023-03-10 00:07:28,604 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7802, 5.3632, 2.5496, 5.1938, 5.0877, 5.3488, 5.1597, 2.7716],
+ device='cuda:2'), covar=tensor([0.0271, 0.0058, 0.0888, 0.0073, 0.0072, 0.0065, 0.0086, 0.0985],
+ device='cuda:2'), in_proj_covar=tensor([0.0094, 0.0084, 0.0098, 0.0100, 0.0091, 0.0080, 0.0087, 0.0098],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005],
+ device='cuda:2')
+2023-03-10 00:07:57,844 INFO [train.py:898] (2/4) Epoch 29, batch 3000, loss[loss=0.1612, simple_loss=0.2592, pruned_loss=0.03161, over 18358.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2457, pruned_loss=0.03174, over 3591786.86 frames. ], batch size: 55, lr: 3.85e-03, grad_scale: 8.0
+2023-03-10 00:07:57,844 INFO [train.py:923] (2/4) Computing validation loss
+2023-03-10 00:08:10,053 INFO [train.py:932] (2/4) Epoch 29, validation: loss=0.1493, simple_loss=0.2471, pruned_loss=0.02574, over 944034.00 frames.
+2023-03-10 00:08:10,054 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB
+2023-03-10 00:08:10,338 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=104753.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:08:11,925 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.81 vs. limit=5.0
+2023-03-10 00:08:51,757 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8971, 3.6734, 5.2425, 3.0253, 4.5855, 2.7069, 3.1239, 1.9396],
+ device='cuda:2'), covar=tensor([0.1236, 0.1077, 0.0135, 0.1022, 0.0480, 0.2683, 0.2761, 0.2353],
+ device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0256, 0.0234, 0.0210, 0.0268, 0.0284, 0.0341, 0.0249],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-10 00:08:54,894 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.4744, 3.8713, 2.3341, 3.6579, 4.7841, 2.5869, 3.5771, 3.6811],
+ device='cuda:2'), covar=tensor([0.0268, 0.1074, 0.1737, 0.0761, 0.0152, 0.1197, 0.0753, 0.0777],
+ device='cuda:2'), in_proj_covar=tensor([0.0188, 0.0287, 0.0212, 0.0204, 0.0148, 0.0188, 0.0225, 0.0234],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-10 00:09:09,608 INFO [train.py:898] (2/4) Epoch 29, batch 3050, loss[loss=0.152, simple_loss=0.2488, pruned_loss=0.0276, over 18499.00 frames. ], tot_loss[loss=0.1548, simple_loss=0.2458, pruned_loss=0.03187, over 3592596.48 frames. ], batch size: 51, lr: 3.84e-03, grad_scale: 8.0
+2023-03-10 00:09:13,247 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4948, 5.4411, 5.1091, 5.3803, 5.3939, 4.8200, 5.3292, 5.0044],
+ device='cuda:2'), covar=tensor([0.0414, 0.0448, 0.1250, 0.0852, 0.0613, 0.0401, 0.0410, 0.1193],
+ device='cuda:2'), in_proj_covar=tensor([0.0520, 0.0590, 0.0731, 0.0459, 0.0486, 0.0538, 0.0564, 0.0710],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006],
+ device='cuda:2')
+2023-03-10 00:09:21,235 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.749e+02 2.540e+02 3.003e+02 3.449e+02 8.629e+02, threshold=6.007e+02, percent-clipped=3.0
+2023-03-10 00:09:38,410 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0
+2023-03-10 00:09:38,968 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5925, 3.4748, 2.3457, 4.3654, 3.0380, 4.0392, 2.5628, 3.8406],
+ device='cuda:2'), covar=tensor([0.0669, 0.0805, 0.1482, 0.0524, 0.0881, 0.0398, 0.1173, 0.0447],
+ device='cuda:2'), in_proj_covar=tensor([0.0228, 0.0238, 0.0200, 0.0304, 0.0202, 0.0278, 0.0212, 0.0213],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-10 00:10:08,524 INFO [train.py:898] (2/4) Epoch 29, batch 3100, loss[loss=0.1625, simple_loss=0.2565, pruned_loss=0.03429, over 18049.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.2457, pruned_loss=0.03168, over 3605046.61 frames. ], batch size: 65, lr: 3.84e-03, grad_scale: 8.0
+2023-03-10 00:10:17,462 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7436, 3.0160, 4.4867, 3.8596, 2.8983, 4.6990, 3.8948, 3.0984],
+ device='cuda:2'), covar=tensor([0.0496, 0.1417, 0.0273, 0.0407, 0.1364, 0.0221, 0.0622, 0.0865],
+ device='cuda:2'), in_proj_covar=tensor([0.0226, 0.0250, 0.0239, 0.0176, 0.0231, 0.0223, 0.0264, 0.0201],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-10 00:11:07,596 INFO [train.py:898] (2/4) Epoch 29, batch 3150, loss[loss=0.1457, simple_loss=0.2351, pruned_loss=0.0282, over 18561.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.245, pruned_loss=0.03172, over 3601515.68 frames. ], batch size: 45, lr: 3.84e-03, grad_scale: 8.0
+2023-03-10 00:11:19,630 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.665e+02 2.384e+02 2.834e+02 3.322e+02 6.284e+02, threshold=5.669e+02, percent-clipped=1.0
+2023-03-10 00:12:06,377 INFO [train.py:898] (2/4) Epoch 29, batch 3200, loss[loss=0.1549, simple_loss=0.2559, pruned_loss=0.02693, over 18333.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2449, pruned_loss=0.03173, over 3598969.99 frames. ], batch size: 54, lr: 3.84e-03, grad_scale: 8.0
+2023-03-10 00:12:07,189 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. limit=5.0
+2023-03-10 00:13:02,426 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0
+2023-03-10 00:13:06,506 INFO [train.py:898] (2/4) Epoch 29, batch 3250, loss[loss=0.156, simple_loss=0.2517, pruned_loss=0.03018, over 18569.00 frames. ], tot_loss[loss=0.1548, simple_loss=0.2457, pruned_loss=0.03196, over 3583889.97 frames. ], batch size: 54, lr: 3.84e-03, grad_scale: 8.0
+2023-03-10 00:13:17,606 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.817e+02 2.551e+02 3.060e+02 3.706e+02 7.132e+02, threshold=6.121e+02, percent-clipped=2.0
+2023-03-10 00:13:32,827 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0
+2023-03-10 00:14:05,587 INFO [train.py:898] (2/4) Epoch 29, batch 3300, loss[loss=0.137, simple_loss=0.2276, pruned_loss=0.0232, over 18370.00 frames. ], tot_loss[loss=0.1548, simple_loss=0.2458, pruned_loss=0.03194, over 3576344.54 frames. ], batch size: 46, lr: 3.84e-03, grad_scale: 8.0
+2023-03-10 00:14:12,556 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6778, 3.0605, 2.7669, 3.0104, 3.7511, 3.6701, 3.2658, 3.0047],
+ device='cuda:2'), covar=tensor([0.0166, 0.0267, 0.0556, 0.0406, 0.0175, 0.0169, 0.0408, 0.0396],
+ device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0149, 0.0168, 0.0168, 0.0145, 0.0132, 0.0165, 0.0167],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-10 00:14:52,832 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2826, 5.2285, 5.5380, 5.5404, 5.2086, 6.0850, 5.7136, 5.3163],
+ device='cuda:2'), covar=tensor([0.1108, 0.0578, 0.0733, 0.0749, 0.1347, 0.0648, 0.0765, 0.1635],
+ device='cuda:2'), in_proj_covar=tensor([0.0376, 0.0308, 0.0336, 0.0340, 0.0344, 0.0448, 0.0305, 0.0442],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-10 00:15:04,546 INFO [train.py:898] (2/4) Epoch 29, batch 3350, loss[loss=0.1389, simple_loss=0.2239, pruned_loss=0.0269, over 18253.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.2455, pruned_loss=0.03179, over 3587311.47 frames. ], batch size: 45, lr: 3.84e-03, grad_scale: 8.0
+2023-03-10 00:15:10,971 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0
+2023-03-10 00:15:15,600 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.933e+02 2.604e+02 2.932e+02 3.657e+02 1.346e+03, threshold=5.864e+02, percent-clipped=3.0
+2023-03-10 00:16:03,631 INFO [train.py:898] (2/4) Epoch 29, batch 3400, loss[loss=0.1741, simple_loss=0.2657, pruned_loss=0.04126, over 18193.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.2452, pruned_loss=0.03173, over 3598972.37 frames. ], batch size: 60, lr: 3.84e-03, grad_scale: 8.0
+2023-03-10 00:16:15,805 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.02 vs. limit=5.0
+2023-03-10 00:16:53,029 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7485, 4.4288, 4.4099, 3.3115, 3.6482, 3.4106, 2.6477, 2.6271],
+ device='cuda:2'), covar=tensor([0.0234, 0.0132, 0.0081, 0.0322, 0.0331, 0.0238, 0.0673, 0.0773],
+ device='cuda:2'), in_proj_covar=tensor([0.0076, 0.0064, 0.0070, 0.0073, 0.0092, 0.0071, 0.0080, 0.0087],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006],
+ device='cuda:2')
+2023-03-10 00:17:01,947 INFO [train.py:898] (2/4) Epoch 29, batch 3450, loss[loss=0.1579, simple_loss=0.2497, pruned_loss=0.03302, over 18255.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.2452, pruned_loss=0.03192, over 3598899.13 frames. ], batch size: 60, lr: 3.84e-03, grad_scale: 8.0
+2023-03-10 00:17:13,784 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.580e+02 2.383e+02 2.808e+02 3.318e+02 5.892e+02, threshold=5.615e+02, percent-clipped=1.0
+2023-03-10 00:17:43,050 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.5790, 4.6008, 4.6899, 4.4548, 4.4657, 4.4681, 4.7657, 4.7682],
+ device='cuda:2'), covar=tensor([0.0083, 0.0082, 0.0075, 0.0120, 0.0073, 0.0148, 0.0086, 0.0093],
+ device='cuda:2'), in_proj_covar=tensor([0.0100, 0.0074, 0.0080, 0.0100, 0.0079, 0.0107, 0.0091, 0.0091],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:2')
+2023-03-10 00:18:00,550 INFO [train.py:898] (2/4) Epoch 29, batch 3500, loss[loss=0.1873, simple_loss=0.2627, pruned_loss=0.05596, over 12880.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.246, pruned_loss=0.03214, over 3586788.79 frames. ], batch size: 130, lr: 3.84e-03, grad_scale: 8.0
+2023-03-10 00:18:42,367 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7915, 4.5062, 4.5489, 3.4580, 3.7157, 3.4376, 2.6553, 2.7313],
+ device='cuda:2'), covar=tensor([0.0234, 0.0144, 0.0080, 0.0305, 0.0322, 0.0255, 0.0747, 0.0786],
+ device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0065, 0.0071, 0.0073, 0.0093, 0.0072, 0.0080, 0.0088],
+ device='cuda:2'), out_proj_covar=tensor([0.0006, 0.0005, 0.0005, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006],
+ device='cuda:2')
+2023-03-10 00:18:55,809 INFO [train.py:898] (2/4) Epoch 29, batch 3550, loss[loss=0.1837, simple_loss=0.2771, pruned_loss=0.04516, over 17998.00 frames. ], tot_loss[loss=0.1558, simple_loss=0.2465, pruned_loss=0.03249, over 3592771.16 frames. ], batch size: 65, lr: 3.83e-03, grad_scale: 8.0
+2023-03-10 00:19:06,460 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.798e+02 2.434e+02 2.865e+02 3.612e+02 9.086e+02, threshold=5.730e+02, percent-clipped=2.0
+2023-03-10 00:19:43,157 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0290, 4.3489, 2.5509, 4.1090, 5.3854, 2.8615, 4.0995, 4.2335],
+ device='cuda:2'), covar=tensor([0.0215, 0.1235, 0.1675, 0.0705, 0.0104, 0.1164, 0.0626, 0.0659],
+ device='cuda:2'), in_proj_covar=tensor([0.0188, 0.0285, 0.0211, 0.0203, 0.0147, 0.0187, 0.0224, 0.0232],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-10 00:19:50,741 INFO [train.py:898] (2/4) Epoch 29, batch 3600, loss[loss=0.1503, simple_loss=0.2432, pruned_loss=0.02867, over 18309.00 frames. ], tot_loss[loss=0.1549, simple_loss=0.2453, pruned_loss=0.03223, over 3586197.55 frames. ], batch size: 54, lr: 3.83e-03, grad_scale: 8.0
+2023-03-10 00:20:04,158 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8564, 3.7460, 5.1601, 2.9878, 4.5758, 2.6974, 3.1008, 1.7957],
+ device='cuda:2'), covar=tensor([0.1287, 0.0959, 0.0189, 0.1069, 0.0489, 0.2888, 0.2992, 0.2475],
+ device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0256, 0.0235, 0.0210, 0.0268, 0.0284, 0.0341, 0.0250],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-10 00:20:20,014 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0
+2023-03-10 00:20:53,457 INFO [train.py:898] (2/4) Epoch 30, batch 0, loss[loss=0.1482, simple_loss=0.2413, pruned_loss=0.02757, over 18314.00 frames. ], tot_loss[loss=0.1482, simple_loss=0.2413, pruned_loss=0.02757, over 18314.00 frames. ], batch size: 54, lr: 3.77e-03, grad_scale: 8.0
+2023-03-10 00:20:53,458 INFO [train.py:923] (2/4) Computing validation loss
+2023-03-10 00:21:02,392 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0663, 4.9623, 4.9925, 4.7174, 4.7152, 4.8099, 5.1283, 5.1066],
+ device='cuda:2'), covar=tensor([0.0100, 0.0078, 0.0075, 0.0153, 0.0076, 0.0169, 0.0120, 0.0149],
+ device='cuda:2'), in_proj_covar=tensor([0.0101, 0.0075, 0.0080, 0.0101, 0.0080, 0.0108, 0.0092, 0.0092],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:2')
+2023-03-10 00:21:05,356 INFO [train.py:932] (2/4) Epoch 30, validation: loss=0.1503, simple_loss=0.2477, pruned_loss=0.02643, over 944034.00 frames.
+2023-03-10 00:21:05,357 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB
+2023-03-10 00:21:06,881 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=105388.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:21:11,618 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9167, 3.7201, 5.1474, 3.0819, 4.5875, 2.6863, 3.1864, 1.8731],
+ device='cuda:2'), covar=tensor([0.1253, 0.1059, 0.0227, 0.1029, 0.0488, 0.2865, 0.2964, 0.2454],
+ device='cuda:2'), in_proj_covar=tensor([0.0234, 0.0256, 0.0235, 0.0209, 0.0268, 0.0283, 0.0340, 0.0250],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-10 00:21:36,266 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.589e+02 2.529e+02 2.877e+02 3.490e+02 8.878e+02, threshold=5.754e+02, percent-clipped=2.0
+2023-03-10 00:22:04,711 INFO [train.py:898] (2/4) Epoch 30, batch 50, loss[loss=0.1711, simple_loss=0.2648, pruned_loss=0.03873, over 18572.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2445, pruned_loss=0.03183, over 800343.06 frames. ], batch size: 54, lr: 3.77e-03, grad_scale: 4.0
+2023-03-10 00:22:18,516 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=105449.0, num_to_drop=1, layers_to_drop={1}
+2023-03-10 00:22:27,099 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.11 vs. limit=5.0
+2023-03-10 00:22:30,148 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0940, 5.1450, 5.3726, 5.4390, 5.0183, 5.8832, 5.5297, 5.1920],
+ device='cuda:2'), covar=tensor([0.1205, 0.0747, 0.0791, 0.0736, 0.1472, 0.0734, 0.0740, 0.1830],
+ device='cuda:2'), in_proj_covar=tensor([0.0377, 0.0309, 0.0337, 0.0340, 0.0343, 0.0451, 0.0305, 0.0443],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-10 00:23:03,216 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=105486.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:23:03,941 INFO [train.py:898] (2/4) Epoch 30, batch 100, loss[loss=0.1317, simple_loss=0.2195, pruned_loss=0.02195, over 18270.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2448, pruned_loss=0.03173, over 1427107.17 frames. ], batch size: 45, lr: 3.77e-03, grad_scale: 2.0
+2023-03-10 00:23:21,401 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=105502.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:23:36,856 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.707e+02 2.490e+02 2.952e+02 3.451e+02 6.193e+02, threshold=5.904e+02, percent-clipped=1.0
+2023-03-10 00:23:50,629 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8412, 2.6129, 2.8436, 2.9258, 3.3753, 4.9094, 5.0529, 3.3840],
+ device='cuda:2'), covar=tensor([0.2077, 0.2549, 0.3071, 0.1922, 0.2469, 0.0287, 0.0302, 0.1160],
+ device='cuda:2'), in_proj_covar=tensor([0.0339, 0.0370, 0.0423, 0.0297, 0.0404, 0.0273, 0.0306, 0.0278],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002],
+ device='cuda:2')
+2023-03-10 00:24:02,930 INFO [train.py:898] (2/4) Epoch 30, batch 150, loss[loss=0.1796, simple_loss=0.2672, pruned_loss=0.04604, over 18147.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2443, pruned_loss=0.03159, over 1907534.18 frames. ], batch size: 62, lr: 3.77e-03, grad_scale: 2.0
+2023-03-10 00:24:14,920 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=105547.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:24:23,138 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=2.87 vs. limit=5.0
+2023-03-10 00:24:33,655 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=105563.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:25:02,153 INFO [train.py:898] (2/4) Epoch 30, batch 200, loss[loss=0.142, simple_loss=0.2336, pruned_loss=0.02517, over 18380.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2446, pruned_loss=0.03182, over 2275052.72 frames. ], batch size: 50, lr: 3.76e-03, grad_scale: 2.0
+2023-03-10 00:25:06,956 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9195, 4.1724, 2.6100, 4.0420, 5.2816, 2.9453, 3.8921, 3.8645],
+ device='cuda:2'), covar=tensor([0.0204, 0.1317, 0.1417, 0.0622, 0.0068, 0.0940, 0.0601, 0.0741],
+ device='cuda:2'), in_proj_covar=tensor([0.0188, 0.0285, 0.0212, 0.0203, 0.0147, 0.0187, 0.0224, 0.0233],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-10 00:25:34,559 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.818e+02 2.661e+02 3.049e+02 3.613e+02 6.015e+02, threshold=6.098e+02, percent-clipped=1.0
+2023-03-10 00:25:34,981 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5260, 3.3838, 2.1515, 4.3573, 3.0890, 4.1197, 2.4559, 3.8406],
+ device='cuda:2'), covar=tensor([0.0745, 0.0951, 0.1667, 0.0552, 0.0909, 0.0340, 0.1310, 0.0455],
+ device='cuda:2'), in_proj_covar=tensor([0.0226, 0.0237, 0.0200, 0.0305, 0.0201, 0.0276, 0.0212, 0.0213],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-10 00:26:01,238 INFO [train.py:898] (2/4) Epoch 30, batch 250, loss[loss=0.1529, simple_loss=0.2531, pruned_loss=0.02639, over 18582.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2452, pruned_loss=0.03186, over 2565015.27 frames. ], batch size: 54, lr: 3.76e-03, grad_scale: 2.0
+2023-03-10 00:26:16,627 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0
+2023-03-10 00:26:29,606 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8797, 5.2500, 2.5688, 5.0259, 4.8836, 5.2086, 4.9704, 2.2409],
+ device='cuda:2'), covar=tensor([0.0290, 0.0101, 0.1036, 0.0142, 0.0124, 0.0125, 0.0147, 0.1691],
+ device='cuda:2'), in_proj_covar=tensor([0.0093, 0.0083, 0.0097, 0.0099, 0.0090, 0.0080, 0.0087, 0.0098],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0004, 0.0005],
+ device='cuda:2')
+2023-03-10 00:26:56,638 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8614, 4.6417, 4.6348, 3.6272, 3.7934, 3.5395, 2.9720, 2.8668],
+ device='cuda:2'), covar=tensor([0.0229, 0.0124, 0.0075, 0.0302, 0.0345, 0.0248, 0.0610, 0.0734],
+ device='cuda:2'), in_proj_covar=tensor([0.0076, 0.0064, 0.0071, 0.0073, 0.0093, 0.0072, 0.0079, 0.0088],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006],
+ device='cuda:2')
+2023-03-10 00:26:59,687 INFO [train.py:898] (2/4) Epoch 30, batch 300, loss[loss=0.1424, simple_loss=0.2351, pruned_loss=0.02483, over 18537.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2453, pruned_loss=0.0317, over 2803299.13 frames. ], batch size: 49, lr: 3.76e-03, grad_scale: 2.0
+2023-03-10 00:27:07,407 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5146, 2.8771, 4.4001, 3.5733, 2.7301, 4.6126, 3.8831, 3.0249],
+ device='cuda:2'), covar=tensor([0.0655, 0.1524, 0.0291, 0.0562, 0.1526, 0.0229, 0.0577, 0.0949],
+ device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0247, 0.0239, 0.0176, 0.0228, 0.0223, 0.0261, 0.0201],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-10 00:27:32,636 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.857e+02 2.459e+02 2.793e+02 3.329e+02 5.201e+02, threshold=5.586e+02, percent-clipped=0.0
+2023-03-10 00:27:58,847 INFO [train.py:898] (2/4) Epoch 30, batch 350, loss[loss=0.1449, simple_loss=0.239, pruned_loss=0.0254, over 18337.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2453, pruned_loss=0.03158, over 2988696.68 frames. ], batch size: 55, lr: 3.76e-03, grad_scale: 2.0
+2023-03-10 00:28:08,222 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105744.0, num_to_drop=1, layers_to_drop={1}
+2023-03-10 00:28:57,667 INFO [train.py:898] (2/4) Epoch 30, batch 400, loss[loss=0.1477, simple_loss=0.2437, pruned_loss=0.02589, over 18399.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.2445, pruned_loss=0.03124, over 3134043.76 frames. ], batch size: 52, lr: 3.76e-03, grad_scale: 4.0
+2023-03-10 00:29:03,562 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=105792.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:29:12,610 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=105799.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 00:29:30,415 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.921e+02 2.502e+02 2.978e+02 3.750e+02 7.883e+02, threshold=5.955e+02, percent-clipped=2.0
+2023-03-10 00:29:35,933 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.75 vs. limit=5.0
+2023-03-10 00:29:47,185 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6711, 4.4501, 4.4282, 3.3994, 3.6272, 3.3318, 2.6487, 2.4470],
+ device='cuda:2'), covar=tensor([0.0257, 0.0147, 0.0087, 0.0329, 0.0352, 0.0268, 0.0754, 0.0941],
+ device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0064, 0.0071, 0.0073, 0.0093, 0.0072, 0.0080, 0.0088],
+ device='cuda:2'), out_proj_covar=tensor([0.0006, 0.0005, 0.0005, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006],
+ device='cuda:2')
+2023-03-10 00:29:55,602 INFO [train.py:898] (2/4) Epoch 30, batch 450, loss[loss=0.1783, simple_loss=0.2658, pruned_loss=0.04536, over 18133.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2446, pruned_loss=0.03141, over 3234724.89 frames.
], batch size: 62, lr: 3.76e-03, grad_scale: 4.0 +2023-03-10 00:30:01,544 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105842.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:30:15,347 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=105853.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:30:20,942 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105858.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:30:23,547 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=105860.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:30:54,159 INFO [train.py:898] (2/4) Epoch 30, batch 500, loss[loss=0.1754, simple_loss=0.2747, pruned_loss=0.03806, over 18590.00 frames. ], tot_loss[loss=0.1529, simple_loss=0.2437, pruned_loss=0.03108, over 3318497.83 frames. ], batch size: 54, lr: 3.76e-03, grad_scale: 4.0 +2023-03-10 00:31:21,487 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0170, 4.9942, 4.7170, 4.9453, 4.9347, 4.4415, 4.8753, 4.6356], + device='cuda:2'), covar=tensor([0.0434, 0.0546, 0.1231, 0.0730, 0.0584, 0.0431, 0.0450, 0.1164], + device='cuda:2'), in_proj_covar=tensor([0.0519, 0.0588, 0.0725, 0.0456, 0.0491, 0.0535, 0.0566, 0.0707], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-10 00:31:25,882 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.730e+02 2.387e+02 2.835e+02 3.555e+02 6.672e+02, threshold=5.669e+02, percent-clipped=2.0 +2023-03-10 00:31:51,029 INFO [train.py:898] (2/4) Epoch 30, batch 550, loss[loss=0.1539, simple_loss=0.2498, pruned_loss=0.02906, over 18626.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2452, pruned_loss=0.03182, over 3371974.14 frames. ], batch size: 52, lr: 3.76e-03, grad_scale: 4.0 +2023-03-10 00:32:20,196 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6789, 4.7358, 4.7929, 4.4992, 4.4882, 4.5382, 4.8556, 4.8735], + device='cuda:2'), covar=tensor([0.0081, 0.0072, 0.0073, 0.0137, 0.0073, 0.0196, 0.0086, 0.0088], + device='cuda:2'), in_proj_covar=tensor([0.0103, 0.0077, 0.0082, 0.0103, 0.0082, 0.0112, 0.0094, 0.0094], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004], + device='cuda:2') +2023-03-10 00:32:47,910 INFO [train.py:898] (2/4) Epoch 30, batch 600, loss[loss=0.155, simple_loss=0.2521, pruned_loss=0.02897, over 17367.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2451, pruned_loss=0.03154, over 3425706.21 frames. ], batch size: 78, lr: 3.76e-03, grad_scale: 4.0 +2023-03-10 00:33:13,000 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. 
limit=2.0 +2023-03-10 00:33:26,795 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.547e+02 2.468e+02 2.895e+02 3.494e+02 7.378e+02, threshold=5.790e+02, percent-clipped=2.0 +2023-03-10 00:33:38,568 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6726, 3.6672, 3.5165, 3.1711, 3.4013, 2.8622, 2.8189, 3.6491], + device='cuda:2'), covar=tensor([0.0075, 0.0093, 0.0097, 0.0156, 0.0116, 0.0199, 0.0224, 0.0074], + device='cuda:2'), in_proj_covar=tensor([0.0160, 0.0179, 0.0149, 0.0199, 0.0159, 0.0190, 0.0194, 0.0139], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002], + device='cuda:2') +2023-03-10 00:33:51,606 INFO [train.py:898] (2/4) Epoch 30, batch 650, loss[loss=0.1606, simple_loss=0.2514, pruned_loss=0.03484, over 18408.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2445, pruned_loss=0.03107, over 3465109.02 frames. ], batch size: 52, lr: 3.76e-03, grad_scale: 4.0 +2023-03-10 00:33:56,867 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-03-10 00:34:00,494 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106044.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:34:12,763 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4459, 5.3146, 5.7140, 5.7334, 5.3511, 6.2394, 5.8256, 5.4760], + device='cuda:2'), covar=tensor([0.1078, 0.0620, 0.0796, 0.0855, 0.1407, 0.0672, 0.0648, 0.1445], + device='cuda:2'), in_proj_covar=tensor([0.0379, 0.0309, 0.0337, 0.0341, 0.0347, 0.0449, 0.0306, 0.0445], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-10 00:34:38,675 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106077.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:34:49,502 INFO [train.py:898] (2/4) Epoch 30, batch 700, loss[loss=0.1567, simple_loss=0.253, pruned_loss=0.03022, over 17193.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.2448, pruned_loss=0.03109, over 3498977.30 frames. ], batch size: 78, lr: 3.76e-03, grad_scale: 4.0 +2023-03-10 00:34:55,337 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106092.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:35:22,424 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.614e+02 2.517e+02 2.950e+02 3.495e+02 6.202e+02, threshold=5.900e+02, percent-clipped=1.0 +2023-03-10 00:35:48,488 INFO [train.py:898] (2/4) Epoch 30, batch 750, loss[loss=0.1587, simple_loss=0.2468, pruned_loss=0.03529, over 18301.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2444, pruned_loss=0.03106, over 3519577.20 frames. 
], batch size: 49, lr: 3.75e-03, grad_scale: 4.0 +2023-03-10 00:35:50,031 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106138.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:35:54,430 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106142.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:36:02,111 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106148.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:36:10,022 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106155.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:36:13,395 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106158.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:36:39,938 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-03-10 00:36:46,215 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7150, 3.6396, 4.9410, 4.4764, 3.4335, 2.9794, 4.4461, 5.2190], + device='cuda:2'), covar=tensor([0.0843, 0.1439, 0.0241, 0.0406, 0.0935, 0.1223, 0.0405, 0.0331], + device='cuda:2'), in_proj_covar=tensor([0.0157, 0.0289, 0.0180, 0.0190, 0.0201, 0.0198, 0.0206, 0.0219], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-10 00:36:46,890 INFO [train.py:898] (2/4) Epoch 30, batch 800, loss[loss=0.1399, simple_loss=0.2339, pruned_loss=0.02296, over 18400.00 frames. ], tot_loss[loss=0.1532, simple_loss=0.2444, pruned_loss=0.03101, over 3536419.16 frames. ], batch size: 50, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:36:50,259 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106190.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:37:09,721 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106206.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:37:19,671 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.545e+02 2.490e+02 2.877e+02 3.308e+02 6.408e+02, threshold=5.753e+02, percent-clipped=1.0 +2023-03-10 00:37:45,817 INFO [train.py:898] (2/4) Epoch 30, batch 850, loss[loss=0.1451, simple_loss=0.2363, pruned_loss=0.02697, over 18297.00 frames. ], tot_loss[loss=0.1529, simple_loss=0.2442, pruned_loss=0.03078, over 3546028.15 frames. ], batch size: 49, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:38:11,728 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4881, 3.2927, 2.2586, 4.2970, 2.9224, 3.9992, 2.5615, 3.8547], + device='cuda:2'), covar=tensor([0.0709, 0.0936, 0.1523, 0.0502, 0.0966, 0.0301, 0.1243, 0.0431], + device='cuda:2'), in_proj_covar=tensor([0.0223, 0.0234, 0.0197, 0.0300, 0.0198, 0.0272, 0.0208, 0.0210], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:2') +2023-03-10 00:38:21,653 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106268.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:38:42,951 INFO [train.py:898] (2/4) Epoch 30, batch 900, loss[loss=0.1437, simple_loss=0.2277, pruned_loss=0.02979, over 18260.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2452, pruned_loss=0.03118, over 3558687.65 frames. 
], batch size: 47, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:39:08,420 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7313, 3.9787, 2.2162, 3.9599, 5.0975, 2.4677, 3.8104, 3.9327], + device='cuda:2'), covar=tensor([0.0220, 0.1254, 0.1746, 0.0635, 0.0118, 0.1292, 0.0691, 0.0743], + device='cuda:2'), in_proj_covar=tensor([0.0187, 0.0286, 0.0212, 0.0203, 0.0148, 0.0187, 0.0224, 0.0233], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-10 00:39:15,256 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.738e+02 2.494e+02 3.069e+02 3.732e+02 1.058e+03, threshold=6.138e+02, percent-clipped=4.0 +2023-03-10 00:39:30,917 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-03-10 00:39:31,495 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106329.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:39:39,790 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106336.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:39:40,577 INFO [train.py:898] (2/4) Epoch 30, batch 950, loss[loss=0.1672, simple_loss=0.2604, pruned_loss=0.03701, over 17117.00 frames. ], tot_loss[loss=0.1527, simple_loss=0.2442, pruned_loss=0.03065, over 3574460.17 frames. ], batch size: 78, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:40:39,192 INFO [train.py:898] (2/4) Epoch 30, batch 1000, loss[loss=0.1493, simple_loss=0.2363, pruned_loss=0.03112, over 18396.00 frames. ], tot_loss[loss=0.1523, simple_loss=0.2432, pruned_loss=0.03068, over 3579554.58 frames. ], batch size: 48, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:40:49,581 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9288, 4.2094, 2.4408, 4.1810, 5.2857, 2.6695, 3.9889, 4.0732], + device='cuda:2'), covar=tensor([0.0203, 0.1119, 0.1642, 0.0607, 0.0094, 0.1190, 0.0628, 0.0745], + device='cuda:2'), in_proj_covar=tensor([0.0189, 0.0288, 0.0213, 0.0205, 0.0149, 0.0188, 0.0226, 0.0235], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-10 00:40:50,721 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106397.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:41:11,525 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.702e+02 2.421e+02 2.824e+02 3.495e+02 5.852e+02, threshold=5.647e+02, percent-clipped=0.0 +2023-03-10 00:41:32,639 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106433.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:41:37,610 INFO [train.py:898] (2/4) Epoch 30, batch 1050, loss[loss=0.1339, simple_loss=0.2191, pruned_loss=0.02431, over 17689.00 frames. ], tot_loss[loss=0.1523, simple_loss=0.2432, pruned_loss=0.03067, over 3585581.02 frames. ], batch size: 39, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:41:50,961 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106448.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:41:55,026 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. 
limit=2.0 +2023-03-10 00:41:57,861 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106454.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:41:58,996 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106455.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:42:35,944 INFO [train.py:898] (2/4) Epoch 30, batch 1100, loss[loss=0.1441, simple_loss=0.2346, pruned_loss=0.02682, over 18290.00 frames. ], tot_loss[loss=0.1531, simple_loss=0.2446, pruned_loss=0.03087, over 3575562.43 frames. ], batch size: 49, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:42:46,776 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106496.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:42:54,828 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106503.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:43:08,575 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.832e+02 2.448e+02 2.852e+02 3.322e+02 5.869e+02, threshold=5.705e+02, percent-clipped=1.0 +2023-03-10 00:43:09,027 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106515.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:43:33,595 INFO [train.py:898] (2/4) Epoch 30, batch 1150, loss[loss=0.1577, simple_loss=0.2501, pruned_loss=0.03264, over 18616.00 frames. ], tot_loss[loss=0.153, simple_loss=0.2443, pruned_loss=0.0308, over 3596795.16 frames. ], batch size: 52, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:43:51,979 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9545, 3.7300, 5.0898, 4.4181, 3.3613, 3.0593, 4.4840, 5.2959], + device='cuda:2'), covar=tensor([0.0707, 0.1305, 0.0197, 0.0383, 0.0956, 0.1155, 0.0379, 0.0187], + device='cuda:2'), in_proj_covar=tensor([0.0157, 0.0288, 0.0180, 0.0190, 0.0202, 0.0198, 0.0205, 0.0219], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-10 00:44:32,011 INFO [train.py:898] (2/4) Epoch 30, batch 1200, loss[loss=0.1429, simple_loss=0.2341, pruned_loss=0.02584, over 18277.00 frames. ], tot_loss[loss=0.153, simple_loss=0.2442, pruned_loss=0.03091, over 3589499.30 frames. ], batch size: 45, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:45:04,662 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.878e+02 2.700e+02 3.058e+02 3.812e+02 6.890e+02, threshold=6.116e+02, percent-clipped=3.0 +2023-03-10 00:45:15,359 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106624.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:45:30,329 INFO [train.py:898] (2/4) Epoch 30, batch 1250, loss[loss=0.1593, simple_loss=0.2541, pruned_loss=0.03226, over 16136.00 frames. ], tot_loss[loss=0.1529, simple_loss=0.2443, pruned_loss=0.03078, over 3598064.23 frames. ], batch size: 94, lr: 3.75e-03, grad_scale: 8.0 +2023-03-10 00:45:55,835 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7599, 4.5007, 4.4912, 3.4720, 3.7117, 3.4523, 2.6770, 2.6062], + device='cuda:2'), covar=tensor([0.0231, 0.0151, 0.0081, 0.0306, 0.0323, 0.0248, 0.0690, 0.0799], + device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0064, 0.0071, 0.0073, 0.0092, 0.0071, 0.0079, 0.0087], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0005, 0.0005, 0.0005, 0.0006, 0.0005, 0.0006, 0.0006], + device='cuda:2') +2023-03-10 00:46:00,766 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. 
limit=2.0 +2023-03-10 00:46:28,936 INFO [train.py:898] (2/4) Epoch 30, batch 1300, loss[loss=0.1533, simple_loss=0.2514, pruned_loss=0.02761, over 18502.00 frames. ], tot_loss[loss=0.1534, simple_loss=0.245, pruned_loss=0.03095, over 3593131.49 frames. ], batch size: 51, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:46:34,896 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106692.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:46:44,260 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.4979, 2.1051, 2.0756, 2.1462, 2.4490, 2.4738, 2.3833, 2.1184], + device='cuda:2'), covar=tensor([0.0261, 0.0265, 0.0474, 0.0441, 0.0255, 0.0228, 0.0425, 0.0329], + device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0152, 0.0170, 0.0170, 0.0146, 0.0133, 0.0165, 0.0168], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004], + device='cuda:2') +2023-03-10 00:47:01,418 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.810e+02 2.359e+02 2.806e+02 3.557e+02 5.057e+02, threshold=5.611e+02, percent-clipped=0.0 +2023-03-10 00:47:22,152 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106733.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:47:26,895 INFO [train.py:898] (2/4) Epoch 30, batch 1350, loss[loss=0.135, simple_loss=0.2235, pruned_loss=0.02328, over 18568.00 frames. ], tot_loss[loss=0.1529, simple_loss=0.2443, pruned_loss=0.03079, over 3600670.43 frames. ], batch size: 45, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:47:27,384 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.5189, 2.3022, 2.4775, 2.5615, 2.8971, 4.2477, 4.2881, 3.0972], + device='cuda:2'), covar=tensor([0.2253, 0.2755, 0.3354, 0.2134, 0.2889, 0.0418, 0.0435, 0.1111], + device='cuda:2'), in_proj_covar=tensor([0.0339, 0.0367, 0.0422, 0.0294, 0.0402, 0.0272, 0.0305, 0.0278], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-10 00:48:18,858 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106781.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:48:25,460 INFO [train.py:898] (2/4) Epoch 30, batch 1400, loss[loss=0.1689, simple_loss=0.2632, pruned_loss=0.03725, over 17989.00 frames. ], tot_loss[loss=0.1525, simple_loss=0.2435, pruned_loss=0.03074, over 3604111.17 frames. ], batch size: 65, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:48:45,024 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. 
limit=2.0 +2023-03-10 00:48:52,879 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106810.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:48:58,341 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.725e+02 2.645e+02 3.103e+02 3.852e+02 9.390e+02, threshold=6.206e+02, percent-clipped=1.0 +2023-03-10 00:49:00,945 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.2534, 4.1794, 4.0405, 4.1677, 4.2044, 3.7783, 4.1814, 4.0297], + device='cuda:2'), covar=tensor([0.0490, 0.0841, 0.1231, 0.0757, 0.0673, 0.0469, 0.0503, 0.1058], + device='cuda:2'), in_proj_covar=tensor([0.0529, 0.0601, 0.0742, 0.0465, 0.0500, 0.0546, 0.0580, 0.0724], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0005, 0.0005, 0.0006], + device='cuda:2') +2023-03-10 00:49:23,854 INFO [train.py:898] (2/4) Epoch 30, batch 1450, loss[loss=0.147, simple_loss=0.2328, pruned_loss=0.03058, over 18257.00 frames. ], tot_loss[loss=0.1527, simple_loss=0.2439, pruned_loss=0.03077, over 3610422.47 frames. ], batch size: 47, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:49:54,998 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.8112, 4.1756, 2.5620, 3.9775, 5.2279, 2.5433, 3.7869, 4.0467], + device='cuda:2'), covar=tensor([0.0228, 0.1247, 0.1541, 0.0682, 0.0104, 0.1203, 0.0715, 0.0709], + device='cuda:2'), in_proj_covar=tensor([0.0188, 0.0286, 0.0212, 0.0204, 0.0148, 0.0187, 0.0225, 0.0233], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-10 00:50:22,128 INFO [train.py:898] (2/4) Epoch 30, batch 1500, loss[loss=0.1821, simple_loss=0.2717, pruned_loss=0.04625, over 17846.00 frames. ], tot_loss[loss=0.1529, simple_loss=0.2438, pruned_loss=0.03099, over 3597950.30 frames. ], batch size: 70, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:50:55,410 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.852e+02 2.528e+02 2.938e+02 3.558e+02 8.053e+02, threshold=5.876e+02, percent-clipped=1.0 +2023-03-10 00:51:05,848 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106924.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:51:20,639 INFO [train.py:898] (2/4) Epoch 30, batch 1550, loss[loss=0.1279, simple_loss=0.2106, pruned_loss=0.02261, over 18416.00 frames. ], tot_loss[loss=0.1531, simple_loss=0.2438, pruned_loss=0.03118, over 3602347.66 frames. ], batch size: 43, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:51:30,554 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8069, 3.6393, 5.2841, 3.4099, 4.4705, 2.6463, 3.3112, 1.7199], + device='cuda:2'), covar=tensor([0.1304, 0.0981, 0.0144, 0.0816, 0.0516, 0.2499, 0.2452, 0.2391], + device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0258, 0.0236, 0.0211, 0.0269, 0.0284, 0.0343, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-10 00:52:01,403 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=106972.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:52:18,634 INFO [train.py:898] (2/4) Epoch 30, batch 1600, loss[loss=0.1355, simple_loss=0.2214, pruned_loss=0.02482, over 18270.00 frames. ], tot_loss[loss=0.1528, simple_loss=0.2433, pruned_loss=0.0312, over 3593076.23 frames. 
], batch size: 45, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:52:20,112 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.4158, 2.7517, 4.0787, 3.5032, 2.5307, 4.2743, 3.7419, 2.7273], + device='cuda:2'), covar=tensor([0.0659, 0.1673, 0.0349, 0.0552, 0.1760, 0.0321, 0.0707, 0.1083], + device='cuda:2'), in_proj_covar=tensor([0.0224, 0.0246, 0.0239, 0.0174, 0.0226, 0.0221, 0.0259, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-10 00:52:24,690 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106992.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:52:50,787 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.531e+02 2.519e+02 3.033e+02 3.655e+02 1.046e+03, threshold=6.066e+02, percent-clipped=4.0 +2023-03-10 00:53:16,396 INFO [train.py:898] (2/4) Epoch 30, batch 1650, loss[loss=0.1569, simple_loss=0.2463, pruned_loss=0.03379, over 17789.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2444, pruned_loss=0.03165, over 3579814.36 frames. ], batch size: 70, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:53:20,455 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107040.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:54:14,367 INFO [train.py:898] (2/4) Epoch 30, batch 1700, loss[loss=0.1244, simple_loss=0.21, pruned_loss=0.01942, over 18401.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2447, pruned_loss=0.0318, over 3569710.68 frames. ], batch size: 42, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:54:26,705 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7297, 2.3644, 2.6442, 2.7265, 3.1022, 4.7511, 4.7912, 3.1467], + device='cuda:2'), covar=tensor([0.2248, 0.2800, 0.3261, 0.2058, 0.2743, 0.0316, 0.0347, 0.1235], + device='cuda:2'), in_proj_covar=tensor([0.0340, 0.0369, 0.0424, 0.0295, 0.0403, 0.0273, 0.0307, 0.0279], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-10 00:54:41,447 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107110.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:54:46,861 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.859e+02 2.555e+02 2.944e+02 3.831e+02 7.585e+02, threshold=5.887e+02, percent-clipped=1.0 +2023-03-10 00:55:12,545 INFO [train.py:898] (2/4) Epoch 30, batch 1750, loss[loss=0.135, simple_loss=0.2201, pruned_loss=0.02497, over 18412.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.2441, pruned_loss=0.03142, over 3575638.04 frames. ], batch size: 42, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:55:17,046 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107140.0, num_to_drop=1, layers_to_drop={0} +2023-03-10 00:55:30,586 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107152.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:55:37,719 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107158.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 00:56:12,004 INFO [train.py:898] (2/4) Epoch 30, batch 1800, loss[loss=0.1369, simple_loss=0.2195, pruned_loss=0.02716, over 17231.00 frames. ], tot_loss[loss=0.1538, simple_loss=0.2448, pruned_loss=0.03141, over 3568336.03 frames. 
], batch size: 38, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:56:29,002 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107201.0, num_to_drop=1, layers_to_drop={2} +2023-03-10 00:56:43,072 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107213.0, num_to_drop=1, layers_to_drop={0} +2023-03-10 00:56:44,848 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.514e+02 2.402e+02 2.925e+02 3.436e+02 5.429e+02, threshold=5.850e+02, percent-clipped=0.0 +2023-03-10 00:57:09,815 INFO [train.py:898] (2/4) Epoch 30, batch 1850, loss[loss=0.1401, simple_loss=0.2288, pruned_loss=0.02569, over 18549.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2456, pruned_loss=0.03166, over 3568961.89 frames. ], batch size: 49, lr: 3.74e-03, grad_scale: 8.0 +2023-03-10 00:57:21,897 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.6769, 5.6360, 5.2834, 5.5924, 5.5657, 4.9812, 5.4913, 5.2424], + device='cuda:2'), covar=tensor([0.0407, 0.0413, 0.1217, 0.0764, 0.0545, 0.0376, 0.0412, 0.1046], + device='cuda:2'), in_proj_covar=tensor([0.0525, 0.0595, 0.0738, 0.0464, 0.0498, 0.0541, 0.0573, 0.0718], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-10 00:58:08,615 INFO [train.py:898] (2/4) Epoch 30, batch 1900, loss[loss=0.1672, simple_loss=0.2624, pruned_loss=0.03602, over 18573.00 frames. ], tot_loss[loss=0.154, simple_loss=0.245, pruned_loss=0.0315, over 3575246.22 frames. ], batch size: 54, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 00:58:37,325 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-03-10 00:58:41,095 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.738e+02 2.430e+02 2.951e+02 3.799e+02 8.497e+02, threshold=5.903e+02, percent-clipped=5.0 +2023-03-10 00:58:52,881 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.6382, 2.3171, 2.4873, 2.7296, 3.0712, 4.8342, 4.8297, 3.2718], + device='cuda:2'), covar=tensor([0.2400, 0.3238, 0.3801, 0.2240, 0.3291, 0.0329, 0.0367, 0.1139], + device='cuda:2'), in_proj_covar=tensor([0.0338, 0.0367, 0.0422, 0.0294, 0.0400, 0.0271, 0.0305, 0.0277], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0001, 0.0002], + device='cuda:2') +2023-03-10 00:59:07,027 INFO [train.py:898] (2/4) Epoch 30, batch 1950, loss[loss=0.157, simple_loss=0.2538, pruned_loss=0.03007, over 18128.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.2444, pruned_loss=0.03127, over 3585315.70 frames. ], batch size: 62, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 00:59:09,637 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.6474, 3.0980, 4.4415, 3.7558, 2.7510, 4.7179, 3.9336, 3.1306], + device='cuda:2'), covar=tensor([0.0559, 0.1346, 0.0303, 0.0497, 0.1577, 0.0229, 0.0592, 0.0872], + device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0245, 0.0239, 0.0175, 0.0226, 0.0220, 0.0259, 0.0199], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-10 01:00:04,529 INFO [train.py:898] (2/4) Epoch 30, batch 2000, loss[loss=0.1596, simple_loss=0.2531, pruned_loss=0.03309, over 18414.00 frames. ], tot_loss[loss=0.1535, simple_loss=0.2443, pruned_loss=0.03133, over 3586227.76 frames. 
], batch size: 52, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 01:00:38,311 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.734e+02 2.377e+02 2.711e+02 3.457e+02 7.129e+02, threshold=5.422e+02, percent-clipped=2.0 +2023-03-10 01:01:03,315 INFO [train.py:898] (2/4) Epoch 30, batch 2050, loss[loss=0.1564, simple_loss=0.2513, pruned_loss=0.03072, over 18195.00 frames. ], tot_loss[loss=0.1532, simple_loss=0.2439, pruned_loss=0.03123, over 3585648.38 frames. ], batch size: 60, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 01:01:53,116 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1043, 3.8703, 5.3322, 2.9657, 4.5859, 2.7038, 3.2538, 1.9196], + device='cuda:2'), covar=tensor([0.1162, 0.0969, 0.0138, 0.1037, 0.0520, 0.2685, 0.2671, 0.2271], + device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0257, 0.0236, 0.0211, 0.0270, 0.0286, 0.0342, 0.0251], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-10 01:02:02,304 INFO [train.py:898] (2/4) Epoch 30, batch 2100, loss[loss=0.1904, simple_loss=0.277, pruned_loss=0.05188, over 18013.00 frames. ], tot_loss[loss=0.153, simple_loss=0.2438, pruned_loss=0.03108, over 3593426.50 frames. ], batch size: 65, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 01:02:13,471 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107496.0, num_to_drop=1, layers_to_drop={2} +2023-03-10 01:02:27,089 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107508.0, num_to_drop=1, layers_to_drop={0} +2023-03-10 01:02:29,426 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107510.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:02:36,353 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.605e+02 2.608e+02 2.930e+02 3.296e+02 8.592e+02, threshold=5.861e+02, percent-clipped=2.0 +2023-03-10 01:02:54,050 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.4295, 5.4102, 5.0373, 5.3424, 5.3554, 4.7659, 5.2525, 4.9915], + device='cuda:2'), covar=tensor([0.0457, 0.0461, 0.1309, 0.0818, 0.0575, 0.0416, 0.0451, 0.1238], + device='cuda:2'), in_proj_covar=tensor([0.0526, 0.0596, 0.0737, 0.0464, 0.0497, 0.0542, 0.0577, 0.0721], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0004, 0.0005, 0.0004, 0.0005, 0.0006], + device='cuda:2') +2023-03-10 01:03:00,491 INFO [train.py:898] (2/4) Epoch 30, batch 2150, loss[loss=0.128, simple_loss=0.2123, pruned_loss=0.02181, over 18500.00 frames. ], tot_loss[loss=0.1532, simple_loss=0.244, pruned_loss=0.03117, over 3601679.38 frames. ], batch size: 44, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 01:03:40,494 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107571.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:03:44,435 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.7564, 5.2999, 5.2850, 5.2563, 4.8263, 5.1953, 4.7018, 5.1694], + device='cuda:2'), covar=tensor([0.0258, 0.0268, 0.0177, 0.0435, 0.0332, 0.0211, 0.0952, 0.0325], + device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0282, 0.0283, 0.0363, 0.0289, 0.0291, 0.0324, 0.0282], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-10 01:03:58,554 INFO [train.py:898] (2/4) Epoch 30, batch 2200, loss[loss=0.1656, simple_loss=0.2603, pruned_loss=0.03544, over 18474.00 frames. 
], tot_loss[loss=0.1545, simple_loss=0.2455, pruned_loss=0.03176, over 3591491.32 frames. ], batch size: 59, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 01:04:32,918 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.698e+02 2.424e+02 2.775e+02 3.608e+02 1.106e+03, threshold=5.550e+02, percent-clipped=4.0 +2023-03-10 01:04:56,701 INFO [train.py:898] (2/4) Epoch 30, batch 2250, loss[loss=0.136, simple_loss=0.2219, pruned_loss=0.02502, over 18501.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.2451, pruned_loss=0.03178, over 3587912.51 frames. ], batch size: 44, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 01:05:22,638 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107659.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:05:54,196 INFO [train.py:898] (2/4) Epoch 30, batch 2300, loss[loss=0.1592, simple_loss=0.2491, pruned_loss=0.03458, over 18482.00 frames. ], tot_loss[loss=0.1551, simple_loss=0.246, pruned_loss=0.0321, over 3596531.37 frames. ], batch size: 51, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 01:06:05,934 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.5854, 5.3648, 5.8350, 5.7960, 5.4050, 6.3373, 5.9731, 5.6151], + device='cuda:2'), covar=tensor([0.1041, 0.0627, 0.0762, 0.0863, 0.1351, 0.0621, 0.0682, 0.1624], + device='cuda:2'), in_proj_covar=tensor([0.0380, 0.0309, 0.0338, 0.0343, 0.0343, 0.0451, 0.0304, 0.0447], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004], + device='cuda:2') +2023-03-10 01:06:27,334 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.765e+02 2.455e+02 2.885e+02 3.451e+02 1.072e+03, threshold=5.770e+02, percent-clipped=4.0 +2023-03-10 01:06:32,256 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107720.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:06:52,535 INFO [train.py:898] (2/4) Epoch 30, batch 2350, loss[loss=0.1568, simple_loss=0.2505, pruned_loss=0.03159, over 17946.00 frames. ], tot_loss[loss=0.1554, simple_loss=0.2463, pruned_loss=0.03225, over 3598734.25 frames. ], batch size: 65, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 01:06:52,849 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107737.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:07:51,010 INFO [train.py:898] (2/4) Epoch 30, batch 2400, loss[loss=0.1385, simple_loss=0.227, pruned_loss=0.02501, over 18351.00 frames. ], tot_loss[loss=0.1543, simple_loss=0.2452, pruned_loss=0.03174, over 3603302.23 frames. 
], batch size: 46, lr: 3.73e-03, grad_scale: 8.0 +2023-03-10 01:08:01,204 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107796.0, num_to_drop=1, layers_to_drop={2} +2023-03-10 01:08:03,512 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107798.0, num_to_drop=1, layers_to_drop={3} +2023-03-10 01:08:14,960 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107808.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:08:24,298 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.608e+02 2.477e+02 2.910e+02 3.480e+02 7.076e+02, threshold=5.821e+02, percent-clipped=2.0 +2023-03-10 01:08:28,941 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.9010, 5.4281, 5.4005, 5.3926, 4.9185, 5.3050, 4.7718, 5.2818], + device='cuda:2'), covar=tensor([0.0269, 0.0284, 0.0191, 0.0412, 0.0358, 0.0230, 0.1097, 0.0330], + device='cuda:2'), in_proj_covar=tensor([0.0237, 0.0285, 0.0286, 0.0367, 0.0292, 0.0293, 0.0325, 0.0285], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006], + device='cuda:2') +2023-03-10 01:08:48,116 INFO [train.py:898] (2/4) Epoch 30, batch 2450, loss[loss=0.1555, simple_loss=0.2479, pruned_loss=0.0316, over 18462.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2453, pruned_loss=0.03191, over 3603366.27 frames. ], batch size: 59, lr: 3.72e-03, grad_scale: 8.0 +2023-03-10 01:08:51,492 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8501, 3.7223, 5.3176, 3.3857, 4.6154, 2.6935, 3.1826, 1.7986], + device='cuda:2'), covar=tensor([0.1325, 0.1057, 0.0143, 0.0852, 0.0512, 0.2610, 0.2793, 0.2439], + device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0258, 0.0238, 0.0212, 0.0270, 0.0286, 0.0343, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-10 01:08:56,671 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107844.0, num_to_drop=1, layers_to_drop={0} +2023-03-10 01:09:01,177 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.6563, 6.2311, 5.7222, 5.9753, 5.8499, 5.5482, 6.2388, 6.1905], + device='cuda:2'), covar=tensor([0.1134, 0.0722, 0.0443, 0.0695, 0.1332, 0.0720, 0.0646, 0.0653], + device='cuda:2'), in_proj_covar=tensor([0.0649, 0.0576, 0.0411, 0.0597, 0.0796, 0.0593, 0.0811, 0.0629], + device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0004, 0.0003, 0.0004, 0.0005, 0.0004, 0.0006, 0.0004], + device='cuda:2') +2023-03-10 01:09:04,775 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.9671, 5.1790, 2.5631, 5.0681, 4.8692, 5.1762, 4.9394, 2.3383], + device='cuda:2'), covar=tensor([0.0262, 0.0127, 0.1063, 0.0123, 0.0113, 0.0139, 0.0152, 0.1646], + device='cuda:2'), in_proj_covar=tensor([0.0095, 0.0085, 0.0099, 0.0101, 0.0091, 0.0081, 0.0088, 0.0099], + device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005], + device='cuda:2') +2023-03-10 01:09:06,278 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=4.37 vs. 
limit=5.0 +2023-03-10 01:09:10,505 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=107856.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:09:18,705 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7855, 3.6730, 4.9551, 4.2896, 3.4019, 2.9790, 4.4377, 5.2227], + device='cuda:2'), covar=tensor([0.0819, 0.1406, 0.0219, 0.0421, 0.0937, 0.1263, 0.0409, 0.0227], + device='cuda:2'), in_proj_covar=tensor([0.0157, 0.0288, 0.0181, 0.0190, 0.0201, 0.0200, 0.0206, 0.0219], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0004, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-10 01:09:21,763 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107866.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:09:46,147 INFO [train.py:898] (2/4) Epoch 30, batch 2500, loss[loss=0.1502, simple_loss=0.242, pruned_loss=0.02917, over 18278.00 frames. ], tot_loss[loss=0.1542, simple_loss=0.2448, pruned_loss=0.0318, over 3601832.67 frames. ], batch size: 49, lr: 3.72e-03, grad_scale: 8.0 +2023-03-10 01:10:19,340 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.763e+02 2.392e+02 2.985e+02 3.620e+02 9.433e+02, threshold=5.970e+02, percent-clipped=3.0 +2023-03-10 01:10:44,308 INFO [train.py:898] (2/4) Epoch 30, batch 2550, loss[loss=0.15, simple_loss=0.2393, pruned_loss=0.03034, over 18379.00 frames. ], tot_loss[loss=0.1546, simple_loss=0.2455, pruned_loss=0.03187, over 3604993.20 frames. ], batch size: 50, lr: 3.72e-03, grad_scale: 8.0 +2023-03-10 01:11:15,951 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.0001, 3.7407, 5.2461, 3.0644, 4.5417, 2.6839, 3.2995, 1.8525], + device='cuda:2'), covar=tensor([0.1234, 0.0993, 0.0153, 0.1044, 0.0533, 0.2879, 0.2697, 0.2368], + device='cuda:2'), in_proj_covar=tensor([0.0236, 0.0258, 0.0237, 0.0212, 0.0270, 0.0286, 0.0343, 0.0252], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002], + device='cuda:2') +2023-03-10 01:11:42,235 INFO [train.py:898] (2/4) Epoch 30, batch 2600, loss[loss=0.144, simple_loss=0.2369, pruned_loss=0.02558, over 17135.00 frames. ], tot_loss[loss=0.1544, simple_loss=0.2454, pruned_loss=0.03168, over 3607987.88 frames. ], batch size: 78, lr: 3.72e-03, grad_scale: 8.0 +2023-03-10 01:11:52,571 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8655, 4.5165, 4.4813, 3.4642, 3.7973, 3.5093, 2.8144, 2.7252], + device='cuda:2'), covar=tensor([0.0242, 0.0154, 0.0096, 0.0317, 0.0309, 0.0229, 0.0673, 0.0777], + device='cuda:2'), in_proj_covar=tensor([0.0078, 0.0065, 0.0073, 0.0074, 0.0094, 0.0072, 0.0080, 0.0089], + device='cuda:2'), out_proj_covar=tensor([0.0006, 0.0005, 0.0005, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006], + device='cuda:2') +2023-03-10 01:12:19,593 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108015.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:12:20,528 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.532e+02 2.560e+02 3.005e+02 3.499e+02 6.456e+02, threshold=6.010e+02, percent-clipped=1.0 +2023-03-10 01:12:44,880 INFO [train.py:898] (2/4) Epoch 30, batch 2650, loss[loss=0.1601, simple_loss=0.26, pruned_loss=0.03003, over 18578.00 frames. ], tot_loss[loss=0.1545, simple_loss=0.2454, pruned_loss=0.03182, over 3617661.47 frames. 
], batch size: 54, lr: 3.72e-03, grad_scale: 8.0 +2023-03-10 01:13:41,200 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7449, 3.1692, 4.5303, 3.7303, 2.8015, 4.6873, 3.9614, 3.1430], + device='cuda:2'), covar=tensor([0.0501, 0.1309, 0.0287, 0.0523, 0.1510, 0.0231, 0.0537, 0.0859], + device='cuda:2'), in_proj_covar=tensor([0.0222, 0.0243, 0.0238, 0.0174, 0.0225, 0.0219, 0.0258, 0.0197], + device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:2') +2023-03-10 01:13:42,790 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-03-10 01:13:44,283 INFO [train.py:898] (2/4) Epoch 30, batch 2700, loss[loss=0.1542, simple_loss=0.2421, pruned_loss=0.03322, over 18248.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2451, pruned_loss=0.03156, over 3608013.47 frames. ], batch size: 47, lr: 3.72e-03, grad_scale: 8.0 +2023-03-10 01:13:49,893 INFO [scaling.py:679] (2/4) Whitening: num_groups=1, num_channels=384, metric=3.90 vs. limit=5.0 +2023-03-10 01:13:51,680 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108093.0, num_to_drop=1, layers_to_drop={0} +2023-03-10 01:14:17,647 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.687e+02 2.532e+02 2.946e+02 3.512e+02 1.150e+03, threshold=5.891e+02, percent-clipped=1.0 +2023-03-10 01:14:38,663 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([4.6154, 4.0591, 2.1849, 3.8873, 4.9278, 2.6374, 3.2740, 3.6124], + device='cuda:2'), covar=tensor([0.0320, 0.1271, 0.2027, 0.0808, 0.0144, 0.1282, 0.1074, 0.1205], + device='cuda:2'), in_proj_covar=tensor([0.0190, 0.0289, 0.0215, 0.0205, 0.0150, 0.0188, 0.0225, 0.0235], + device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003], + device='cuda:2') +2023-03-10 01:14:42,609 INFO [train.py:898] (2/4) Epoch 30, batch 2750, loss[loss=0.1872, simple_loss=0.2816, pruned_loss=0.04646, over 18100.00 frames. ], tot_loss[loss=0.1539, simple_loss=0.245, pruned_loss=0.03138, over 3601920.24 frames. ], batch size: 65, lr: 3.72e-03, grad_scale: 8.0 +2023-03-10 01:14:50,206 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8533, 4.4521, 4.3763, 3.3740, 3.6583, 3.3598, 2.4663, 2.6835], + device='cuda:2'), covar=tensor([0.0217, 0.0144, 0.0093, 0.0327, 0.0345, 0.0268, 0.0809, 0.0766], + device='cuda:2'), in_proj_covar=tensor([0.0077, 0.0065, 0.0072, 0.0073, 0.0094, 0.0072, 0.0080, 0.0088], + device='cuda:2'), out_proj_covar=tensor([0.0006, 0.0005, 0.0005, 0.0005, 0.0007, 0.0005, 0.0006, 0.0006], + device='cuda:2') +2023-03-10 01:15:16,459 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108166.0, num_to_drop=0, layers_to_drop=set() +2023-03-10 01:15:41,148 INFO [train.py:898] (2/4) Epoch 30, batch 2800, loss[loss=0.1476, simple_loss=0.2385, pruned_loss=0.02838, over 18410.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2452, pruned_loss=0.03108, over 3611345.00 frames. 
], batch size: 48, lr: 3.72e-03, grad_scale: 8.0
+2023-03-10 01:16:12,747 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108214.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 01:16:14,907 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.768e+02 2.463e+02 2.976e+02 3.777e+02 6.000e+02, threshold=5.952e+02, percent-clipped=1.0
+2023-03-10 01:16:40,186 INFO [train.py:898] (2/4) Epoch 30, batch 2850, loss[loss=0.1401, simple_loss=0.2232, pruned_loss=0.0285, over 18232.00 frames. ], tot_loss[loss=0.1532, simple_loss=0.2444, pruned_loss=0.03106, over 3604238.93 frames. ], batch size: 45, lr: 3.72e-03, grad_scale: 8.0
+2023-03-10 01:16:47,376 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5594, 3.1190, 4.4336, 3.6033, 2.6547, 4.5921, 3.9003, 3.0740],
+ device='cuda:2'), covar=tensor([0.0579, 0.1311, 0.0276, 0.0531, 0.1657, 0.0226, 0.0549, 0.0866],
+ device='cuda:2'), in_proj_covar=tensor([0.0222, 0.0243, 0.0238, 0.0174, 0.0226, 0.0218, 0.0258, 0.0197],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:2')
+2023-03-10 01:17:08,462 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8138, 5.1844, 2.5353, 5.0455, 4.9403, 5.1856, 4.9868, 2.7933],
+ device='cuda:2'), covar=tensor([0.0251, 0.0083, 0.0867, 0.0083, 0.0083, 0.0076, 0.0097, 0.0981],
+ device='cuda:2'), in_proj_covar=tensor([0.0095, 0.0085, 0.0099, 0.0101, 0.0091, 0.0081, 0.0087, 0.0099],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0004, 0.0005, 0.0005, 0.0004, 0.0004, 0.0005, 0.0005],
+ device='cuda:2')
+2023-03-10 01:17:09,651 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.7638, 3.4852, 2.3907, 4.5375, 3.2327, 4.2093, 2.7806, 4.0108],
+ device='cuda:2'), covar=tensor([0.0657, 0.0922, 0.1535, 0.0476, 0.0829, 0.0369, 0.1150, 0.0449],
+ device='cuda:2'), in_proj_covar=tensor([0.0227, 0.0237, 0.0199, 0.0302, 0.0201, 0.0272, 0.0211, 0.0211],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-10 01:17:38,309 INFO [train.py:898] (2/4) Epoch 30, batch 2900, loss[loss=0.1554, simple_loss=0.2449, pruned_loss=0.03301, over 17888.00 frames. ], tot_loss[loss=0.153, simple_loss=0.2442, pruned_loss=0.0309, over 3610606.26 frames. ], batch size: 70, lr: 3.72e-03, grad_scale: 8.0
+2023-03-10 01:18:11,957 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108315.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 01:18:12,786 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.844e+02 2.541e+02 3.023e+02 3.918e+02 9.927e+02, threshold=6.045e+02, percent-clipped=2.0
+2023-03-10 01:18:23,057 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108325.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 01:18:25,885 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0859, 5.1111, 5.3396, 5.2986, 4.9846, 5.8425, 5.4606, 5.0448],
+ device='cuda:2'), covar=tensor([0.1165, 0.0756, 0.0914, 0.0896, 0.1465, 0.0699, 0.0663, 0.1799],
+ device='cuda:2'), in_proj_covar=tensor([0.0379, 0.0308, 0.0336, 0.0339, 0.0342, 0.0448, 0.0302, 0.0443],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-10 01:18:36,945 INFO [train.py:898] (2/4) Epoch 30, batch 2950, loss[loss=0.165, simple_loss=0.2594, pruned_loss=0.0353, over 18248.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2446, pruned_loss=0.03104, over 3607846.17 frames. ], batch size: 60, lr: 3.72e-03, grad_scale: 8.0
+2023-03-10 01:19:03,143 INFO [scaling.py:679] (2/4) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. limit=2.0
+2023-03-10 01:19:03,909 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.2638, 5.2168, 5.5451, 5.5561, 5.2154, 6.0461, 5.6903, 5.3324],
+ device='cuda:2'), covar=tensor([0.1114, 0.0649, 0.0820, 0.0780, 0.1447, 0.0714, 0.0710, 0.1838],
+ device='cuda:2'), in_proj_covar=tensor([0.0379, 0.0308, 0.0336, 0.0339, 0.0342, 0.0449, 0.0303, 0.0444],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-10 01:19:07,164 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108363.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 01:19:22,324 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.1947, 2.6384, 2.4157, 2.6884, 3.3637, 3.2220, 2.9678, 2.6653],
+ device='cuda:2'), covar=tensor([0.0226, 0.0323, 0.0590, 0.0458, 0.0226, 0.0220, 0.0447, 0.0445],
+ device='cuda:2'), in_proj_covar=tensor([0.0151, 0.0152, 0.0169, 0.0169, 0.0147, 0.0133, 0.0164, 0.0169],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-10 01:19:29,488 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.8297, 3.7941, 3.6603, 3.2983, 3.5832, 3.0076, 3.0657, 3.7911],
+ device='cuda:2'), covar=tensor([0.0070, 0.0107, 0.0091, 0.0148, 0.0106, 0.0205, 0.0197, 0.0082],
+ device='cuda:2'), in_proj_covar=tensor([0.0164, 0.0183, 0.0152, 0.0204, 0.0163, 0.0193, 0.0199, 0.0141],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0003, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-10 01:19:34,122 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108386.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 01:19:34,878 INFO [train.py:898] (2/4) Epoch 30, batch 3000, loss[loss=0.1547, simple_loss=0.2467, pruned_loss=0.03134, over 18338.00 frames. ], tot_loss[loss=0.1534, simple_loss=0.2448, pruned_loss=0.03098, over 3606999.88 frames. ], batch size: 56, lr: 3.72e-03, grad_scale: 8.0
+2023-03-10 01:19:34,878 INFO [train.py:923] (2/4) Computing validation loss
+2023-03-10 01:19:45,620 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7418, 2.3462, 2.2185, 2.3431, 2.8894, 2.8510, 2.7113, 2.4575],
+ device='cuda:2'), covar=tensor([0.0233, 0.0290, 0.0558, 0.0465, 0.0220, 0.0225, 0.0440, 0.0412],
+ device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0152, 0.0168, 0.0169, 0.0147, 0.0133, 0.0163, 0.0168],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-10 01:19:47,077 INFO [train.py:932] (2/4) Epoch 30, validation: loss=0.1491, simple_loss=0.2469, pruned_loss=0.02567, over 944034.00 frames.
+2023-03-10 01:19:47,078 INFO [train.py:933] (2/4) Maximum memory allocated so far is 19987MB
+2023-03-10 01:19:54,095 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108393.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 01:20:01,026 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.1179, 5.2017, 5.2089, 4.9753, 4.9216, 4.9857, 5.3001, 5.2792],
+ device='cuda:2'), covar=tensor([0.0063, 0.0069, 0.0059, 0.0111, 0.0062, 0.0165, 0.0081, 0.0089],
+ device='cuda:2'), in_proj_covar=tensor([0.0103, 0.0078, 0.0083, 0.0104, 0.0083, 0.0112, 0.0095, 0.0095],
+ device='cuda:2'), out_proj_covar=tensor([0.0004, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004],
+ device='cuda:2')
+2023-03-10 01:20:20,293 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.803e+02 2.459e+02 2.931e+02 3.720e+02 6.119e+02, threshold=5.863e+02, percent-clipped=1.0
+2023-03-10 01:20:39,337 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108432.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 01:20:45,382 INFO [train.py:898] (2/4) Epoch 30, batch 3050, loss[loss=0.1505, simple_loss=0.2387, pruned_loss=0.03109, over 18389.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2445, pruned_loss=0.03109, over 3601992.53 frames. ], batch size: 50, lr: 3.71e-03, grad_scale: 8.0
+2023-03-10 01:20:50,590 INFO [zipformer.py:625] (2/4) warmup_begin=666.7, warmup_end=1333.3, batch_count=108441.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 01:21:43,457 INFO [train.py:898] (2/4) Epoch 30, batch 3100, loss[loss=0.162, simple_loss=0.2458, pruned_loss=0.03914, over 18547.00 frames. ], tot_loss[loss=0.1531, simple_loss=0.2443, pruned_loss=0.03099, over 3604280.41 frames. ], batch size: 49, lr: 3.71e-03, grad_scale: 8.0
+2023-03-10 01:21:51,140 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108493.0, num_to_drop=1, layers_to_drop={1}
+2023-03-10 01:22:17,726 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.743e+02 2.471e+02 2.835e+02 3.390e+02 1.271e+03, threshold=5.670e+02, percent-clipped=3.0
+2023-03-10 01:22:41,859 INFO [train.py:898] (2/4) Epoch 30, batch 3150, loss[loss=0.1309, simple_loss=0.2127, pruned_loss=0.02457, over 18496.00 frames. ], tot_loss[loss=0.1531, simple_loss=0.2439, pruned_loss=0.03113, over 3604145.02 frames. ], batch size: 44, lr: 3.71e-03, grad_scale: 8.0
+2023-03-10 01:23:29,787 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108577.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 01:23:40,618 INFO [train.py:898] (2/4) Epoch 30, batch 3200, loss[loss=0.1569, simple_loss=0.2539, pruned_loss=0.02996, over 18561.00 frames. ], tot_loss[loss=0.1531, simple_loss=0.2438, pruned_loss=0.03119, over 3594978.44 frames. ], batch size: 54, lr: 3.71e-03, grad_scale: 8.0
+2023-03-10 01:24:16,502 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.665e+02 2.358e+02 2.876e+02 3.361e+02 7.357e+02, threshold=5.753e+02, percent-clipped=3.0
+2023-03-10 01:24:39,475 INFO [train.py:898] (2/4) Epoch 30, batch 3250, loss[loss=0.1653, simple_loss=0.2568, pruned_loss=0.03683, over 18304.00 frames. ], tot_loss[loss=0.1534, simple_loss=0.2442, pruned_loss=0.0313, over 3598617.26 frames. ], batch size: 54, lr: 3.71e-03, grad_scale: 8.0
+2023-03-10 01:24:41,005 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108638.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 01:25:31,367 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108681.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 01:25:37,783 INFO [train.py:898] (2/4) Epoch 30, batch 3300, loss[loss=0.1516, simple_loss=0.2458, pruned_loss=0.02876, over 18393.00 frames. ], tot_loss[loss=0.1529, simple_loss=0.2436, pruned_loss=0.03111, over 3595163.16 frames. ], batch size: 52, lr: 3.71e-03, grad_scale: 8.0
+2023-03-10 01:26:12,810 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.499e+02 2.588e+02 2.944e+02 3.535e+02 1.316e+03, threshold=5.888e+02, percent-clipped=3.0
+2023-03-10 01:26:36,473 INFO [train.py:898] (2/4) Epoch 30, batch 3350, loss[loss=0.153, simple_loss=0.2512, pruned_loss=0.02741, over 18488.00 frames. ], tot_loss[loss=0.1531, simple_loss=0.244, pruned_loss=0.03108, over 3591313.21 frames. ], batch size: 53, lr: 3.71e-03, grad_scale: 8.0
+2023-03-10 01:27:34,246 INFO [train.py:898] (2/4) Epoch 30, batch 3400, loss[loss=0.1735, simple_loss=0.2588, pruned_loss=0.04413, over 12672.00 frames. ], tot_loss[loss=0.1541, simple_loss=0.2451, pruned_loss=0.03157, over 3574736.79 frames. ], batch size: 130, lr: 3.71e-03, grad_scale: 8.0
+2023-03-10 01:27:35,612 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108788.0, num_to_drop=1, layers_to_drop={0}
+2023-03-10 01:27:38,994 INFO [zipformer.py:625] (2/4) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108791.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 01:27:43,570 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.9286, 2.9412, 2.2336, 3.3391, 2.6045, 2.8322, 2.3531, 2.8996],
+ device='cuda:2'), covar=tensor([0.0539, 0.0694, 0.1167, 0.0586, 0.0686, 0.0276, 0.1012, 0.0481],
+ device='cuda:2'), in_proj_covar=tensor([0.0225, 0.0236, 0.0199, 0.0300, 0.0200, 0.0272, 0.0210, 0.0211],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-10 01:28:09,512 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.896e+02 2.642e+02 3.152e+02 3.667e+02 6.078e+02, threshold=6.305e+02, percent-clipped=1.0
+2023-03-10 01:28:33,132 INFO [train.py:898] (2/4) Epoch 30, batch 3450, loss[loss=0.1725, simple_loss=0.2643, pruned_loss=0.04039, over 18045.00 frames. ], tot_loss[loss=0.1537, simple_loss=0.2448, pruned_loss=0.03136, over 3579996.04 frames. ], batch size: 62, lr: 3.71e-03, grad_scale: 8.0
+2023-03-10 01:28:50,265 INFO [zipformer.py:625] (2/4) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108852.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 01:28:54,780 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([5.0568, 5.5401, 5.5220, 5.5498, 4.9689, 5.4513, 4.9681, 5.4640],
+ device='cuda:2'), covar=tensor([0.0239, 0.0263, 0.0188, 0.0406, 0.0411, 0.0224, 0.0963, 0.0284],
+ device='cuda:2'), in_proj_covar=tensor([0.0237, 0.0282, 0.0284, 0.0362, 0.0291, 0.0291, 0.0321, 0.0281],
+ device='cuda:2'), out_proj_covar=tensor([0.0005, 0.0006, 0.0006, 0.0008, 0.0005, 0.0006, 0.0006, 0.0006],
+ device='cuda:2')
+2023-03-10 01:29:31,200 INFO [train.py:898] (2/4) Epoch 30, batch 3500, loss[loss=0.1246, simple_loss=0.2086, pruned_loss=0.0203, over 18385.00 frames. ], tot_loss[loss=0.1534, simple_loss=0.2443, pruned_loss=0.0312, over 3587052.05 frames. ], batch size: 42, lr: 3.71e-03, grad_scale: 8.0
+2023-03-10 01:30:05,393 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.699e+02 2.567e+02 2.966e+02 3.432e+02 4.808e+02, threshold=5.933e+02, percent-clipped=0.0
+2023-03-10 01:30:07,493 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([3.5754, 2.7965, 2.4749, 2.9404, 3.6178, 3.5488, 3.1155, 2.9121],
+ device='cuda:2'), covar=tensor([0.0177, 0.0361, 0.0582, 0.0440, 0.0189, 0.0180, 0.0401, 0.0422],
+ device='cuda:2'), in_proj_covar=tensor([0.0150, 0.0152, 0.0169, 0.0168, 0.0147, 0.0134, 0.0163, 0.0168],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0003, 0.0004, 0.0004, 0.0003, 0.0003, 0.0003, 0.0004],
+ device='cuda:2')
+2023-03-10 01:30:21,154 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.7265, 3.5103, 4.7250, 4.1432, 3.1192, 2.8746, 4.2306, 4.9964],
+ device='cuda:2'), covar=tensor([0.0876, 0.1395, 0.0278, 0.0492, 0.1081, 0.1400, 0.0445, 0.0336],
+ device='cuda:2'), in_proj_covar=tensor([0.0158, 0.0289, 0.0182, 0.0191, 0.0202, 0.0201, 0.0207, 0.0221],
+ device='cuda:2'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:2')
+2023-03-10 01:30:23,122 INFO [zipformer.py:625] (2/4) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108933.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 01:30:27,374 INFO [train.py:898] (2/4) Epoch 30, batch 3550, loss[loss=0.1473, simple_loss=0.2422, pruned_loss=0.02617, over 18641.00 frames. ], tot_loss[loss=0.1534, simple_loss=0.2445, pruned_loss=0.03118, over 3589782.27 frames. ], batch size: 48, lr: 3.71e-03, grad_scale: 8.0
+2023-03-10 01:30:33,989 INFO [zipformer.py:1455] (2/4) attn_weights_entropy = tensor([2.8041, 3.6607, 5.1846, 3.0374, 4.4946, 2.6599, 3.1469, 1.9101],
+ device='cuda:2'), covar=tensor([0.1342, 0.1054, 0.0176, 0.1023, 0.0521, 0.2728, 0.2780, 0.2402],
+ device='cuda:2'), in_proj_covar=tensor([0.0235, 0.0257, 0.0237, 0.0212, 0.0269, 0.0285, 0.0342, 0.0252],
+ device='cuda:2'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0003, 0.0002],
+ device='cuda:2')
+2023-03-10 01:31:14,533 INFO [zipformer.py:625] (2/4) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108981.0, num_to_drop=0, layers_to_drop=set()
+2023-03-10 01:31:20,780 INFO [train.py:898] (2/4) Epoch 30, batch 3600, loss[loss=0.1662, simple_loss=0.257, pruned_loss=0.03767, over 18355.00 frames. ], tot_loss[loss=0.1533, simple_loss=0.2442, pruned_loss=0.03116, over 3579642.16 frames. ], batch size: 56, lr: 3.71e-03, grad_scale: 8.0
+2023-03-10 01:31:52,861 INFO [optim.py:369] (2/4) Clipping_scale=2.0, grad-norm quartiles 1.673e+02 2.589e+02 3.051e+02 3.699e+02 9.146e+02, threshold=6.102e+02, percent-clipped=6.0
+2023-03-10 01:31:57,086 INFO [train.py:1165] (2/4) Done!