checkpoint: exp/cnceleb_v2/resnet/wespeaker/renset34_new_margin-LM/models/model_0.pt
data_type: shard
dataloader_args:
  batch_size: 64
  drop_last: true
  num_workers: 16
  pin_memory: false
  prefetch_factor: 8
dataset_args:
  aug_prob: 0.6
  fbank_args:
    dither: 1.0
    frame_length: 25
    frame_shift: 10
    num_mel_bins: 80
  num_frms: 600
  shuffle: true
  shuffle_args:
    shuffle_size: 2500
  spec_aug: false
  spec_aug_args:
    max_f: 8
    max_t: 10
    num_f_mask: 1
    num_t_mask: 1
    prob: 0.6
  speed_perturb: false
do_lm: true
exp_dir: exp/cnceleb_v2/resnet/wespeaker/renset34_new_margin-LM
gpus:
- 0
- 1
- 2
- 3
- 4
- 5
- 6
- 7
log_batch_interval: 100
loss: CrossEntropyLoss
loss_args: {}
margin_scheduler: MarginScheduler
margin_update:
  epoch_iter: 992
  final_margin: 0.5
  fix_start_epoch: 1
  increase_start_epoch: 1
  increase_type: exp
  initial_margin: 0.5
  update_margin: true
model: ResNet34
model_args:
  embed_dim: 256
  feat_dim: 80
  pooling_func: TSTP
  two_emb_layer: false
model_init: null
noise_data: data/cnceleb_wespeaker/musan/lmdb
num_avg: 1
num_epochs: 5
optimizer: SGD
optimizer_args:
  lr: 0.0001
  momentum: 0.9
  nesterov: true
  weight_decay: 0.0001
projection_args:
  easy_margin: false
  embed_dim: 256
  num_class: 8379
  project_type: arc_margin
  scale: 32.0
reverb_data: data/cnceleb_wespeaker/rirs/lmdb
save_epoch_interval: 1
scheduler: ExponentialDecrease
scheduler_args:
  epoch_iter: 992
  final_lr: 2.5e-05
  initial_lr: 0.0001
  num_epochs: 5
  scale_ratio: 8.0
  warm_from_zero: true
  warm_up_epoch: 1
seed: 42
train_data: data/cnceleb_wespeaker/cnceleb_train/shard.list
train_label: data/cnceleb_wespeaker/cnceleb_train/utt2spk
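
For reference, the scheduler_args above describe an exponential learning-rate decrease from initial_lr to final_lr over num_epochs * epoch_iter steps, with a one-epoch warm-up (warm_from_zero: true). The short Python sketch below illustrates one plausible way such a per-step schedule can be computed from these values; the function name exp_decrease_lr and the exact warm-up/interpolation formulas are illustrative assumptions, not WeSpeaker's actual implementation (which, for example, also uses scale_ratio).

import math

# Values copied from scheduler_args above; formulas are assumptions for illustration.
INITIAL_LR = 1.0e-4
FINAL_LR = 2.5e-5
EPOCH_ITER = 992
NUM_EPOCHS = 5
WARM_UP_EPOCH = 1


def exp_decrease_lr(step: int) -> float:
    """Hypothetical per-step LR: linear warm-up from zero during the first
    warm-up epoch, then exponential interpolation from INITIAL_LR to FINAL_LR."""
    total_steps = NUM_EPOCHS * EPOCH_ITER
    warm_steps = WARM_UP_EPOCH * EPOCH_ITER
    if step < warm_steps:
        # warm_from_zero: true -> ramp linearly from 0 up to INITIAL_LR.
        return INITIAL_LR * step / warm_steps
    # Fraction of training completed, in [0, 1].
    progress = step / total_steps
    # lr(progress=0) = INITIAL_LR, lr(progress=1) = FINAL_LR.
    return INITIAL_LR * math.exp(progress * math.log(FINAL_LR / INITIAL_LR))


if __name__ == "__main__":
    # Print the learning rate at the last step of each epoch.
    for epoch in range(1, NUM_EPOCHS + 1):
        step = epoch * EPOCH_ITER - 1
        print(f"epoch {epoch}: lr = {exp_decrease_lr(step):.6g}")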