# File size: 1,833 bytes
# Source revision: 6bacacb
---
accum_grad: 1
cmvn_file: data/train_960/global_cmvn
dataset_conf:
  batch_conf:
    batch_type: dynamic
    max_frames_in_batch: 4000
  fbank_conf:
    dither: 0.1
    frame_length: 25
    frame_shift: 10
    num_mel_bins: 80
  filter_conf:
    max_length: 1650
    min_length: 10
    token_max_length: 200
    token_min_length: 1
  resample_conf:
    resample_rate: 16000
  shuffle: true
  shuffle_conf:
    shuffle_size: 1500
  sort: true
  sort_conf:
    sort_size: 500
  spec_aug: true
  spec_aug_conf:
    max_f: 10
    max_t: 50
    num_f_mask: 2
    num_t_mask: 2
  speed_perturb: true
decoder: bitransformer
decoder_conf:
  attention_heads: 4
  dropout_rate: 0.1
  linear_units: 2048
  num_blocks: 3
  positional_dropout_rate: 0.1
  r_num_blocks: 3
  self_attention_dropout_rate: 0.1
  src_attention_dropout_rate: 0.1
encoder: conformer
encoder_conf:
  activation_type: swish
  attention_dropout_rate: 0.1
  attention_heads: 4
  cnn_module_kernel: 15
  dropout_rate: 0.1
  input_layer: conv2d
  linear_units: 2048
  normalize_before: true
  num_blocks: 12
  output_size: 256
  pos_enc_layer_type: rel_pos
  positional_dropout_rate: 0.1
  selfattention_layer_type: rel_selfattn
  use_cnn_module: true
grad_clip: 4
input_dim: 80
is_json_cmvn: true
joint_conf:
  activation: tanh
  join_dim: 512
  joint_mode: add
  postjoin_linear: false
  prejoin_linear: true
log_interval: 100
max_epoch: 140
model_conf:
  attention_weight: 0.15
  ctc_weight: 0.1
  length_normalized_loss: false
  lsm_weight: 0.1
  reverse_weight: 0.3
  transducer_weight: 0.75
optim: adam
optim_conf:
  lr: 0.001
output_dim: 5002
predictor: rnn
predictor_conf:
  bias: true
  dropout: 0.1
  embed_dropout: 0.1
  embed_size: 256
  hidden_size: 256
  num_layers: 2
  output_size: 256
  rnn_type: lstm
scheduler: warmuplr
scheduler_conf:
  warmup_steps: 25000
|