baibaibai committed on
Commit: 4364b90
1 Parent(s): e882f51

Upload 10 files

checkpoints/xiaxiaobai/config.yaml ADDED
@@ -0,0 +1,445 @@
+ K_step: 1000
+ accumulate_grad_batches: 1
+ audio_num_mel_bins: 128
+ audio_sample_rate: 44100
+ binarization_args:
+   shuffle: false
+   with_align: true
+   with_f0: true
+   with_hubert: true
+   with_spk_embed: false
+   with_wav: false
+ binarizer_cls: preprocessing.SVCpre.SVCBinarizer
+ binary_data_dir: data/binary/xiaxiaobai
+ check_val_every_n_epoch: 10
+ choose_test_manually: false
+ clip_grad_norm: 1
+ config_path: training/config_nsf.yaml
+ content_cond_steps: []
+ cwt_add_f0_loss: false
+ cwt_hidden_size: 128
+ cwt_layers: 2
+ cwt_loss: l1
+ cwt_std_scale: 0.8
+ datasets:
+ - opencpop
+ debug: false
+ dec_ffn_kernel_size: 9
+ dec_layers: 4
+ decay_steps: 40000
+ decoder_type: fft
+ dict_dir: ''
+ diff_decoder_type: wavenet
+ diff_loss_type: l2
+ dilation_cycle_length: 4
+ dropout: 0.1
+ ds_workers: 4
+ dur_enc_hidden_stride_kernel:
+ - 0,2,3
+ - 0,2,3
+ - 0,1,3
+ dur_loss: mse
+ dur_predictor_kernel: 3
+ dur_predictor_layers: 5
+ enc_ffn_kernel_size: 9
+ enc_layers: 4
+ encoder_K: 8
+ encoder_type: fft
+ endless_ds: false
+ f0_bin: 256
+ f0_max: 1100.0
+ f0_min: 40.0
+ ffn_act: gelu
+ ffn_padding: SAME
+ fft_size: 2048
+ fmax: 16000
+ fmin: 40
+ fs2_ckpt: ''
+ gaussian_start: true
+ gen_dir_name: ''
+ gen_tgt_spk_id: -1
+ hidden_size: 256
+ hop_size: 512
+ hubert_gpu: true
+ hubert_path: checkpoints/hubert/hubert_soft.pt
+ infer: false
+ keep_bins: 128
+ lambda_commit: 0.25
+ lambda_energy: 0.0
+ lambda_f0: 1.0
+ lambda_ph_dur: 0.3
+ lambda_sent_dur: 1.0
+ lambda_uv: 1.0
+ lambda_word_dur: 1.0
+ load_ckpt: ''
+ log_interval: 100
+ loud_norm: false
+ lr: 0.0008
+ max_beta: 0.02
+ max_epochs: 3000
+ max_eval_sentences: 1
+ max_eval_tokens: 60000
+ max_frames: 42000
+ max_input_tokens: 60000
+ max_sentences: 75
+ max_tokens: 128000
+ max_updates: 1000000
+ mel_loss: ssim:0.5|l1:0.5
+ mel_vmax: 1.5
+ mel_vmin: -6.0
+ min_level_db: -120
+ no_fs2: true
+ norm_type: gn
+ num_ckpt_keep: 10
+ num_heads: 2
+ num_sanity_val_steps: 1
+ num_spk: 1
+ num_test_samples: 0
+ num_valid_plots: 10
+ optimizer_adam_beta1: 0.9
+ optimizer_adam_beta2: 0.98
+ out_wav_norm: false
+ pe_ckpt: checkpoints/0102_xiaoma_pe/model_ckpt_steps_60000.ckpt
+ pe_enable: false
+ perform_enhance: true
+ pitch_ar: false
+ pitch_enc_hidden_stride_kernel:
+ - 0,2,5
+ - 0,2,5
+ - 0,2,5
+ pitch_extractor: parselmouth
+ pitch_loss: l2
+ pitch_norm: log
+ pitch_type: frame
+ pndm_speedup: 10
+ pre_align_args:
+   allow_no_txt: false
+   denoise: false
+   forced_align: mfa
+   txt_processor: zh_g2pM
+   use_sox: true
+   use_tone: false
+ pre_align_cls: data_gen.singing.pre_align.SingingPreAlign
+ predictor_dropout: 0.5
+ predictor_grad: 0.1
+ predictor_hidden: -1
+ predictor_kernel: 5
+ predictor_layers: 5
+ prenet_dropout: 0.5
+ prenet_hidden_size: 256
+ pretrain_fs_ckpt: ''
+ processed_data_dir: xxx
+ profile_infer: false
+ raw_data_dir: data/raw/xiaxiaobai
+ ref_norm_layer: bn
+ rel_pos: true
+ reset_phone_dict: true
+ residual_channels: 384
+ residual_layers: 20
+ save_best: false
+ save_ckpt: true
+ save_codes:
+ - configs
+ - modules
+ - src
+ - utils
+ save_f0: true
+ save_gt: false
+ schedule_type: linear
+ seed: 1234
+ sort_by_len: true
+ speaker_id: xiaxiaobai
+ spec_max:
+ - -0.03524341061711311
+ - -0.22983506321907043
+ - 0.14042672514915466
+ - 0.24598221480846405
+ - 0.38769328594207764
+ - 0.403020977973938
+ - 0.4710584282875061
+ - 0.46678662300109863
+ - 0.38870999217033386
+ - 0.6632387638092041
+ - 0.6111851334571838
+ - 0.6690583229064941
+ - 0.7419546842575073
+ - 0.737534761428833
+ - 0.7382131218910217
+ - 0.8168102502822876
+ - 0.8108615279197693
+ - 0.8472557067871094
+ - 0.8082121014595032
+ - 0.9421120882034302
+ - 0.9718508124351501
+ - 0.9050439596176147
+ - 0.9559441208839417
+ - 0.9039924740791321
+ - 0.8825679421424866
+ - 0.8896527290344238
+ - 0.9281688928604126
+ - 0.9495793581008911
+ - 0.9456884860992432
+ - 0.8448024392127991
+ - 0.706383228302002
+ - 0.8815341591835022
+ - 0.8132895827293396
+ - 0.6926009058952332
+ - 0.7048583626747131
+ - 0.7063571810722351
+ - 0.6628313064575195
+ - 0.6794947385787964
+ - 0.5719707608222961
+ - 0.5986276268959045
+ - 0.8701962232589722
+ - 0.6562694311141968
+ - 0.5677947998046875
+ - 0.617889404296875
+ - 0.6444329023361206
+ - 0.8564671874046326
+ - 0.81944340467453
+ - 0.5799424052238464
+ - 0.4977129399776459
+ - 0.39452463388442993
+ - 0.46540209650993347
+ - 0.3466089963912964
+ - 0.43464744091033936
+ - 0.3072132170200348
+ - 0.2883022129535675
+ - 0.13554586470127106
+ - 0.1449982076883316
+ - 0.06144052743911743
+ - 0.032265570014715195
+ - -0.0411582849919796
+ - -0.03320876136422157
+ - 0.09776383638381958
+ - -0.10860992968082428
+ - -0.16577060520648956
+ - -0.25543949007987976
+ - -0.15455809235572815
+ - -0.16559940576553345
+ - -0.08075764775276184
+ - -0.16779740154743195
+ - -0.13772723078727722
+ - -0.20297837257385254
+ - -0.29985055327415466
+ - -0.32260239124298096
+ - -0.3287070095539093
+ - -0.4775390326976776
+ - -0.5171418190002441
+ - -0.3881285488605499
+ - -0.3670082092285156
+ - -0.4483766555786133
+ - -0.4888381361961365
+ - -0.3579249382019043
+ - -0.3966373801231384
+ - -0.39407745003700256
+ - -0.42390042543411255
+ - -0.5861775875091553
+ - -0.380365252494812
+ - -0.6373982429504395
+ - -0.6403031945228577
+ - -0.6271409392356873
+ - -0.42471981048583984
+ - -0.660955011844635
+ - -0.566235363483429
+ - -0.6491592526435852
+ - -0.6828501224517822
+ - -0.5224244594573975
+ - -0.48923757672309875
+ - -0.5497328042984009
+ - -0.5110473036766052
+ - -0.5995211005210876
+ - -0.6271748542785645
+ - -0.6996229887008667
+ - -0.7070512771606445
+ - -0.6818519234657288
+ - -0.6255654692649841
+ - -0.5862824320793152
+ - -0.41896769404411316
+ - -0.4821942150592804
+ - -0.6815913915634155
+ - -0.8292946219444275
+ - -0.787152111530304
+ - -0.7727371454238892
+ - -0.6443832516670227
+ - -0.6990486979484558
+ - -0.7400271892547607
+ - -0.7830663919448853
+ - -0.8243145942687988
+ - -0.8491696119308472
+ - -0.765605092048645
+ - -1.025327205657959
+ - -1.0541386604309082
+ - -1.113960862159729
+ - -1.1958088874816895
+ - -1.2482109069824219
+ - -1.336235523223877
+ - -1.4202933311462402
+ - -1.4578213691711426
+ - -1.4341062307357788
+ - -1.4737169742584229
+ spec_min:
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ - -4.999994277954102
+ spk_cond_steps: []
+ stop_token_weight: 5.0
+ task_cls: training.task.SVC_task.SVCTask
+ test_ids: []
+ test_input_dir: ''
+ test_num: 0
+ test_prefixes:
+ - test
+ test_set_name: test
+ timesteps: 1000
+ train_set_name: train
+ use_crepe: true
+ use_denoise: false
+ use_energy_embed: false
+ use_gt_dur: false
+ use_gt_f0: false
+ use_midi: false
+ use_nsf: true
+ use_pitch_embed: true
+ use_pos_embed: true
+ use_spk_embed: false
+ use_spk_id: false
+ use_split_spk_id: false
+ use_uv: false
+ use_var_enc: false
+ use_vec: false
+ val_check_interval: 2000
+ valid_num: 0
+ valid_set_name: valid
+ vocoder: network.vocoders.nsf_hifigan.NsfHifiGAN
+ vocoder_ckpt: checkpoints/nsf_hifigan/model
+ warmup_updates: 2000
+ wav2spec_eps: 1e-6
+ weight_decay: 0
+ win_size: 2048
+ work_dir: checkpoints/xiaxiaobai
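
The file above is plain YAML, so it can be inspected without the training code. Below is a minimal sketch of reading it, assuming PyYAML is installed locally; the path and key names are taken directly from the file added in this commit.

# Hypothetical local check: load the uploaded config and print a few of its values.
import yaml

with open("checkpoints/xiaxiaobai/config.yaml", "r", encoding="utf-8") as f:
    cfg = yaml.safe_load(f)

print(cfg["audio_sample_rate"])  # 44100
print(cfg["hop_size"])           # 512
print(cfg["vocoder"])            # network.vocoders.nsf_hifigan.NsfHifiGAN
print(len(cfg["spec_max"]))      # 128, matching audio_num_mel_bins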
checkpoints/xiaxiaobai/model_ckpt_steps_10000.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9ebec7700468f211c295184edf92817ad66fd3049ad5033dad2c0b9d04c196ed
+ size 391372697
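
Each checkpoint entry in this commit is a Git LFS pointer file rather than the weights themselves: the three lines record the LFS spec version, the sha256 digest (oid) of the stored file, and its size in bytes (about 391 MB here). The sketch below, which assumes the checkpoint has already been fetched to the path shown, recomputes the digest with hashlib and compares it to the oid above; a mismatch usually points to a truncated or partial download.

# Hypothetical integrity check of a fetched checkpoint against the LFS pointer's sha256 oid.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "9ebec7700468f211c295184edf92817ad66fd3049ad5033dad2c0b9d04c196ed"
actual = sha256_of("checkpoints/xiaxiaobai/model_ckpt_steps_10000.ckpt")
print("match" if actual == expected else "mismatch")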
checkpoints/xiaxiaobai/model_ckpt_steps_100000.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:00e03e3a135b11671762e9c4df6f182b40cec107a3749a278fa5f2951705795a
+ size 391372697
checkpoints/xiaxiaobai/model_ckpt_steps_130000.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:45d33d7db6ecc57287b0fa953e1d5152e6ab8b8fc1effde4e75fd20b8b9665c1
+ size 391372697
checkpoints/xiaxiaobai/model_ckpt_steps_140000.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:47dbce8c955e53dff08d3f97b800e7e003a2093514faef3cbeee5b543699e63d
+ size 391372697
checkpoints/xiaxiaobai/model_ckpt_steps_150000.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2f443a9e69459fd5ec9052434d3b9a8acad1700520b62bca2e071f9e1aaa06ea
+ size 391372697
checkpoints/xiaxiaobai/model_ckpt_steps_160000.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a3ddc0f4e7a63ca1482be28b1299248c3bfc1702de291e7a50cf994b7b39ec9e
+ size 391372697
checkpoints/xiaxiaobai/model_ckpt_steps_4000.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4ef503468db3bbb52f71fe3a8189ffd7018ea4c21121b0e6d3c24c6401eb3828
+ size 391372697
checkpoints/xiaxiaobai/model_ckpt_steps_6000.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8454ad1065525ba811b8faeb4a9d51a62c9eb82c4f3634e741f100af6a036845
+ size 391372697
checkpoints/xiaxiaobai/model_ckpt_steps_8000.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d1b378ff4fc65d64b6b661dd06646c4c27cb150780925fb828e97c090cfce0af
+ size 391372697