jadechoghari committed 15fc9cd (1 parent: c586249)
Update audioldm_train/config/mos_as_token/qa_mdt.yaml

audioldm_train/config/mos_as_token/qa_mdt.yaml CHANGED
@@ -3,15 +3,15 @@ project: "audioldm"
 precision: "high"
 
 # TODO: change this with your project path
-base_root: "./
+base_root: "./qa_mdt"
 
 # TODO: change this with your pretrained path
 # TODO: pretrained path is also needed in "base_root/offset_pretrained_checkpoints.json"
 pretrained:
-  clap_music: "./
-  flan_t5: "./
-  hifi-gan: "./
-  roberta-base: "./
+  clap_music: "./qa_mdt/checkpoints/clap_music"
+  flan_t5: "./qa_mdt/checkpoints/flant5"
+  hifi-gan: "./qa_mdt/checkpoints/hifi-gan/checkpoints"
+  roberta-base: "./qa_mdt/checkpoints/robertabase"
 
 # TODO: lmdb dataset that stores pMOS of the training dataset
 # while in inference, we don't need it !!!
@@ -78,7 +78,7 @@ model:
     target: qa_mdt.audioldm_train.modules.latent_encoder.autoencoder.AutoencoderKL
     params:
       # TODO: change it with your VAE checkpoint
-      reload_from_ckpt: "./
+      reload_from_ckpt: "./qa_mdt/checkpoints/hifi-gan/checkpoints/vae_mel_16k_64bins.ckpt"
       sampling_rate: *sampling_rate
      batchsize: 1
      monitor: val/rec_loss
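Since every value edited in this commit is a filesystem path, a quick sanity check after updating the config can catch a wrong base_root before a run fails. The sketch below is not part of the commit: it assumes PyYAML is installed, that the script runs from the repository root, and it only inspects the base_root and pretrained keys shown in the diff above.

# check_pretrained_paths.py - minimal sketch, not part of this commit.
# Loads qa_mdt.yaml and reports whether the checkpoint paths set in this
# commit exist on disk.
import os

import yaml  # pip install pyyaml

CONFIG_PATH = "audioldm_train/config/mos_as_token/qa_mdt.yaml"

with open(CONFIG_PATH) as f:
    cfg = yaml.safe_load(f)

# Keys taken from the diff above; everything else in the file is ignored here.
paths = {"base_root": cfg["base_root"]}
for name, value in cfg.get("pretrained", {}).items():
    paths[f"pretrained.{name}"] = value

for name, path in sorted(paths.items()):
    status = "ok" if os.path.exists(path) else "MISSING"
    print(f"[{status}] {name}: {path}")

Running it after this change should list "./qa_mdt" and the four checkpoint directories; any MISSING entry means the corresponding checkpoint still has to be downloaded or the path adjusted (the reload_from_ckpt path under model would need a similar check in its nested section).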