# Azusa-GPT-SoVITS / configs/s1big2.yaml
# Source: HuggingFace upload by XzJosh ("Upload 66 files", commit f643c3e, verified; 529 bytes)
# GPT-SoVITS stage-1 (AR text-to-semantic) training configuration, "big2" variant.
# NOTE(review): nesting indentation was lost in a copy/paste — restored here so
# each section is a mapping (consumers read e.g. config["train"]["epochs"]).

# Trainer loop settings.
train:
  seed: 1234
  epochs: 300
  batch_size: 12
  gradient_accumulation: 4
  save_every_n_epoch: 1
  # Lightning-style mixed-precision flag; quoted to make the string type explicit.
  precision: "16-mixed"
  gradient_clip: 1.0

# Learning-rate schedule: warmup from lr_init, then decay toward lr_end.
optimizer:
  lr: 0.01
  lr_init: 0.00001
  lr_end: 0.0001
  warmup_steps: 2000
  decay_steps: 40000

# Dataset / dataloader settings.
data:
  max_eval_sample: 8
  max_sec: 54
  num_workers: 1
  pad_val: 1024  # same with EOS in model

# AR transformer architecture.
model:
  vocab_size: 1025
  phoneme_vocab_size: 512
  embedding_dim: 1024
  hidden_dim: 1024
  head: 16
  linear_units: 2048
  n_layer: 6
  dropout: 0
  EOS: 1024  # end-of-sequence token id; matches data.pad_val

inference:
  top_k: 5