------------ Options -------------
batch_size: 64
checkpoints_dir: ./checkpoints
codebook_size: 1024
d_inner_hid: 2048
d_k: 64
d_model: 512
d_v: 64
dataset_name: t2m
dim_vq_dec_hidden: 1024
dim_vq_enc_hidden: 1024
dim_vq_latent: 1024
dropout: 0.1
eval_every_e: 5
gpu_id: 1
is_continue: False
is_train: True
label_smoothing: False
lambda_beta: 1
lambda_m2t: 1.0
log_every: 50
lr: 0.0002
m2t_v3: False
max_epoch: 300
max_text_len: 20
n_dec_layers: 4
n_down: 2
n_enc_layers: 4
n_head: 8
n_resblk: 3
name: M2T_EL4_DL4_NH8_PS
proj_share_weight: True
q_mode: cmt
save_every_e: 10
save_latest: 500
t2m_v2: False
text_aug: False
tokenizer_name: VQVAEV3_CB1024_CMT_H1024_NRES3
unit_length: 4
-------------- End ----------------
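For reference, a dump in this "Options ... End" format is typically produced by parsing command-line flags and printing the resulting namespace. The following is a minimal sketch, assuming the options are defined with Python's argparse; the flag names and defaults mirror a representative subset of the dump above, but the repository's actual option class may be organized differently.

```python
# Minimal sketch (not the repo's actual option class): reproduces a
# representative subset of the flags above and prints them in the same
# "Options ... End" format.
import argparse


def build_parser():
    parser = argparse.ArgumentParser()
    # Transformer dimensions
    parser.add_argument('--d_model', type=int, default=512)
    parser.add_argument('--d_inner_hid', type=int, default=2048)
    parser.add_argument('--n_head', type=int, default=8)
    parser.add_argument('--n_enc_layers', type=int, default=4)
    parser.add_argument('--n_dec_layers', type=int, default=4)
    parser.add_argument('--dropout', type=float, default=0.1)
    # VQ tokenizer settings
    parser.add_argument('--codebook_size', type=int, default=1024)
    parser.add_argument('--dim_vq_latent', type=int, default=1024)
    parser.add_argument('--tokenizer_name', type=str,
                        default='VQVAEV3_CB1024_CMT_H1024_NRES3')
    # Training schedule and bookkeeping
    parser.add_argument('--batch_size', type=int, default=64)
    parser.add_argument('--lr', type=float, default=0.0002)
    parser.add_argument('--max_epoch', type=int, default=300)
    parser.add_argument('--checkpoints_dir', type=str, default='./checkpoints')
    parser.add_argument('--name', type=str, default='M2T_EL4_DL4_NH8_PS')
    return parser


def print_options(opt):
    """Print parsed options in the 'Options ... End' format shown above."""
    print('------------ Options -------------')
    for key, value in sorted(vars(opt).items()):
        print(f'{key}: {value}')
    print('-------------- End ----------------')


if __name__ == '__main__':
    opt = build_parser().parse_args()
    print_options(opt)
```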