# CraftsMan/configs/shape-autoencoder/l256-e64-ne8-nd16.yaml
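# Training configuration for CraftsMan's Michelangelo-style shape autoencoder
# (256 latent tokens, 64-dim embeddings, 8 encoder / 16 decoder layers).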
exp_root_dir: "outputs"
name: "michelangelo-autoencoder/l256-e64-ne8-nd16"
tag: "${rmspace:n${data.n_samples}+${data.supervision_type}+rot${data.rotate}+noise${data.noise_sigma}+${system.shape_model.embed_type}+dsample${system.shape_model.use_downsample}+pfeat${system.shape_model.point_feats}+logits${system.loss.lambda_logits}+kl${system.loss.lambda_kl}+lr${system.optimizer.args.lr},_}"
seed: 0
data_type: "objaverse-datamodule"
data:
  root_dir: "data/objaverse_clean/sdf_100k"
  data_type: "sdf"
  n_samples: 4096
  noise_sigma: 0.
  rotate: False
  load_supervision: True
  supervision_type: "occupancy"
  n_supervision: 4096
  load_image: False     # whether to load images
  load_caption: False   # whether to load captions
  batch_size: 128
  num_workers: 16
system_type: "shape-autoencoder-system"
system:
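  # KL-regularized transformer autoencoder over a set of latent tokens;
  # the decoder predicts occupancy (out_dim: 1).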
  sample_posterior: true
  shape_model_type: "michelangelo-autoencoder"
  shape_model:
    num_latents: 256
    embed_dim: 64
    point_feats: 3    # xyz + normal
    out_dim: 1        # only occupancy
    embed_type: "fourier"
    num_freqs: 8
    include_pi: false
    heads: 12
    width: 768
    num_encoder_layers: 8
    num_decoder_layers: 16
    use_ln_post: true
    init_scale: 0.25
    qkv_bias: true
    use_flash: true
    use_checkpoint: true
    use_downsample: true
  loggers:
    wandb:
      enable: false
      project: "CraftsMan"
      name: shape-autoencoder+${name}+${tag}
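  # Loss weights: occupancy-logit reconstruction term vs. KL regularization
  # of the latent distribution.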
  loss:
    lambda_logits: 1.
    lambda_kl: 0.001
  optimizer:
    name: AdamW
    args:
      lr: 1.e-4
      betas: [0.9, 0.99]
      eps: 1.e-6
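  # LR schedule: linear warmup over the first 5000 steps, then cosine annealing;
  # the switch point is given by milestones.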
  scheduler:
    name: SequentialLR
    interval: step
    schedulers:
      - name: LinearLR
        interval: step
        args:
          start_factor: 1e-6
          end_factor: 1.0
          total_iters: 5000
      - name: CosineAnnealingLR
        interval: step
        args:
          T_max: 5000
          eta_min: 0.
    milestones: [5000]
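# PyTorch Lightning Trainer settings: validation every 10 epochs, mixed 16-bit precision.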
trainer:
  num_nodes: 1
  max_epochs: 100000
  log_every_n_steps: 5
  num_sanity_val_steps: 1
  # val_check_interval: 200
  check_val_every_n_epoch: 10
  enable_progress_bar: true
  precision: 16-mixed
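# Checkpointing: save every 5000 training steps and keep all checkpoints (save_top_k: -1).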
checkpoint:
  save_last: true
  save_top_k: -1
  every_n_train_steps: 5000
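# Assumed training invocation (the entrypoint name may differ; check the CraftsMan repo):
#   python train.py --config configs/shape-autoencoder/l256-e64-ne8-nd16.yaml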