---
adam_beta1: 0.95
adam_beta2: 0.999
adam_epsilon: 1.0e-08
adam_weight_decay: 1.0e-06
cache_dir: null
center_crop: true
checkpointing_steps: 500
checkpoints_total_limit: null
dataloader_num_workers: 0
dataset_config_name: null
dataset_name: cifar10
ema_inv_gamma: 1.0
ema_max_decay: 0.9999
ema_power: 0.75
eval_batch_size: 16
gradient_accumulation_steps: 1
hub_model_id: null
hub_private_repo: false
hub_token: null
learning_rate: 0.0001
local_rank: -1
logger: tensorboard
logging_dir: logs
lr_scheduler: cosine
lr_warmup_steps: 500
# quoted: bare `no` would parse as boolean false (YAML 1.1 "Norway problem")
mixed_precision: 'no'
model_config_name_or_path: null
num_epochs: 100
output_dir: cifar10-32
overwrite_output_dir: false
pretrained_teacher_model_name_or_path: google/ddpm-cifar10-32
push_to_hub: true
random_flip: true
resolution: 32
resume_from_checkpoint: null
save_images_epochs: 10
save_model_epochs: 10
train_batch_size: 16
train_data_dir: null