{
"dataset_name": [
"Mtot_Nbody_SIMBA",
"Mgas_SIMBA"
],
"dataset_config_name": null,
"dataset_conditional_name": null,
"model_config_name_or_path": null,
"base_channels": 64,
"cross_attention_dim": null,
"vae": null,
"vae_from_pretrained": null,
"vae_scaling_factor": null,
"train_data_dir": null,
"output_dir": "output/ddpm-ema-256-Mtot-Nbody-SIMBA-Mgas-SIMBA-cond-1686233229",
"overwrite_output_dir": false,
"cache_dir": "/content/drive/MyDrive/CAMELS",
"resolution": 256,
"data_size": 13500,
"super_resolution": null,
"local_resize": false,
"conditional": true,
"center_crop": false,
"random_flip": false,
"train_batch_size": 16,
"eval_batch_size": 16,
"dataloader_num_workers": 0,
"num_epochs": 200,
"save_images_epochs": 10,
"save_model_epochs": 10,
"gradient_accumulation_steps": 4,
"learning_rate": 0.0001,
"lr_scheduler": "cosine",
"lr_warmup_steps": 500,
"adam_beta1": 0.95,
"adam_beta2": 0.999,
"adam_weight_decay": 1e-06,
"adam_epsilon": 1e-08,
"use_ema": true,
"ema_inv_gamma": 1.0,
"ema_power": 0.75,
"ema_max_decay": 0.9999,
"push_to_hub": true,
"hub_token": "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",
"hub_model_id": null,
"hub_private_repo": false,
"logger": "wandb",
"logging_dir": "logs",
"local_rank": -1,
"mixed_precision": "no",
"prediction_type": "v_prediction",
"loss": "mse",
"ddpm_num_steps": 1000,
"ddpm_num_inference_steps": 1000,
"ddpm_beta_schedule": "squaredcos_cap_v2",
"checkpointing_steps": 10000,
"checkpoints_total_limit": null,
"resume_from_checkpoint": "latest",
"enable_xformers_memory_efficient_attention": false
}
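
Taken together, these are the arguments for a conditional DDPM training run: v-prediction with a squared-cosine beta schedule, EMA weights, and a cosine learning-rate schedule with 500 warmup steps. A minimal sketch of how a dump like this could be loaded back into attribute-style arguments, e.g. to inspect or resume the run; the filename `training_args.json` and the use of `SimpleNamespace` are assumptions, not confirmed by the source:

```python
import json
from types import SimpleNamespace

# Hypothetical path; the actual filename of this dump is not given here.
CONFIG_PATH = "training_args.json"

with open(CONFIG_PATH) as f:
    # Each JSON key becomes an attribute, e.g. args.learning_rate.
    args = SimpleNamespace(**json.load(f))

# Effective batch size per optimizer step = per-device batch * accumulation steps.
effective_batch = args.train_batch_size * args.gradient_accumulation_steps

print(args.dataset_name)  # ['Mtot_Nbody_SIMBA', 'Mgas_SIMBA']
print(effective_batch)    # 16 * 4 = 64
```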