{ "prior": { "clip": { "make": "openai", "model": "ViT-L/14" }, "net": { "dim": 768, "depth": 12, "num_timesteps": 1000, "num_time_embeds": 1, "num_image_embeds": 1, "num_text_embeds": 1, "dim_head": 64, "heads": 12, "ff_mult": 4, "norm_out": true, "attn_dropout": 0.00, "ff_dropout": 0.00, "final_proj": true, "normformer": true, "rotary_emb": true }, "image_embed_dim": 768, "image_size": 224, "image_channels": 3, "timesteps": 1000, "sample_timesteps": 64, "cond_drop_prob": 0.0, "loss_type": "l2", "predict_x_start": true, "beta_schedule": "cosine", "condition_on_text_encodings": true }, "data": { "batch_size": 256, "num_data_points": 250000000, "eval_every_seconds": 1800, "image_url": "s3://s-datasets/laion-aesthetic/embeddings/img_emb", "meta_url": "s3://s-datasets/laion-aesthetic/embeddings/metadata", "splits": { "train": 0.9, "val": 2.4e-05, "test":0.09997599999999998 } }, "train": { "epochs": 5, "lr": 1.1e-4, "wd": 6.02e-2, "max_grad_norm": 0.5, "use_ema": true, "ema_beta": 0.9999, "amp": false, "save_every_seconds": 3600, "eval_timesteps": [64, 250, 1000] }, "tracker": { "data_path": ".prior-updates", "overwrite_data_path": true, "log": { "log_type": "wandb", "wandb_entity": "nousr_laion", "wandb_project": "dalle2_diffusion_prior", "wandb_resume": false, "verbose": true }, "load": { "resume": false }, "save": [ { "save_to": "local", "save_type": "checkpoint", "save_latest_to": ".prior-updates/latest_checkpoint.pth", "save_best_to": ".prior-updates/best_checkpoint.pth" }, { "save_to": "huggingface", "huggingface_repo": "nousr/conditioned-prior", "save_meta_to": "vit-l-14/aesthetic/", "save_latest_to": "vit-l-14/aesthetic/latest.pth", "save_best_to": "vit-l-14/aesthetic/best.pth", "save_type": "model" } ] } }