{
    "prior": {
        "clip": {
            "make": "openclip",
            "model": "ViT-H-14"
        },
        "net": {
            "dim": 1024,
            "depth": 12,
            "num_timesteps": 1000,
            "max_text_len": 77,
            "num_time_embeds": 1,
            "num_image_embeds": 1,
            "num_text_embeds": 1,
            "dim_head": 64,
            "heads": 12,
            "ff_mult": 4,
            "norm_out": true,
            "attn_dropout": 0.05,
            "ff_dropout": 0.05,
            "final_proj": true,
            "normformer": true,
            "rotary_emb": true
        },
        "image_embed_dim": 1024,
        "image_size": 224,
        "image_channels": 3,
        "timesteps": 1000,
        "sample_timesteps": 64,
        "cond_drop_prob": 0.1,
        "loss_type": "l2",
        "predict_x_start": true,
        "beta_schedule": "cosine",
        "condition_on_text_encodings": true
    },
    "data": {
        "batch_size": 350,
        "num_data_points": 2e9,
        "eval_every_seconds": 1600,
        "image_url": "s3://s-laion/vit-h-14-embeddings/img_emb/",
        "meta_url": "s3://s-laion/vit-h-14-embeddings/metadata/",
        "splits": {
            "train": 0.9,
            "val":  2.8e-6,
            "test": 0.0999972 
        }
    },
    "train": {
        "epochs": 5,
        "lr": 1.1e-4,
        "wd": 6.02e-2,
        "max_grad_norm": 0.5,
        "use_ema": true,
        "ema_beta": 0.9999,
        "ema_update_after_step": 51,
        "warmup_steps": 50,
        "amp": false,
        "save_every_seconds": 3600,
        "eval_timesteps": [64, 1000],
        "random_seed": 84513
    },
    "tracker": {
        "data_path": ".prior-big-run",
        "overwrite_data_path": true,
        "log": {
            "log_type": "wandb",
            "wandb_entity": "nousr_laion",
            "wandb_project": "h-14-prior",
            "wandb_resume": false,
            "verbose": true
        },
        "save": [
            {
                "save_to": "local",
                "save_type": "checkpoint",
                "save_latest_to": ".prior-big-run/latest_checkpoint.pth",
                "save_best_to": ".prior-big-run/best_checkpoint.pth"
            },
            {
                "save_to": "local",
                "ave_type": "model",
                "save_latest_to": ".prior-big-run/latest_model.pth",
                "save_best_to": ".prior-big-run/best_model.pth"
            },
            {
                "save_to": "huggingface",
                "huggingface_repo": "nousr/dalle2-pytorch",
                "save_meta_to": "h14-many-gpus/",
                "save_latest_to": "h14-many-gpus/latest_model.pth",
                "save_best_to": "h14-many-gpus/best_model.pth",
                "save_type": "model"
            },
            {
                "save_to": "huggingface",
                "huggingface_repo": "nousr/dalle2-pytorch",
                "save_meta_to": "h14-many-gpus/",
                "save_latest_to": "h14-many-gpus/latest_checkpoint.pth",
                "save_best_to": "h14-many-gpus/best_checkpoint.pth",
                "save_type": "checkpoint"
            }
        ],
        "load": {
            "load_from": "local",
            "file_path": ".prior-resume/latest_checkpoint.pth"
        }
    }
}
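
A minimal sketch of loading and sanity-checking this config from Python, assuming it is saved as train_prior_config.json (hypothetical filename). It uses only the standard library; in DALLE2-pytorch itself a file like this is parsed into a TrainDiffusionPriorConfig from dalle2_pytorch.train_configs (verify against the repo's train_diffusion_prior.py before relying on that entry point).

import json
import math

# Hypothetical path; adjust to wherever this config is saved.
with open("train_prior_config.json") as f:
    cfg = json.load(f)

# The train/val/test splits should partition the dataset exactly
# (0.9 + 2.8e-6 + 0.0999972 == 1.0 in this file).
splits = cfg["data"]["splits"]
assert math.isclose(sum(splits.values()), 1.0), "splits must sum to 1.0"

# The prior network width should match the CLIP image-embedding width;
# OpenCLIP's ViT-H-14 produces 1024-dim image embeddings.
assert cfg["prior"]["net"]["dim"] == cfg["prior"]["image_embed_dim"] == 1024

print("config OK; top-level sections:", list(cfg))

# Equivalent parse inside DALLE2-pytorch (API assumed from the repo;
# check train_diffusion_prior.py for the exact call):
#   from dalle2_pytorch.train_configs import TrainDiffusionPriorConfig
#   config = TrainDiffusionPriorConfig.from_json_path("train_prior_config.json")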