class AttrDict(dict):
    """dict subclass whose items are also accessible as attributes."""

    def __init__(self, *args, **kwargs):
        super(AttrDict, self).__init__(*args, **kwargs)
        # Make the attribute namespace the dict itself, so `d.key` and
        # `d["key"]` refer to the same storage.
        self.__dict__ = self

    def override(self, attrs):
        # Merge in overrides given as a dict, or as a list/tuple/set of dicts.
        if isinstance(attrs, dict):
            self.__dict__.update(**attrs)
        elif isinstance(attrs, (list, tuple, set)):
            for attr in attrs:
                self.override(attr)
        elif attrs is not None:
            raise NotImplementedError
        return self
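
# Illustrative usage sketch (added for clarity, not part of the original file):
# keys behave as attributes, and `override` merges in updates from a dict or a
# collection of dicts.
#
#     params = AttrDict(batch_size=16, fp16=True)
#     params.override({"batch_size": 32})
#     assert params.batch_size == 32 and params["batch_size"] == 32
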
params_chord = AttrDict(
    # Training params
    batch_size=16,
    max_epoch=10,
    learning_rate=5e-5,
    max_grad_norm=10,
    fp16=True,
    # unet
    in_channels=2,
    out_channels=2,
    channels=64,
    attention_levels=[2, 3],
    n_res_blocks=2,
    channel_multipliers=[1, 2, 4, 4],
    n_heads=4,
    tf_layers=1,
    d_cond=12,
    # ldm
    linear_start=0.00085,
    linear_end=0.0120,
    n_steps=1000,
    latent_scaling_factor=0.18215
)

params_chord_cond = AttrDict(
    # Training params
    batch_size=16,
    max_epoch=10,
    learning_rate=5e-5,
    max_grad_norm=10,
    fp16=True,
    # unet
    in_channels=4,
    out_channels=2,
    channels=64,
    attention_levels=[2, 3],
    n_res_blocks=2,
    channel_multipliers=[1, 2, 4, 4],
    n_heads=4,
    tf_layers=1,
    d_cond=2,
    # ldm
    linear_start=0.00085,
    linear_end=0.0120,
    n_steps=1000,
    latent_scaling_factor=0.18215
)

params_chord_lsh_cond = AttrDict(
    # Training params
    batch_size=16,
    max_epoch=10,
    learning_rate=5e-5,
    max_grad_norm=10,
    fp16=True,
    # unet
    in_channels=6,
    out_channels=2,
    channels=64,
    attention_levels=[2, 3],
    n_res_blocks=2,
    channel_multipliers=[1, 2, 4, 4],
    n_heads=4,
    tf_layers=1,
    d_cond=2,
    # ldm
    linear_start=0.00085,
    linear_end=0.0120,
    n_steps=1000,
    latent_scaling_factor=0.18215
)
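

if __name__ == "__main__":
    # Quick sanity check (illustrative addition, assuming the module is run
    # directly): the three presets share the training and LDM settings and
    # differ only in the UNet input channels and conditioning dimension `d_cond`.
    for name, p in [("chord", params_chord),
                    ("chord_cond", params_chord_cond),
                    ("chord_lsh_cond", params_chord_lsh_cond)]:
        print(name, "in_channels =", p.in_channels, "d_cond =", p.d_cond)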