# protpardelle/configs/seqdes.yml
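# Training configuration for the sequence-design ('seqdes') stage of protpardelle.
# From the settings below, this appears to train the mini-MPNN sequence head on top of a
# pretrained all-atom structure model loaded from allatom_state_dict.pth
# (see model.pretrained_modules / model.struct_model_checkpoint).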
train:
  home_dir: '/home/user/app'
  seed: 0
  checkpoint: ['', 0]
  batch_size: 32
  max_epochs: 10000
  eval_freq: 3600 # seconds
  checkpoint_freq: 20
  checkpoints: []
  lr: 0.0001
  warmup_steps: 1000
  decay_steps: 400_000
  clip_grad_norm: True
  grad_clip_val: 1.0
  weight_decay: 0.0
  n_eval_samples: 8
  sample_length_range: [50, 512]
  sc_num_seqs: 4
  eval_loss_t: [0.1, 0.3, 0.5, 0.7, 0.9]
  self_cond_train_prob: 0.9
  dgram_loss_weight: False
  subsample_eval_set: 0.1
  crop_conditional: False
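# Dataset settings. 'ingraham_cath_dataset' presumably refers to the CATH-derived
# dataset/splits of Ingraham et al. (2019); fixed_size is likely the crop/pad length in
# residues, and n_aatype_tokens = 21 likely means 20 standard amino acids plus one
# unknown/mask token (interpretations inferred from the field names, not from the source).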
data:
  pdb_path: 'datasets/ingraham_cath_dataset'
  fixed_size: 512
  n_aatype_tokens: 21
  se3_data_augment: True
  sigma_data: 10.0
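# Diffusion noise-level settings. psigma_mean / psigma_std look like the P_mean / P_std of
# an EDM-style log-normal noise distribution (Karras et al., 2022), consistent with
# sigma_data above; 'mpnn' and 'uniform' presumably name the noise-level samplers used at
# train and sampling time, with s_min / s_max bounding sigma (assumptions from the names).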
diffusion:
  training:
    function: 'mpnn'
    psigma_mean: -1.2
    psigma_std: 1.2
    time_power: 30.0
    constant_val: 0.02
  sampling:
    function: 'uniform'
    s_min: 0.001
    s_max: 60
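# Model assembly. task: 'seqdes' selects sequence design; pretrained_modules lists which
# sub-modules are initialized from a checkpoint (here the structure model, loaded from
# allatom_state_dict.pth and presumably kept frozen while the MPNN head is trained).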
model:
  task: 'seqdes' # 'backbone', 'allatom', 'seqdes', 'codesign'
  pretrained_modules: ['struct_model'] # 'struct_model', 'mpnn_model'
  struct_model_checkpoint: 'protpardelle/checkpoints/allatom_state_dict.pth'
  mpnn_model_checkpoint: ''
  crop_conditional: False
  dummy_fill_masked_atoms: False
  debug_mpnn: True
  struct_model:
    arch: 'uvit'
    n_channel: 256
    n_atoms: 37
    noise_cond_mult: 4
    uvit:
      patch_size: 1
      n_layers: 6
      n_heads: 8
      dim_head: 32
      n_filt_per_layer: [] # None or [] for vanilla trf
      n_blocks_per_layer: 2
      cat_pwd_to_conv: False
      conv_skip_connection: False # n layers must == 1
      position_embedding_type: 'rotary'
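  # Sequence-design head. These settings (hidden channels, layer count, neighbor count)
  # resemble a ProteinMPNN-style message-passing network; n_neighbors is presumably the
  # k-NN graph size and noise_cond_mult a width multiplier for the noise conditioning
  # (assumptions from the field names).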
  mpnn_model:
    use_self_conditioning: False
    label_smoothing: 0.0
    n_channel: 128
    n_layers: 3
    n_neighbors: 32
    noise_cond_mult: 4
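
# A minimal sketch (not from the protpardelle repo) of loading this config with PyYAML
# into nested attribute access; the helper name and usage are illustrative assumptions:
#
#   import yaml
#   from types import SimpleNamespace
#
#   def to_namespace(node):
#       """Recursively convert nested dicts into attribute-accessible namespaces."""
#       if isinstance(node, dict):
#           return SimpleNamespace(**{k: to_namespace(v) for k, v in node.items()})
#       return node
#
#   with open('configs/seqdes.yml') as f:
#       cfg = to_namespace(yaml.safe_load(f))
#
#   assert cfg.model.task == 'seqdes'
#   print(cfg.train.lr, cfg.diffusion.training.function)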