# NOTE(review): the three lines below are file-viewer residue (blob size,
# commit hash, line-number gutter) scraped in with the file; commented out
# so the document parses as valid YAML. Content preserved verbatim.
# File size: 609 Bytes
# 3eb682b |
# 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 |
# Training configuration (prompt-tuning run).
# NOTE(review): key names suggest a vision-language prompt-tuning setup
# (injected hidden states between layer indices, visual prefix) — semantics
# of individual flags should be confirmed against the consuming code.

append_eos_token: true
batch_size_test: 64
batch_size_train: 16
end_layer_idx: 31
image_res: 224
injected_hidden_states: 6
lm_loss_weight: 0.1
num_workers: 4

# Converted from flow style to block style for readability/diffability.
# Exponent floats rewritten with an explicit mantissa dot: the original
# `2e-05` / `1e-05` forms do not match the YAML 1.1 float resolver, so
# PyYAML loads them as the *strings* "2e-05"/"1e-05"; `2.0e-05` parses
# as a float under both YAML 1.1 and 1.2. Values are numerically unchanged.
optimizer:
  lr: 2.0e-05
  opt: adamW
  prompt_lr: 1.0e-05
  weight_decay: 0.02

prompt_len: 10
prompt_tuning: true
replace_added_tokens: true

# NOTE(review): "schedular" looks like a typo for "scheduler", but the key
# is kept byte-identical because consuming code presumably reads this exact
# name — confirm before renaming.
schedular:
  cooldown_epochs: 0
  decay_rate: 1
  epochs: 8
  lr: 2.0e-05
  min_lr: 1.0e-06
  sched: cosine
  scheduler_groups: 0
  warmup_epochs: 4
  warmup_lr: 1.0e-05

shift_labels: false
start_layer_idx: 19
unfreeze_text_layer_norm: false
unfreeze_vision_layer_norm: false
use_cache: false
use_vis_prefix: true
warm_up: true
# |  (trailing viewer-gutter residue, commented out — not part of the config)