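# Finetune-stage config for a two-stage (align -> finetune) Prismatic-style
# VLM run: dual DINO+SigLIP vision backbone with a Phi-3 LLM, trained on the
# LLaVA-LRV instruct mix (see dataset below).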
dataset:
  align_stage_components:
  - download/llava-laion-cc-sbu-558k/chat.json
  - download/llava-laion-cc-sbu-558k
  dataset_id: llava-lrv
  dataset_root_dir: /data/projects/12003782/training_data
  finetune_stage_components:
  - download/llava-v1.5-instruct/llava_v1_5_lrv_mix1008k.json
  - download/llava-v1.5-instruct
  type: llava-lrv
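  # Assumption: each *_stage_components pair is (annotation JSON, image root),
  # resolved relative to dataset_root_dir; the align stage uses the
  # LAION/CC/SBU caption data and the finetune stage the 1,008K LRV instruct mix.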
model:
  align_epochs: 1
  align_global_batch_size: 64
  align_learning_rate: 0.001
  align_lr_scheduler_type: linear-warmup+cosine-decay
  align_max_grad_norm: 1.0
  align_max_steps: null
  align_per_device_batch_size: 8
  align_train_strategy: fsdp-shard-grad-op
  align_warmup_ratio: 0.03
  align_weight_decay: 0.0
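  # Align-stage batch arithmetic, assuming the trainer derives gradient
  # accumulation as global / (per_device * num_gpus): on 8 GPUs,
  # 64 / (8 * 8) = 1, i.e. no accumulation. fsdp-shard-grad-op shards only
  # gradients and optimizer state (ZeRO-2-like), which is typically enough
  # for the align stage, where only the projector is trained.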
  arch_specifier: fused-gelu-mlp
  enable_gradient_checkpointing: true
  enable_mixed_precision_training: true
  finetune_epochs: 1
  finetune_global_batch_size: 32
  finetune_learning_rate: 2.0e-05
  finetune_lr_scheduler_type: linear-warmup+cosine-decay
  finetune_max_grad_norm: 1.0
  finetune_max_steps: null
  finetune_per_device_batch_size: 4
  finetune_train_strategy: fsdp-full-shard
  finetune_warmup_ratio: 0.03
  finetune_weight_decay: 0.1
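  # Finetune-stage equivalent under the same assumption: 32 / (4 * 8) = 1 on
  # 8 GPUs. The heavier fsdp-full-shard strategy (parameters, gradients, and
  # optimizer state all sharded, ZeRO-3-like) and the much lower learning rate
  # (2e-5 vs. 1e-3) reflect that far more of the model is trainable here.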
  image_resize_strategy: resize-naive
  llm_backbone_id: phi3_base
  llm_max_length: 2048
  model_id: dino-siglip-phi3-lora-model
  reduce_in_full_precision: false
  type: dino-siglip-phi3-lora-model
  vision_backbone_id: dinosiglip-vit-so-384px
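  # Backbone notes, inferred from the IDs: dinosiglip-vit-so-384px fuses
  # DINOv2 and SigLIP ViT features at 384px input, connected to the LLM via
  # the fused-gelu-mlp projector; phi3_base is the language model, capped at
  # llm_max_length: 2048 tokens. The "lora" in model_id suggests LoRA adapters
  # on the LLM, though no rank/alpha hyperparameters appear in this file.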
pretrained_checkpoint: /data/projects/12003782/model_weights/runs/llava-lrv+dino-siglip-phi3-lora-model+stage-align+x7/checkpoints/latest-checkpoint.pt
run_id: llava-lrv+dino-siglip-phi3-lora-model+stage-finetune+x7
run_root_dir: /data/projects/12003782/model_weights/runs
seed: 7
stage: finetune
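# Resumes from the align-stage checkpoint of the same run family: the run_id
# pattern appears to be <dataset_id>+<model_id>+stage-<stage>+x<seed>, so the
# x7 suffix matches seed: 7, and outputs land under run_root_dir/<run_id>/.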
trackers:
- jsonl
- wandb
wandb_entity: null
wandb_project: nscc-prismatic-phi3
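# jsonl writes local metric logs; wandb reports to the nscc-prismatic-phi3
# project (wandb_entity: null presumably falls back to the logged-in user's
# default entity).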