#!/bin/bash
#
# Launch script: multilingual CLIP training (ViT-L-14, xlmr-large text distil)
# on laion2b-multi via torch.distributed.launch + DeepSpeed.
#
# Required env vars (set by the cluster launcher): WORLD_SIZE, RANK, MASTER_ADDR.
#
# One-time dependency build:
# DS_BUILD_FUSED_ADAM=1 pip install -U deepspeed==0.6.5

# Fail fast: abort on errors, unset variables, and pipeline failures.
set -euo pipefail

# Make the project sources importable; ${PYTHONPATH:-} keeps set -u happy
# when PYTHONPATH is not already defined.
export PYTHONPATH="${PYTHONPATH:-}:$(pwd)/src"

# NCCL / InfiniBand configuration for multi-node communication.
export NCCL_SOCKET_IFNAME=eth0
export NCCL_IB_DISABLE=0
export NCCL_IB_CUDA_SUPPORT=1
export NCCL_IB_GID_INDEX=0
export NCCL_IB_HCA=mlx5_2,mlx5_5          # HCA ports used for IB traffic
export NCCL_DEBUG=info                    # verbose NCCL logging for debugging
export OMP_NUM_THREADS=4                  # cap CPU threads per worker
# MASTER_ADDR is supplied by the cluster launcher; abort early with a clear
# message if it is missing instead of launching a broken rendezvous.
echo "master_ip: ${MASTER_ADDR:?MASTER_ADDR must be set}"

cd src || { echo "error: cannot cd to src" >&2; exit 1; }

# Start from a clean log directory for this run name.
rm -rf -- logs/m18_laion2b

# laion2b-multi (brace range expands to the webdataset shard list)
train_data="/share/projset/laion5B-data/laion2b_multi_data/img_data_filter/{000000..162417}.tar"
# laion400m-en
# train_data="laion400m/laion400m-full-release/img_data/laion400m-dat-release/{00000..41455}.tar"

# Launch 8 training processes (one per GPU) on each node.
# TOKENIZERS_PARALLELISM/WANDB_MODE/HF_DATASETS_OFFLINE are scoped to this
# command only. NOTE(review): torch.distributed.launch is deprecated upstream;
# consider migrating to torchrun (drop --use_env) when upgrading PyTorch.
TOKENIZERS_PARALLELISM=false WANDB_MODE=offline HF_DATASETS_OFFLINE=1 \
python -m torch.distributed.launch \
    --nproc_per_node=8 \
    --nnodes="${WORLD_SIZE}" \
    --node_rank="${RANK}" \
    --master_addr="${MASTER_ADDR}" \
    --master_port=28379 \
    --use_env \
    training/main_deepspeed.py \
        --dataset-type="webdataset" \
        --train-num-samples 2000000000 \
        --train-data="${train_data}" \
        --dataset-resampled \
        --save-frequency 1 \
        --zeroshot-frequency 1 \
        --report-to="wandb" \
        --wandb-notes="laion2b_wandb" \
        --imagenet-val="/share/projset/baaishare/baai-mrnd/datasets/imagenet2012/val/" \
        --warmup 2000 \
        --batch-size=128 \
        --epochs=2 \
        --lr=2e-6 \
        --wd=0.05 \
        --norm_gradient_clip=5.0 \
        --workers=4 \
        --model ViT-L-14 \
        --name='m18_laion2b' \
        --seed 3407 \
        --text-distil='xlmr-large' \
        --gather-with-grad \
        --language-zh \
        --lock-image \
        --env_type="pytorchDDP"
