#!/usr/bin/env bash
# Launch distributed training via torch.distributed.launch.
#
# Usage:
#   ./dist_train.sh CONFIG GPUS [extra train.py args...]
#
# Environment overrides:
#   PORT                 master port for the rendezvous (default: 29500)
#   CUDA_VISIBLE_DEVICES GPUs exposed to the job (default: 0,1)
#   RESUME_FROM          checkpoint passed to --resume-from
#                        (default: project latest.pth below)
set -euo pipefail

CONFIG=${1:?usage: $0 CONFIG GPUS [train args...]}
GPUS=${2:?usage: $0 CONFIG GPUS [train args...]}
PORT=${PORT:-29500}
# NOTE(review): hard-coded user-specific checkpoint path kept as the default
# for backward compatibility; override with RESUME_FROM for other runs.
RESUME_FROM=${RESUME_FROM:-/data1/users/zhengzhiyu/mtp_workplace/documents/OBBDetection/work_dirs/upernet_fcn_swin_t_10x_mota10/latest.pth}

# Respect a caller-provided device list; fall back to the original 0,1.
# NOTE(review): the number of ids here should match $GPUS — confirm.
export CUDA_VISIBLE_DEVICES=${CUDA_VISIBLE_DEVICES:-0,1}

# Prepend the repo root (parent of this script) so train.py can import it.
PYTHONPATH="$(dirname "$0")/..":${PYTHONPATH:-} \
python -m torch.distributed.launch \
    --nproc_per_node="$GPUS" \
    --master_port="$PORT" \
    "$(dirname "$0")/train.py" "$CONFIG" \
    --launcher pytorch \
    "${@:3}" \
    --resume-from "$RESUME_FROM" \
    --options 'find_unused_parameters'=False