#!/bin/bash
# Entrypoint for a Kubernetes StatefulSet pod running distributed training.
# Derives this pod's node rank from the StatefulSet ordinal suffix of
# POD_NAME (e.g. "llamafactory-sft-1" -> "1").

# POD_NAME is injected into the environment by the StatefulSet
# (typically via the downward API). Take everything after the last '-'.
POD_INDEX=${POD_NAME##*-}

echo "[entrypoint.sh]$POD_NAME $POD_INDEX"

# An empty index means POD_NAME was unset or malformed; abort early.
[ -n "$POD_INDEX" ] || { echo "error." >&2; exit 1; }

# Each StatefulSet run should use a unique output path, e.g.
# train_$(date +%Y-%m-%d-%H-%M-%S). It is hard-coded here for demo purposes
# only; in production, generate the path externally and pass it in.
# This directory lives on shared storage visible to all pods.
outdir="saves/DeepSeek-R1-1.5B-Distill/lora/train_7194d492-0b5f-44be-b825-732d7d40ce03"

# The StatefulSet is named "llamafactory-sft"; pod 0 (via its headless
# service record) acts as the torchrun rendezvous master.
MASTER="llamafactory-sft-0.llamafactory-sft.default.svc"

# Poll DNS until the master pod's record resolves (pods may start out of
# order). Take only the first address so MASTER_IP is a single line even
# if multiple records exist.
while true; do
  MASTER_IP=$(getent hosts "$MASTER" | awk 'NR==1{print $1}')
  [ -n "$MASTER_IP" ] || { echo "[entrypoint.sh]dnslookup $MASTER" >&2; sleep 2; continue; }
  break
done

# Abort if the working tree is missing — otherwise torchrun would start in
# the wrong directory with a missing dataset/config (SC2164).
cd LLaMA-Factory/ || { echo "[entrypoint.sh]cd LLaMA-Factory failed" >&2; exit 1; }

# NNODES is the StatefulSet replica count (2 here).
# MASTER_PORT is a pre-agreed fixed port; every pod must use the same value.
FORCE_TORCHRUN=1 NNODES=2 NODE_RANK="$POD_INDEX" MASTER_ADDR="$MASTER_IP" MASTER_PORT=29500 \
llamafactory-cli train \
  --stage sft \
  --do_train True \
  --model_name_or_path /root/.cache/huggingface/DeepSeek-R1-Distill-Qwen-1.5B \
  --preprocessing_num_workers 16 \
  --finetuning_type lora \
  --template deepseek3 \
  --flash_attn auto \
  --dataset_dir data \
  --dataset stapes \
  --cutoff_len 2048 \
  --learning_rate 0.0004 \
  --num_train_epochs 70 \
  --max_samples 1000 \
  --per_device_train_batch_size 2 \
  --gradient_accumulation_steps 8 \
  --lr_scheduler_type cosine \
  --max_grad_norm 1.0 \
  --logging_steps 5 \
  --save_steps 100 \
  --warmup_steps 0 \
  --packing False \
  --report_to none \
  --output_dir "$outdir" \
  --fp16 True \
  --plot_loss True \
  --trust_remote_code True \
  --ddp_timeout 180000000 \
  --include_num_input_tokens_seen True \
  --optim adamw_torch \
  --lora_rank 8 \
  --lora_alpha 16 \
  --lora_dropout 0 \
  --lora_target all



# Optional: evaluate the fine-tuning result. Runs only on the master pod
# (rank 0) so the shared output directory is touched by a single writer.
if [ "$POD_INDEX" -eq 0 ]; then
    # 1. Merge the LoRA adapter into the base model.
    python merge.py --base-model-path /root/.cache/huggingface/DeepSeek-R1-Distill-Qwen-1.5B --lora-model-path "$outdir"
    # 2. Compare inference results against the base model.
    python test-sft.py --base-model-path /root/.cache/huggingface/DeepSeek-R1-Distill-Qwen-1.5B
    # 3. Remove the merged model to reclaim storage.
    rm -rf ./merged_model/
fi

echo "[entrypoint.sh]finished"
# Keep the container alive so Kubernetes does not restart the pod (and
# re-run training) once the job completes.
/bin/sleep infinity

