#!/bin/sh
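# scripts/eval-mgtv-internlm.sh
# Evaluate LoRA fine-tuned checkpoints of internlm2_5-7b-chat-1m on the MGTV
# logical-reasoning dataset, one checkpoint per training epoch.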
# Run from the repository root (one directory above this script).
BASEDIR=$(dirname "$0")
cd "$BASEDIR/.." || exit 1
echo "Current Directory:"
pwd
BASEDIR=$(pwd)
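# Log GPU, OS, CPU, and memory details for the run record.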
nvidia-smi
uname -a
cat /etc/os-release
lscpu
grep MemTotal /proc/meminfo
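# Base requirements and LLaMA-Factory are assumed to be installed already;
# only the transformers version is pinned here.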
#pip install -r requirements.txt
#cd ../LLaMA-Factory && pip install -e .[torch,bitsandbytes]
pip install transformers==4.41.2
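# Configuration read (via environment variables) by the evaluation script below:
# base model, dataset path, results CSV, LoRA adapter path prefix, and the
# range of epoch checkpoints to evaluate.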
export MODEL_NAME=internlm/internlm2_5-7b-chat-1m
export LOGICAL_REASONING_DATA_PATH=datasets/mgtv
export LOGICAL_REASONING_RESULTS_PATH=results/mgtv-results_p2_full_r3.csv
export ADAPTER_PATH_BASE=llama-factory/saves/internlm2_5_7b/lora/sft_bf16_p2_full_r3
export USING_LLAMA_FACTORY=true
export START_EPOCH=0
export END_EPOCH=3
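# Run the all-epochs evaluation with the settings above.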
echo "Eval $MODEL_NAME with $ADAPTER_PATH_BASE"
python llm_toolkit/eval_logical_reasoning_all_epochs.py