#!/bin/bash
# Evaluate the InternLM 2.5 7B chat model with a LoRA adapter on the MGTV
# logical-reasoning dataset under several inference precision settings.
# Resolve the script's directory and work from the repository root.
BASEDIR=$(dirname "$0")
cd "$BASEDIR/.." || exit 1
echo "Current Directory:"
pwd
BASEDIR=$(pwd)

# Log hardware / OS information for reproducibility of the eval run.
nvidia-smi
uname -a
cat /etc/os-release
lscpu
grep MemTotal /proc/meminfo

# One-time environment setup — kept for reference, run manually if needed.
#pip install -r requirements.txt
#cd ../LLaMA-Factory && pip install -e .[torch,bitsandbytes] && cd "$BASEDIR"
#pip install transformers==4.41.2

#export USING_LLAMA_FACTORY=false
export USING_LLAMA_FACTORY=true

export LOAD_IN_4BIT=false

export MODEL_NAME=internlm/internlm2_5-7b-chat-1m
export ADAPTER_NAME_OR_PATH=inflaton-ai/InternLM_2_5-7b_LoRA-Adapter

export LOGICAL_REASONING_DATA_PATH=datasets/mgtv
export LOGICAL_REASONING_RESULTS_PATH=results/mgtv-results_internlm_best.csv

# Run 1: full float32 inference.
export USE_FLOAT32_FOR_INFERENCE=true
echo "Eval $MODEL_NAME with $ADAPTER_NAME_OR_PATH"
python llm_toolkit/eval_logical_reasoning.py

# Run 2: bfloat16 inference.
export USE_FLOAT32_FOR_INFERENCE=false
export USE_BF16_FOR_INFERENCE=true
echo "Eval $MODEL_NAME with $ADAPTER_NAME_OR_PATH"
python llm_toolkit/eval_logical_reasoning.py

# Run 3: default (fp16) inference — both precision overrides off.
export USE_FLOAT32_FOR_INFERENCE=false
export USE_BF16_FOR_INFERENCE=false
echo "Eval $MODEL_NAME with $ADAPTER_NAME_OR_PATH"
python llm_toolkit/eval_logical_reasoning.py

# Run 4: 4-bit quantized load with bf16 compute.
export LOAD_IN_4BIT=true
export USE_FLOAT32_FOR_INFERENCE=false
export USE_BF16_FOR_INFERENCE=true
echo "Eval $MODEL_NAME with $ADAPTER_NAME_OR_PATH"
python llm_toolkit/eval_logical_reasoning.py