#!/bin/sh
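
# Resolve this script's location and run everything from the repository root.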
BASEDIR=$(dirname "$0")
cd "$BASEDIR/.." || exit 1
echo "Current Directory:"
pwd
BASEDIR=$(pwd)
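
# Log GPU, kernel, OS, CPU, and memory details for reproducibility.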
nvidia-smi
uname -a
cat /etc/os-release
lscpu
grep MemTotal /proc/meminfo
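
# Optional environment setup (uncomment when dependencies are not preinstalled).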
#pip install -r requirements.txt
#cd ../LLaMA-Factory && pip install -e .[torch,bitsandbytes] && cd $BASEDIR
#pip install transformers==4.41.2
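
# Evaluation configuration: model, LoRA adapter, dataset, and results paths.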
#export USING_LLAMA_FACTORY=false
export USING_LLAMA_FACTORY=true
export LOAD_IN_4BIT=false
export MODEL_NAME=internlm/internlm2_5-7b-chat-1m
export ADAPTER_NAME_OR_PATH=inflaton-ai/InternLM_2_5-7b_LoRA-Adapter
export LOGICAL_REASONING_DATA_PATH=datasets/mgtv
export LOGICAL_REASONING_RESULTS_PATH=results/mgtv-results_internlm_best.csv
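
# Run 1: evaluate with float32 inference.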
export USE_FLOAT32_FOR_INFERENCE=true
echo "Eval $MODEL_NAME with $ADAPTER_NAME_OR_PATH"
python llm_toolkit/eval_logical_reasoning.py
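
# Run 2: evaluate with bfloat16 inference.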
export USE_FLOAT32_FOR_INFERENCE=false
export USE_BF16_FOR_INFERENCE=true
echo "Eval $MODEL_NAME with $ADAPTER_NAME_OR_PATH"
python llm_toolkit/eval_logical_reasoning.py
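
# Run 3: evaluate with neither float32 nor bfloat16 forced (toolkit default precision).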
export USE_FLOAT32_FOR_INFERENCE=false
export USE_BF16_FOR_INFERENCE=false
echo "Eval $MODEL_NAME with $ADAPTER_NAME_OR_PATH"
python llm_toolkit/eval_logical_reasoning.py
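
# Run 4: evaluate with 4-bit quantized loading and bfloat16 inference.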
export LOAD_IN_4BIT=true
export USE_FLOAT32_FOR_INFERENCE=false
export USE_BF16_FOR_INFERENCE=true
echo "Eval $MODEL_NAME with $ADAPTER_NAME_OR_PATH"
python llm_toolkit/eval_logical_reasoning.py