dh-mc committed · Commit ace04e7 · 1 Parent(s): dc7abea
competition/10e_InternLM_NV4090_4bit_eval.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
results/mgtv-results_internlm_nv4090.csv CHANGED
The diff for this file is too large to render. See raw diff
 
scripts/eval-mgtv-nv4090.sh CHANGED
@@ -18,17 +18,17 @@ export LOAD_IN_4BIT=false
 export MODEL_NAME=internlm/internlm2_5-7b-chat-1m
 export ADAPTER_NAME_OR_PATH=inflaton-ai/InternLM_2_5-7b_LoRA-Adapter
 export LOGICAL_REASONING_DATA_PATH=datasets/mgtv
-export LOGICAL_REASONING_RESULTS_PATH=results/mgtv-results_internlm_best.csv
+export LOGICAL_REASONING_RESULTS_PATH=results/mgtv-results_internlm_nv4090.csv
 
-export USE_FLOAT32_FOR_INFERENCE=false
+# export USE_FLOAT32_FOR_INFERENCE=false
 
-export USE_BF16_FOR_INFERENCE=false
-echo "Eval $MODEL_NAME with $ADAPTER_NAME_OR_PATH"
-python llm_toolkit/eval_logical_reasoning.py
+# export USE_BF16_FOR_INFERENCE=false
+# echo "Eval $MODEL_NAME with $ADAPTER_NAME_OR_PATH"
+# python llm_toolkit/eval_logical_reasoning.py
 
 export USE_BF16_FOR_INFERENCE=true
-echo "Eval $MODEL_NAME with $ADAPTER_NAME_OR_PATH"
-python llm_toolkit/eval_logical_reasoning.py
+# echo "Eval $MODEL_NAME with $ADAPTER_NAME_OR_PATH"
+# python llm_toolkit/eval_logical_reasoning.py
 
 export LOAD_IN_4BIT=true
 echo "Eval $MODEL_NAME with $ADAPTER_NAME_OR_PATH"
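Net effect of the change: the results path now points at results/mgtv-results_internlm_nv4090.csv, the fp32/fp16 and bf16-only evaluation runs are commented out, and only the 4-bit run remains active. A minimal sketch of the effective configuration of scripts/eval-mgtv-nv4090.sh after this commit; the final python invocation lies beyond the visible hunk and is assumed to mirror the commented-out runs above:

#!/bin/sh
# Sketch of the active code path after this commit (commented-out runs omitted).
export MODEL_NAME=internlm/internlm2_5-7b-chat-1m
export ADAPTER_NAME_OR_PATH=inflaton-ai/InternLM_2_5-7b_LoRA-Adapter
export LOGICAL_REASONING_DATA_PATH=datasets/mgtv
export LOGICAL_REASONING_RESULTS_PATH=results/mgtv-results_internlm_nv4090.csv

export USE_BF16_FOR_INFERENCE=true
export LOAD_IN_4BIT=true
echo "Eval $MODEL_NAME with $ADAPTER_NAME_OR_PATH"
python llm_toolkit/eval_logical_reasoning.py  # assumed continuation, as in the earlier (now commented) runs

Run it from the repository root as usual, e.g. `bash scripts/eval-mgtv-nv4090.sh`; with this configuration the script performs a single 4-bit (bf16 compute) evaluation and writes to the NV4090 results CSV.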