Spaces:
Build error
Build error
final rank: 42
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- competition/07a_Gemma2_Colab.ipynb +0 -0
- competition/15b_Gemma_p2_analysis.ipynb +0 -0
- competition/16_Submissions.ipynb +0 -0
- llm_toolkit/tune_logical_reasoning.py +3 -3
- results/mgtv-results_colab_p2_gemma2.csv +2 -2
- results/mgtv-results_p2_r3_full_metrics.csv +2 -2
- scripts/eval-mgtv-gemma2_9b.sh +31 -0
- scripts/tune-mgtv-qwen2_7b.sh +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/README.md +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/adapter_config.json +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/adapter_model.safetensors +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/optimizer.pt +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/rng_state.pth +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/scheduler.pt +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/special_tokens_map.json +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/tokenizer.json +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/tokenizer.model +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/tokenizer_config.json +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/trainer_state.json +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/training_args.bin +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/README.md +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/adapter_config.json +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/adapter_model.safetensors +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/optimizer.pt +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/rng_state.pth +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/scheduler.pt +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/special_tokens_map.json +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/tokenizer.json +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/tokenizer.model +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/tokenizer_config.json +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/trainer_state.json +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/training_args.bin +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/README.md +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/adapter_config.json +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/adapter_model.safetensors +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/optimizer.pt +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/rng_state.pth +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/scheduler.pt +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/special_tokens_map.json +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/tokenizer.json +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/tokenizer.model +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/tokenizer_config.json +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/trainer_state.json +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/training_args.bin +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-11000/README.md +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-11000/adapter_config.json +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-11000/adapter_model.safetensors +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-11000/optimizer.pt +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-11000/rng_state.pth +0 -0
- unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-11000/scheduler.pt +0 -0
competition/07a_Gemma2_Colab.ipynb
CHANGED
The diff for this file is too large to render.
See raw diff
|
|
competition/15b_Gemma_p2_analysis.ipynb
ADDED
The diff for this file is too large to render.
See raw diff
|
|
competition/16_Submissions.ipynb
CHANGED
The diff for this file is too large to render.
See raw diff
|
|
llm_toolkit/tune_logical_reasoning.py
CHANGED
@@ -140,7 +140,7 @@ tokenizer.save_pretrained(local_model)
|
|
140 |
|
141 |
print("Evaluating fine-tuned model: " + model_name)
|
142 |
FastLanguageModel.for_inference(model) # Enable native 2x faster inference
|
143 |
-
predictions = eval_model(model, tokenizer,
|
144 |
|
145 |
gpu_stats = torch.cuda.get_device_properties(0)
|
146 |
start_gpu_memory = round(torch.cuda.max_memory_reserved() / 1024 / 1024 / 1024, 3)
|
@@ -151,12 +151,12 @@ print(f"{start_gpu_memory} GB of memory reserved.")
|
|
151 |
save_results(
|
152 |
model_name + "(unsloth_finetuned)",
|
153 |
results_path,
|
154 |
-
|
155 |
predictions,
|
156 |
debug=True,
|
157 |
)
|
158 |
|
159 |
-
metrics = calc_metrics(
|
160 |
print(metrics)
|
161 |
|
162 |
model.push_to_hub(hub_model, token=token) # Online saving
|
|
|
140 |
|
141 |
print("Evaluating fine-tuned model: " + model_name)
|
142 |
FastLanguageModel.for_inference(model) # Enable native 2x faster inference
|
143 |
+
predictions = eval_model(model, tokenizer, dataset["test"])
|
144 |
|
145 |
gpu_stats = torch.cuda.get_device_properties(0)
|
146 |
start_gpu_memory = round(torch.cuda.max_memory_reserved() / 1024 / 1024 / 1024, 3)
|
|
|
151 |
save_results(
|
152 |
model_name + "(unsloth_finetuned)",
|
153 |
results_path,
|
154 |
+
dataset["test"],
|
155 |
predictions,
|
156 |
debug=True,
|
157 |
)
|
158 |
|
159 |
+
metrics = calc_metrics(dataset["test"]["label"], predictions, debug=True)
|
160 |
print(metrics)
|
161 |
|
162 |
model.push_to_hub(hub_model, token=token) # Online saving
|
results/mgtv-results_colab_p2_gemma2.csv
CHANGED
@@ -1,3 +1,3 @@
|
|
1 |
version https://git-lfs.github.com/spec/v1
|
2 |
-
oid sha256:
|
3 |
-
size
|
|
|
1 |
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:157c7b5b2fba5a2f3874a202d9c210e7d4af22f52adce5a0f14309df17e064ba
|
3 |
+
size 2797369
|
results/mgtv-results_p2_r3_full_metrics.csv
CHANGED
@@ -1,3 +1,3 @@
|
|
1 |
version https://git-lfs.github.com/spec/v1
|
2 |
-
oid sha256:
|
3 |
-
size
|
|
|
1 |
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:4010fd69e8ef81a6e6256e4733a49a760d567f010ebe9c3a36ac976564e53928
|
3 |
+
size 408
|
scripts/eval-mgtv-gemma2_9b.sh
ADDED
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/bin/sh
|
2 |
+
|
3 |
+
BASEDIR=$(dirname "$0")
|
4 |
+
cd $BASEDIR/..
|
5 |
+
echo Current Directory:
|
6 |
+
pwd
|
7 |
+
|
8 |
+
BASEDIR=`pwd`
|
9 |
+
|
10 |
+
nvidia-smi
|
11 |
+
uname -a
|
12 |
+
cat /etc/os-release
|
13 |
+
lscpu
|
14 |
+
grep MemTotal /proc/meminfo
|
15 |
+
|
16 |
+
# pip install unsloth[colab-new] file:///common2/dh.huang.2023/code/unsloth
|
17 |
+
# pip install --no-deps "xformers<0.0.27" "trl<0.9.0" peft accelerate bitsandbytes
|
18 |
+
|
19 |
+
export LOAD_IN_4BIT=false
|
20 |
+
export USING_LLAMA_FACTORY=false
|
21 |
+
|
22 |
+
export MODEL_NAME=google/gemma-2-9b-it
|
23 |
+
export USING_P1_PROMPT_TEMPLATE=false
|
24 |
+
export LOGICAL_REASONING_DATA_PATH=datasets/mgtv
|
25 |
+
|
26 |
+
export MODEL_PREFIX=gemma2-9b_lora_sft_bf16
|
27 |
+
export LOGICAL_REASONING_RESULTS_PATH=results/$MODEL_PREFIX-p2.csv
|
28 |
+
export ADAPTER_PATH_BASE=unsloth/gemma-2-9b-it
|
29 |
+
|
30 |
+
echo "Eval $MODEL_NAME with $ADAPTER_PATH_BASE"
|
31 |
+
python llm_toolkit/eval_logical_reasoning_all_epochs.py
|
scripts/tune-mgtv-qwen2_7b.sh
CHANGED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/README.md
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/adapter_config.json
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/adapter_model.safetensors
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/optimizer.pt
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/rng_state.pth
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/scheduler.pt
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/special_tokens_map.json
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/tokenizer.json
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/tokenizer.model
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/tokenizer_config.json
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/trainer_state.json
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-1000/training_args.bin
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/README.md
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/adapter_config.json
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/adapter_model.safetensors
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/optimizer.pt
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/rng_state.pth
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/scheduler.pt
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/special_tokens_map.json
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/tokenizer.json
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/tokenizer.model
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/tokenizer_config.json
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/trainer_state.json
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10000/training_args.bin
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/README.md
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/adapter_config.json
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/adapter_model.safetensors
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/optimizer.pt
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/rng_state.pth
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/scheduler.pt
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/special_tokens_map.json
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/tokenizer.json
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/tokenizer.model
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/tokenizer_config.json
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/trainer_state.json
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-10500/training_args.bin
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-11000/README.md
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-11000/adapter_config.json
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-11000/adapter_model.safetensors
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-11000/optimizer.pt
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-11000/rng_state.pth
RENAMED
File without changes
|
unsloth/{gemma-2-96-it → gemma-2-9b-it}/checkpoint-11000/scheduler.pt
RENAMED
File without changes
|