# Default documentation target: list every target defined in this Makefile.
# Descriptions are the literal commands each target runs.
.PHONY: help
help:
	@echo "Usage: make <target>"
	@echo ""
	@echo "Targets:"
	@echo "  prepare_data           - bash data_convert_speechless_thoughts_252k.sh"
	@echo "  train                  - bash tune_llama2_7b_full_ptd.sh"
	@echo "  train_lora             - bash tune_llama2_7b_lora_ptd.sh"
	@echo "  eval                   - bash eval_llama2_7b_mmlu.sh"
	@echo "  eval_alpaca            - bash eval_llama2_7b_alpaca_mmlu.sh"
	@echo "  convert_to_final_model - bash convert_mcore_to_1x1.sh"
	@echo "  infinity_instruct_9m | infinity_instruct_50k | infinity_instruct_250k |"
	@echo "  infinity_instruct_1m | infinity_instruct_3m"
	@echo "                         - build Infinity-Instruct datasets via process_infinity_dataset.py"

# Convert the speechless_thoughts_252k dataset for training.
# Alternative converter kept for reference (previously a commented-out recipe
# line, which make would echo as a shell comment on every run):
#   bash data_convert_alpaca.sh
.PHONY: prepare_data
prepare_data:
	bash data_convert_speechless_thoughts_252k.sh

# Evaluate the model on MMLU (delegates to the eval script).
# .PHONY: without it, a file named "eval" in this directory would make
# this target silently up to date.
.PHONY: eval
eval:
	bash eval_llama2_7b_mmlu.sh

# Evaluate the alpaca variant on MMLU (delegates to the eval script).
.PHONY: eval_alpaca
eval_alpaca:
	bash eval_llama2_7b_alpaca_mmlu.sh

# Full-parameter fine-tune of llama2-7b (PTD parallelism, per script name).
.PHONY: train
train:
	bash tune_llama2_7b_full_ptd.sh

# LoRA fine-tune of llama2-7b (PTD parallelism, per script name).
.PHONY: train_lora
train_lora:
	bash tune_llama2_7b_lora_ptd.sh

# Convert the trained mcore checkpoint to the 1x1 layout (final model format).
.PHONY: convert_to_final_model
convert_to_final_model:
	bash convert_mcore_to_1x1.sh


# Generate the full Infinity-Instruct-9M dataset, written as 20 shards.
# NOTE(review): the flag spelling "inifinity" looks like a typo but must match
# the argparse name in process_infinity_dataset.py — do not "fix" it here
# without changing the script in the same commit.
.PHONY: infinity_instruct_9m
infinity_instruct_9m:
	python process_infinity_dataset.py \
		--generate_inifinity_instruct_9m \
		--output_path /opt/local/datasets/BAAI/Infinity-Instruct-9M \
		--num_shards 20

# Sample 50,000 examples into a 1-shard Infinity-Instruct-50K dataset.
.PHONY: infinity_instruct_50k
infinity_instruct_50k:
	python process_infinity_dataset.py \
		--do_sampling \
		--output_path /opt/local/datasets/BAAI/Infinity-Instruct-50K \
		--num_samples 50000 \
		--num_shards 1

# Sample 250,000 examples into a 1-shard Infinity-Instruct-250K dataset.
.PHONY: infinity_instruct_250k
infinity_instruct_250k:
	python process_infinity_dataset.py \
		--do_sampling \
		--output_path /opt/local/datasets/BAAI/Infinity-Instruct-250K \
		--num_samples 250000 \
		--num_shards 1

# Sample 1,000,000 examples into a 4-shard Infinity-Instruct-1M dataset.
.PHONY: infinity_instruct_1m
infinity_instruct_1m:
	python process_infinity_dataset.py \
		--do_sampling \
		--output_path /opt/local/datasets/BAAI/Infinity-Instruct-1M \
		--num_samples 1000000 \
		--num_shards 4

# Sample 3,000,000 examples into a 12-shard Infinity-Instruct-3M dataset.
.PHONY: infinity_instruct_3m
infinity_instruct_3m:
	python process_infinity_dataset.py \
		--do_sampling \
		--output_path /opt/local/datasets/BAAI/Infinity-Instruct-3M \
		--num_samples 3000000 \
		--num_shards 12


