# Route outbound traffic through the corporate proxy.
# NOTE(review): xx.xx.xx.xx:xxxx is a placeholder — replace with the real
# proxy endpoint before running.
export http_proxy="http://xx.xx.xx.xx:xxxx"
export https_proxy="http://xx.xx.xx.xx:xxxx"
# Hosts/domains that must bypass the proxy (loopback + internal domains).
# Quoted so the comma list is always treated as a single word.
export no_proxy="127.0.0.1,.huawei.com,localhost,local,.local"

echo "============================================ preparing megatron code ==========================================="
# Start from a clean copy of Megatron-LM restored from the local backup.
# The network clone is kept commented out so the script works offline.
rm -rf Megatron-LM
# git clone https://github.com/NVIDIA/Megatron-LM.git
cp -rf Megatron-LM_bk Megatron-LM


echo "============================================ preparing mindspeed code =========================================="
# Restore MindSpeed from the local backup (offline; clone kept for reference),
# then drop the 'mindspeed' package into the Megatron-LM tree so it is
# importable from there.
rm -rf MindSpeed
# git clone https://gitee.com/ascend/MindSpeed.git
cp -rf MindSpeed_bk MindSpeed
cp -rf MindSpeed/mindspeed ./Megatron-LM/

echo "============================================ preparing environment ============================================="
# Load the shell profile, activate the conda env, and source the Ascend
# toolkit / NNAL environment scripts. Order matters: .bashrc must run first
# so that 'conda' is on PATH.
source /root/.bashrc
conda activate mstestpy310
source /home/xxx/pckg/rc4b080/ascend-toolkit/set_env.sh
source /home/xxx/pckg/rc4b080/nnal/atb/set_env.sh

echo "============================================ data preprocessing ================================================="
cp prepare_coverage_data.py ./Megatron-LM/
# Everything from here on runs inside the Megatron-LM tree; abort if the
# directory is missing so later sed/python steps don't run in the wrong place.
cd Megatron-LM || exit 1
# Activate the MindSpeed adaptor immediately after 'import torch' in both the
# training entry point and the preprocessing tool.
sed -i '/import torch/a \import mindspeed.megatron_adaptor' pretrain_gpt.py
sed -i '/import torch/a \import mindspeed.megatron_adaptor' tools/preprocess_data.py
python prepare_coverage_data.py
# Tokenize the dataset only when the binary output does not already exist
# (inverted condition replaces the old no-op ':' then-branch).
if [ ! -f "../prepare_data/gpt-3.5-turbo/alpaca_text_document.bin" ]; then
    python tools/preprocess_data.py --input ../prepare_data/gpt-3.5-turbo/alpaca_json.json \
                                --output-prefix ../prepare_data/gpt-3.5-turbo/alpaca \
                                --tokenizer-type GPT2BPETokenizer \
                                --vocab-file ../prepare_data/gpt-3.5-turbo/vocab.json \
                                --merge-file ../prepare_data/gpt-3.5-turbo/merges.txt \
                                --append-eod \
                                --log-interval 1000 \
                                --workers 8
fi

echo "============================================ core_model-pretrain_gpt_usecase.py ================================="
# Instrument pretrain_gpt.py with coverage.py: right after the
# 'stimer = StragglerDetector()' line, inject imports and start a Coverage
# session whose data_suffix is unique per process (time_ns + random int), so
# parallel ranks write distinct .coverage.usecase-* files.
sed -i '/stimer = StragglerDetector()/a \import random\nimport time\nimport coverage\ncov = coverage.Coverage(data_suffix=f"usecase-{time.time_ns()}_{random.randint(0, 100)}")\ncov.start()' pretrain_gpt.py
# Append cov.stop()/cov.save() at the end of the file ('$a'). NOTE(review):
# the 4-space indentation assumes the file ends inside an indented block
# (e.g. a 'main' guard) — confirm against the patched pretrain_gpt.py.
sed -i '$a\    cov.stop()\n    cov.save()' pretrain_gpt.py

# Write the coverage.py configuration. The redundant 'touch' was removed:
# 'cat > .coveragerc' creates (or truncates) the file by itself.
# Measured source is the current tree; entry points for other models, tools,
# tests, and the upstream megatron package are excluded from the report.
cat>.coveragerc<<EOF
[run]
branch = True
source = ./
omit = setup.py
       pretrain_bert.py
       pretrain_ict.py
       tools/*
       tests/*
       megatron/*
[report]
show_missing = True
EOF

# --- Patch MindSpeed sources for API compatibility with the newer Megatron core ---

# parallel_state: add the new 'pipeline_model_parallel_comm_backend' and
# 'create_gloo_process_groups' keyword parameters (with defaults) to the
# function signatures, and forward them at the matching call sites.
sed -i '/pipeline_model_parallel_split_rank:/a \            pipeline_model_parallel_comm_backend=None,' mindspeed/core/parallel_state.py
sed -i '/get_position_embedding_ranks:/a \            create_gloo_process_groups=True,' mindspeed/core/parallel_state.py
sed -i '/pipeline_model_parallel_split_rank,/a \                pipeline_model_parallel_comm_backend,' mindspeed/core/parallel_state.py
sed -i '/get_position_embedding_ranks,/a \                create_gloo_process_groups,' mindspeed/core/parallel_state.py

# random: extend _set_cuda_rng_state with the new 'graph_safe' flag expected
# by callers in the newer Megatron core (default preserves old behavior).
sed -i 's/def _set_cuda_rng_state(new_state, device=-1):/def _set_cuda_rng_state(new_state, device=-1, graph_safe: bool = False):/g' mindspeed/core/tensor_parallel/random.py

# attention / transformer / transformer_block: add the 'sequence_len_offset'
# parameter (default None) that newer callers now pass through.
sed -i '/    packed_seq_params=None,/a \        sequence_len_offset=None,' mindspeed/core/transformer/attention.py
sed -i '/    packed_seq_params=None,/a \    sequence_len_offset=None,' mindspeed/core/transformer/transformer.py
sed -i 's/, packed_seq_params=None):/, packed_seq_params=None, sequence_len_offset=None):/g' mindspeed/core/transformer/transformer_block.py

# param_and_grad_buffer: replace the removed self.check_for_nan_in_grad()
# with the new check_grads(...) API. Order matters: first append the new call
# after the old line, then comment the old call out. The appended line does
# not itself match the second pattern, so it survives the substitution.
sed -i '/self.check_for_nan_in_grad()/a \        self.check_grads(check_for_nan_or_inf=self.ddp_config.check_for_nan_in_grad, check_for_large=self.ddp_config.check_for_large_grads)' mindspeed/core/distributed/param_and_grad_buffer.py
sed -i 's/self.check_for_nan_in_grad()/# self.check_for_nan_in_grad()/g' mindspeed/core/distributed/param_and_grad_buffer.py

# training: disable the ft_integration (fault-tolerance) call sites that no
# longer exist in this environment by commenting them out.
# sed -i '/from mindspeed.auto_tuning.module.parse.profiling_parse.profiling_node_parse import GatherNodeProfiling/a \from megatron.training import ft_integration' mindspeed/training.py
sed -i 's/ft_client=ft_integration.get_rank_monitor_client(/# ft_client=ft_integration.get_rank_monitor_client(/g' mindspeed/training.py
sed -i 's/ft_integration.StateMachineActions.SAVE_CHECKPOINT),/# ft_integration.StateMachineActions.SAVE_CHECKPOINT),/g' mindspeed/training.py

# lcal_coc: column_parallel_forward gains the 'runtime_gather_output'
# parameter newer callers pass (default None keeps old behavior).
sed -i 's/def column_parallel_forward(x, input_, weight=None):/def column_parallel_forward(x, input_, weight=None, runtime_gather_output=None):/g' mindspeed/core/tensor_parallel/lcal_coc/min_comm_cfg.py

# p2p handles may now be a list instead of a dict in the newer Megatron core;
# iterate either shape with an inline conditional expression.
sed -i 's/for req in reqs.values():/for req in reqs if isinstance(reqs, list) else reqs.values():/g' mindspeed/core/pipeline_parallel/p2p_communication.py
sed -i 's/for req in fwd_wait_handles.values():/for req in fwd_wait_handles if isinstance(fwd_wait_handles, list) else fwd_wait_handles.values():/g' mindspeed/core/pipeline_parallel/ripipe_schedules.py
sed -i 's/for req in bwd_wait_handles.values():/for req in bwd_wait_handles if isinstance(bwd_wait_handles, list) else bwd_wait_handles.values():/g' mindspeed/core/pipeline_parallel/ripipe_schedules.py
sed -i 's/for wait_handle in bwd_wait_handles.values():/for wait_handle in bwd_wait_handles if isinstance(bwd_wait_handles, list) else bwd_wait_handles.values():/g' mindspeed/core/pipeline_parallel/ripipe_schedules.py

# Module moved upstream: custom_layers.transformer_engine is now
# extensions.transformer_engine; rewrite the import paths.
sed -i 's/megatron.core.transformer.custom_layers.transformer_engine/megatron.core.extensions.transformer_engine/g' mindspeed/core/models/gpt/gpt_layer_specs.py
sed -i 's/megatron.core.transformer.custom_layers.transformer_engine/megatron.core.extensions.transformer_engine/g' mindspeed/megatron_adaptor.py
sed -i 's/megatron.core.transformer.custom_layers.transformer_engine/megatron.core.extensions.transformer_engine/g' mindspeed/core/transformer/transformer_block.py

# Upstream Megatron: the isinstance check should test 'reqs' (the local
# variable actually in scope), not 'p2p_reqs'; the broader OR variant was
# rejected in favor of the direct rename.
# sed -i 's/if isinstance(p2p_reqs, list):/if isinstance(p2p_reqs, list) or isinstance(reqs, list):/g' megatron/core/pipeline_parallel/p2p_communication.py
sed -i 's/if isinstance(p2p_reqs, list):/if isinstance(reqs, list):/g' megatron/core/pipeline_parallel/p2p_communication.py

# moe_utils: permute() gained 'fused' and 'drop_and_pad' flags in the newer
# core; add them with defaults that keep the old code path.
sed -i 's/def permute(tokens, routing_map, num_out_tokens: int = None):/def permute(tokens, routing_map, num_out_tokens: int = None, fused: bool = False, drop_and_pad: bool = False):/g' mindspeed/core/transformer/moe/moe_utils.py

# token_dispatcher: attribute renamed upstream (the '_cpu' suffix dropped).
sed -i 's/num_global_tokens_per_local_expert_cpu/num_global_tokens_per_local_expert/g' mindspeed/core/transformer/moe/token_dispatcher.py

# ampipe: take out[0] for both values. NOTE(review): presumably 'out' now has
# a single chunk in this configuration so out[-1] raised/duplicated wrongly —
# confirm against ampipe_moe_mlp_computer.py.
sed -i 's/out, last_chunk_out = out\[0\], out\[-1\]/out, last_chunk_out = out\[0\], out\[0\]/g' mindspeed/moe/ampipe/ampipe_moe_mlp_computer.py

# Run the coverage-instrumented use case; all stdout/stderr goes to test.log.
python ../MindSpeed/tests_extend/system_tests/core_model/pretrain_gpt_usecase.py > test.log 2>&1
rc=$?
# Previously the exit status was silently discarded; surface failures in the
# CI log, but still merge and render whatever coverage data was produced.
if [ "$rc" -ne 0 ]; then
    echo "pretrain_gpt_usecase.py exited with status $rc (see test.log)" >&2
fi

# Merge the per-process fragments into a single .coverage file, then render
# the HTML report into ./coverage_report.
coverage combine .coverage.usecase-*
coverage html --data-file=.coverage -d ./coverage_report